From ed407c84273280e810890e025e5a0e7c7a4593ec Mon Sep 17 00:00:00 2001
From: Daniel Kentfield
Date: Mon, 25 Nov 2024 16:41:32 +0000
Subject: [PATCH] Add rules reader

---
 README.md         |    42 +-
 action.yml        |    10 +-
 dist/index.js     | 11245 ++++++++++++++++++++++++++++++++++++++------
 dist/index.js.map |     2 +-
 package.json      |    10 +-
 rules.yaml        |    23 +
 src/main.test.ts  |   139 +
 src/main.ts       |   130 +-
 yarn.lock         |   825 +++-
 9 files changed, 11065 insertions(+), 1361 deletions(-)
 create mode 100644 rules.yaml
 create mode 100644 src/main.test.ts

diff --git a/README.md b/README.md
index 566a3ea..04558ea 100644
--- a/README.md
+++ b/README.md
@@ -43,19 +43,49 @@ jobs:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # The GITHUB_TOKEN is there by default so you just need to keep it like it is and not necessarily need to add it as secret as it will throw an error. [More Details](https://docs.github.com/en/actions/security-guides/automatic-token-authentication#about-the-github_token-secret)
           OPENAI_API_KEY: ${{ secrets.OPENAI_API_KEY }}
           OPENAI_API_MODEL: "gpt-4" # Optional: defaults to "gpt-4"
-          exclude: "**/*.json, **/*.md" # Optional: exclude patterns separated by commas
+          rules_file_name: "rules.yaml" # Required
 ```
 
-4. Replace `your-username` with your GitHub username or organization name where the AI Code Reviewer repository is
+4. Create a `.github/workflows/rules.yaml` file in your repository and customise the rules you would like:
+
+```yaml
+# .github/workflows/rules.yaml
+
+# Global rules that apply to all files/directories unless overridden
+global: [
+  "Always be concise",
+  "Always consider security vulnerabilities"
+]
+
+# Rules for specific file extensions - keys are glob patterns
+extensions:
+  [".ts", ".js"]: ["Always use export default"]
+  "tsx": ["Always use arrow functions"]
+
+# Rules for specific directories - keys are glob patterns
+directories:
+  "**/resolvers/**": ["Always use mocks in tests"]
+  "src/**": ["Always write tests"]
+
+# Paths to ignore completely
+ignore:
+  - ".git"
+  - "node_modules"
+  - "dist"
+  - "build"
+  - "_build"
+  - "csv"
+  - "json"
+```
+
+5. Replace `your-username` with your GitHub username or organization name where the AI Code Reviewer repository is
 located.
 
-5. Customize the `exclude` input if you want to ignore certain file patterns from being reviewed.
-
-6. Commit the changes to your repository, and AI Code Reviewer will start working on your future pull requests.
+6. Commit the changes to your repository, and AI Code Reviewer will start working on your future pull requests.
 
 ## How It Works
 
-The AI Code Reviewer GitHub Action retrieves the pull request diff, filters out excluded files, and sends code chunks to
+The AI Code Reviewer GitHub Action retrieves the pull request diff, filters out excluded files, filters the coding style rules from rules.yaml to only include those relevant to the file, and sends code chunks to
 the OpenAI API. It then generates review comments based on the AI's response and adds them to the pull request.
 
 ## Contributing
diff --git a/action.yml b/action.yml
index 976aed7..e7adfdd 100644
--- a/action.yml
+++ b/action.yml
@@ -1,5 +1,5 @@
-name: "AI Code Review Action"
-description: "Perform code reviews and comment on diffs using OpenAI API."
+name: "AI Code Reviewer"
+description: "Perform code reviews and comment on diffs with your own rules using OpenAI API."
 inputs:
   GITHUB_TOKEN:
     description: "GitHub token to interact with the repository."
     required: true
@@ -11,9 +11,9 @@ inputs:
     description: "OpenAI API model."
     required: false
     default: "gpt-4"
-  exclude:
-    description: "Glob patterns to exclude files from the diff analysis"
-    required: false
+  rules_file_name:
+    description: "YAML file with Glob patterns to set specific rules (prompts) for file extensions, directories, global rules, or exclude files from the diff analysis"
+    required: true
     default: ""
 runs:
   using: "node16"
diff --git a/dist/index.js b/dist/index.js
index 258b475..179fd73 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -42,12 +42,14 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
     return (mod && mod.__esModule) ? mod : { "default": mod };
 };
 Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getApplicableRules = exports.readRules = void 0;
 const fs_1 = __nccwpck_require__(7147);
 const core = __importStar(__nccwpck_require__(2186));
 const openai_1 = __importDefault(__nccwpck_require__(47));
 const rest_1 = __nccwpck_require__(5375);
 const parse_diff_1 = __importDefault(__nccwpck_require__(4833));
 const minimatch_1 = __importDefault(__nccwpck_require__(2002));
+const yaml_1 = __nccwpck_require__(4083);
 const GITHUB_TOKEN = core.getInput("GITHUB_TOKEN");
 const OPENAI_API_KEY = core.getInput("OPENAI_API_KEY");
 const OPENAI_API_MODEL = core.getInput("OPENAI_API_MODEL");
@@ -85,14 +87,14 @@ function getDiff(owner, repo, pull_number) {
         return response.data;
     });
 }
-function analyzeCode(parsedDiff, prDetails) {
+function analyzeCode(parsedDiff, prDetails, rules) {
     return __awaiter(this, void 0, void 0, function* () {
         const comments = [];
         for (const file of parsedDiff) {
             if (file.to === "/dev/null")
                 continue; // Ignore deleted files
             for (const chunk of file.chunks) {
-                const prompt = createPrompt(file, chunk, prDetails);
+                const prompt = createPrompt(file, chunk, prDetails, rules);
                 const aiResponse = yield getAIResponse(prompt);
                 if (aiResponse) {
                     const newComments = createComment(file, chunk, aiResponse);
@@ -105,7 +107,20 @@ function analyzeCode(parsedDiff, prDetails) {
         return comments;
     });
 }
-function createPrompt(file, chunk, prDetails) {
+function getApplicableRules(rules, file) {
+    const { extensions, directories, global } = rules;
+    const extensionsKeys = Object.keys(extensions);
+    const applicableExtensionKeys = extensionsKeys.filter((key) => { var _a; return (_a = file.to) === null || _a === void 0 ? void 0 : _a.endsWith(key); });
+    const extensionRules = applicableExtensionKeys.flatMap((key) => extensions[key]);
+    const directoriesKeys = Object.keys(directories);
+    const applicableDirectoriesKeys = directoriesKeys.filter((key) => file.to ? (0, minimatch_1.default)(file.to, key) : false);
+    const directoryRules = applicableDirectoriesKeys.flatMap((key) => directories[key]);
+    return [...global, ...extensionRules, ...directoryRules];
+}
+exports.getApplicableRules = getApplicableRules;
+function createPrompt(file, chunk, prDetails, rules) {
+    var _a;
+    const applicableRules = getApplicableRules(rules, file);
     return `Your task is to review pull requests. Instructions:
 - Provide the response in following JSON format: {"reviews": [{"lineNumber": <line_number>, "reviewComment": "<review comment>"}]}
 - Do not give positive comments or compliments.
@@ -115,6 +130,10 @@ function createPrompt(file, chunk, prDetails) {
 - IMPORTANT: NEVER suggest adding comments to the code.
 
 Review the following code diff in the file "${file.to}" and take the pull request title and description into account when writing the response.
+
+${(_a = applicableRules.length) !== null && _a !== void 0 ?
_a : (`Always check the following rules to write the review: + ${applicableRules.join("\n")} +`)} Pull request title: ${prDetails.title} Pull request description: @@ -146,9 +165,7 @@ function getAIResponse(prompt) { presence_penalty: 0, }; try { - const response = yield openai.chat.completions.create(Object.assign(Object.assign(Object.assign({}, queryConfig), (OPENAI_API_MODEL === "gpt-4-1106-preview" - ? { response_format: { type: "json_object" } } - : {})), { messages: [ + const response = yield openai.chat.completions.create(Object.assign(Object.assign({}, queryConfig), { response_format: { type: "json_object" }, messages: [ { role: "system", content: prompt, @@ -218,23 +235,56 @@ function main() { return; } const parsedDiff = (0, parse_diff_1.default)(diff); - const excludePatterns = core - .getInput("exclude") - .split(",") - .map((s) => s.trim()); + const rulesFiles = core + .getInput("rules_file_name") + .trim(); + const rules = readRules(rulesFiles); + const { ignore: allIgnorePatterns = [] } = rules; const filteredDiff = parsedDiff.filter((file) => { - return !excludePatterns.some((pattern) => { var _a; return (0, minimatch_1.default)((_a = file.to) !== null && _a !== void 0 ? _a : "", pattern); }); + return !allIgnorePatterns.some((pattern) => { var _a; return (0, minimatch_1.default)((_a = file.to) !== null && _a !== void 0 ? _a : "", pattern); }); }); - const comments = yield analyzeCode(filteredDiff, prDetails); + const comments = yield analyzeCode(filteredDiff, prDetails, rules); if (comments.length > 0) { yield createReviewComment(prDetails.owner, prDetails.repo, prDetails.pull_number, comments); } }); } -main().catch((error) => { - console.error("Error:", error); - process.exit(1); -}); +function readRules(fileName) { + const fileContents = (0, fs_1.readFileSync)(fileName, "utf8"); + const parsed = (0, yaml_1.parse)(fileContents, { mapAsMap: true }); + // Transform the parsed Map into a plain object + const rules = { + global: parsed.get('global') || [], + extensions: {}, + directories: {}, + ignore: parsed.get('ignore') || [] + }; + // Handle extensions map + const extensionsMap = parsed.get('extensions'); + if (extensionsMap instanceof Map) { + for (const [key, value] of extensionsMap.entries()) { + // If key is an array, create an entry for each extension + if (Array.isArray(key)) { + key.forEach(ext => { + rules.extensions[ext] = value; + }); + } + else { + rules.extensions[key] = value; + } + } + } + // Handle directories map + const directoriesMap = parsed.get('directories'); + if (directoriesMap instanceof Map) { + for (const [key, value] of directoriesMap.entries()) { + rules.directories[key] = value; + } + } + return rules; +} +exports.readRules = readRules; +exports["default"] = main; /***/ }), @@ -14398,1303 +14448,1303 @@ exports.parseURL = __nccwpck_require__(2158).parseURL; /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; - -const punycode = __nccwpck_require__(5477); -const tr46 = __nccwpck_require__(4256); - -const specialSchemes = { - ftp: 21, - file: null, - gopher: 70, - http: 80, - https: 443, - ws: 80, - wss: 443 -}; - -const failure = Symbol("failure"); - -function countSymbols(str) { - return punycode.ucs2.decode(str).length; -} - -function at(input, idx) { - const c = input[idx]; - return isNaN(c) ? 
undefined : String.fromCodePoint(c); -} - -function isASCIIDigit(c) { - return c >= 0x30 && c <= 0x39; -} - -function isASCIIAlpha(c) { - return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); -} - -function isASCIIAlphanumeric(c) { - return isASCIIAlpha(c) || isASCIIDigit(c); -} - -function isASCIIHex(c) { - return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); -} - -function isSingleDot(buffer) { - return buffer === "." || buffer.toLowerCase() === "%2e"; -} - -function isDoubleDot(buffer) { - buffer = buffer.toLowerCase(); - return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; -} - -function isWindowsDriveLetterCodePoints(cp1, cp2) { - return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); -} - -function isWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); -} - -function isNormalizedWindowsDriveLetterString(string) { - return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; -} - -function containsForbiddenHostCodePoint(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function containsForbiddenHostCodePointExcludingPercent(string) { - return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; -} - -function isSpecialScheme(scheme) { - return specialSchemes[scheme] !== undefined; -} - -function isSpecial(url) { - return isSpecialScheme(url.scheme); -} - -function defaultPort(scheme) { - return specialSchemes[scheme]; -} - -function percentEncode(c) { - let hex = c.toString(16).toUpperCase(); - if (hex.length === 1) { - hex = "0" + hex; - } - - return "%" + hex; -} - -function utf8PercentEncode(c) { - const buf = new Buffer(c); - - let str = ""; - - for (let i = 0; i < buf.length; ++i) { - str += percentEncode(buf[i]); - } - - return str; -} - -function utf8PercentDecode(str) { - const input = new Buffer(str); - const output = []; - for (let i = 0; i < input.length; ++i) { - if (input[i] !== 37) { - output.push(input[i]); - } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { - output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); - i += 2; - } else { - output.push(input[i]); - } - } - return new Buffer(output).toString(); -} - -function isC0ControlPercentEncode(c) { - return c <= 0x1F || c > 0x7E; -} - -const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); -function isPathPercentEncode(c) { - return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); -} - -const extraUserinfoPercentEncodeSet = - new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); -function isUserinfoPercentEncode(c) { - return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); -} - -function percentEncodeChar(c, encodeSetPredicate) { - const cStr = String.fromCodePoint(c); - - if (encodeSetPredicate(c)) { - return utf8PercentEncode(cStr); - } - - return cStr; -} - -function parseIPv4Number(input) { - let R = 10; - - if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { - input = input.substring(2); - R = 16; - } else if (input.length >= 2 && input.charAt(0) === "0") { - input = input.substring(1); - R = 8; - } - - if (input === "") { - return 0; - } - - const regex = R === 10 ? /[^0-9]/ : (R === 16 ? 
/[^0-9A-Fa-f]/ : /[^0-7]/); - if (regex.test(input)) { - return failure; - } - - return parseInt(input, R); -} - -function parseIPv4(input) { - const parts = input.split("."); - if (parts[parts.length - 1] === "") { - if (parts.length > 1) { - parts.pop(); - } - } - - if (parts.length > 4) { - return input; - } - - const numbers = []; - for (const part of parts) { - if (part === "") { - return input; - } - const n = parseIPv4Number(part); - if (n === failure) { - return input; - } - - numbers.push(n); - } - - for (let i = 0; i < numbers.length - 1; ++i) { - if (numbers[i] > 255) { - return failure; - } - } - if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { - return failure; - } - - let ipv4 = numbers.pop(); - let counter = 0; - - for (const n of numbers) { - ipv4 += n * Math.pow(256, 3 - counter); - ++counter; - } - - return ipv4; -} - -function serializeIPv4(address) { - let output = ""; - let n = address; - - for (let i = 1; i <= 4; ++i) { - output = String(n % 256) + output; - if (i !== 4) { - output = "." + output; - } - n = Math.floor(n / 256); - } - - return output; -} - -function parseIPv6(input) { - const address = [0, 0, 0, 0, 0, 0, 0, 0]; - let pieceIndex = 0; - let compress = null; - let pointer = 0; - - input = punycode.ucs2.decode(input); - - if (input[pointer] === 58) { - if (input[pointer + 1] !== 58) { - return failure; - } - - pointer += 2; - ++pieceIndex; - compress = pieceIndex; - } - - while (pointer < input.length) { - if (pieceIndex === 8) { - return failure; - } - - if (input[pointer] === 58) { - if (compress !== null) { - return failure; - } - ++pointer; - ++pieceIndex; - compress = pieceIndex; - continue; - } - - let value = 0; - let length = 0; - - while (length < 4 && isASCIIHex(input[pointer])) { - value = value * 0x10 + parseInt(at(input, pointer), 16); - ++pointer; - ++length; - } - - if (input[pointer] === 46) { - if (length === 0) { - return failure; - } - - pointer -= length; - - if (pieceIndex > 6) { - return failure; - } - - let numbersSeen = 0; - - while (input[pointer] !== undefined) { - let ipv4Piece = null; - - if (numbersSeen > 0) { - if (input[pointer] === 46 && numbersSeen < 4) { - ++pointer; - } else { - return failure; - } - } - - if (!isASCIIDigit(input[pointer])) { - return failure; - } - - while (isASCIIDigit(input[pointer])) { - const number = parseInt(at(input, pointer)); - if (ipv4Piece === null) { - ipv4Piece = number; - } else if (ipv4Piece === 0) { - return failure; - } else { - ipv4Piece = ipv4Piece * 10 + number; - } - if (ipv4Piece > 255) { - return failure; - } - ++pointer; - } - - address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; - - ++numbersSeen; - - if (numbersSeen === 2 || numbersSeen === 4) { - ++pieceIndex; - } - } - - if (numbersSeen !== 4) { - return failure; - } - - break; - } else if (input[pointer] === 58) { - ++pointer; - if (input[pointer] === undefined) { - return failure; - } - } else if (input[pointer] !== undefined) { - return failure; - } - - address[pieceIndex] = value; - ++pieceIndex; - } - - if (compress !== null) { - let swaps = pieceIndex - compress; - pieceIndex = 7; - while (pieceIndex !== 0 && swaps > 0) { - const temp = address[compress + swaps - 1]; - address[compress + swaps - 1] = address[pieceIndex]; - address[pieceIndex] = temp; - --pieceIndex; - --swaps; - } - } else if (compress === null && pieceIndex !== 8) { - return failure; - } - - return address; -} - -function serializeIPv6(address) { - let output = ""; - const seqResult = findLongestZeroSequence(address); - 
const compress = seqResult.idx; - let ignore0 = false; - - for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { - if (ignore0 && address[pieceIndex] === 0) { - continue; - } else if (ignore0) { - ignore0 = false; - } - - if (compress === pieceIndex) { - const separator = pieceIndex === 0 ? "::" : ":"; - output += separator; - ignore0 = true; - continue; - } - - output += address[pieceIndex].toString(16); - - if (pieceIndex !== 7) { - output += ":"; - } - } - - return output; -} - -function parseHost(input, isSpecialArg) { - if (input[0] === "[") { - if (input[input.length - 1] !== "]") { - return failure; - } - - return parseIPv6(input.substring(1, input.length - 1)); - } - - if (!isSpecialArg) { - return parseOpaqueHost(input); - } - - const domain = utf8PercentDecode(input); - const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); - if (asciiDomain === null) { - return failure; - } - - if (containsForbiddenHostCodePoint(asciiDomain)) { - return failure; - } - - const ipv4Host = parseIPv4(asciiDomain); - if (typeof ipv4Host === "number" || ipv4Host === failure) { - return ipv4Host; - } - - return asciiDomain; -} - -function parseOpaqueHost(input) { - if (containsForbiddenHostCodePointExcludingPercent(input)) { - return failure; - } - - let output = ""; - const decoded = punycode.ucs2.decode(input); - for (let i = 0; i < decoded.length; ++i) { - output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); - } - return output; -} - -function findLongestZeroSequence(arr) { - let maxIdx = null; - let maxLen = 1; // only find elements > 1 - let currStart = null; - let currLen = 0; - - for (let i = 0; i < arr.length; ++i) { - if (arr[i] !== 0) { - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - currStart = null; - currLen = 0; - } else { - if (currStart === null) { - currStart = i; - } - ++currLen; - } - } - - // if trailing zeros - if (currLen > maxLen) { - maxIdx = currStart; - maxLen = currLen; - } - - return { - idx: maxIdx, - len: maxLen - }; -} - -function serializeHost(host) { - if (typeof host === "number") { - return serializeIPv4(host); - } - - // IPv6 serializer - if (host instanceof Array) { - return "[" + serializeIPv6(host) + "]"; - } - - return host; -} - -function trimControlChars(url) { - return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); -} - -function trimTabAndNewline(url) { - return url.replace(/\u0009|\u000A|\u000D/g, ""); -} - -function shortenPath(url) { - const path = url.path; - if (path.length === 0) { - return; - } - if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { - return; - } - - path.pop(); -} - -function includesCredentials(url) { - return url.username !== "" || url.password !== ""; -} - -function cannotHaveAUsernamePasswordPort(url) { - return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; -} - -function isNormalizedWindowsDriveLetter(string) { - return /^[A-Za-z]:$/.test(string); -} - -function URLStateMachine(input, base, encodingOverride, url, stateOverride) { - this.pointer = 0; - this.input = input; - this.base = base || null; - this.encodingOverride = encodingOverride || "utf-8"; - this.stateOverride = stateOverride; - this.url = url; - this.failure = false; - this.parseError = false; - - if (!this.url) { - this.url = { - scheme: "", - username: "", - password: "", - host: null, - port: null, - path: [], - query: null, - fragment: null, - - cannotBeABaseURL: false - }; - 
- const res = trimControlChars(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - } - - const res = trimTabAndNewline(this.input); - if (res !== this.input) { - this.parseError = true; - } - this.input = res; - - this.state = stateOverride || "scheme start"; - - this.buffer = ""; - this.atFlag = false; - this.arrFlag = false; - this.passwordTokenSeenFlag = false; - - this.input = punycode.ucs2.decode(this.input); - - for (; this.pointer <= this.input.length; ++this.pointer) { - const c = this.input[this.pointer]; - const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); - - // exec state machine - const ret = this["parse " + this.state](c, cStr); - if (!ret) { - break; // terminate algorithm - } else if (ret === failure) { - this.failure = true; - break; - } - } -} - -URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { - if (isASCIIAlpha(c)) { - this.buffer += cStr.toLowerCase(); - this.state = "scheme"; - } else if (!this.stateOverride) { - this.state = "no scheme"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { - if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { - this.buffer += cStr.toLowerCase(); - } else if (c === 58) { - if (this.stateOverride) { - if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { - return false; - } - - if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { - return false; - } - - if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { - return false; - } - - if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { - return false; - } - } - this.url.scheme = this.buffer; - this.buffer = ""; - if (this.stateOverride) { - return false; - } - if (this.url.scheme === "file") { - if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { - this.parseError = true; - } - this.state = "file"; - } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { - this.state = "special relative or authority"; - } else if (isSpecial(this.url)) { - this.state = "special authority slashes"; - } else if (this.input[this.pointer + 1] === 47) { - this.state = "path or authority"; - ++this.pointer; - } else { - this.url.cannotBeABaseURL = true; - this.url.path.push(""); - this.state = "cannot-be-a-base-URL path"; - } - } else if (!this.stateOverride) { - this.buffer = ""; - this.state = "no scheme"; - this.pointer = -1; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { - if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { - return failure; - } else if (this.base.cannotBeABaseURL && c === 35) { - this.url.scheme = this.base.scheme; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.url.cannotBeABaseURL = true; - this.state = "fragment"; - } else if (this.base.scheme === "file") { - this.state = "file"; - --this.pointer; - } else { - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - 
this.parseError = true; - this.state = "relative"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { - if (c === 47) { - this.state = "authority"; - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative"] = function parseRelative(c) { - this.url.scheme = this.base.scheme; - if (isNaN(c)) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 47) { - this.state = "relative slash"; - } else if (c === 63) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else if (isSpecial(this.url) && c === 92) { - this.parseError = true; - this.state = "relative slash"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.url.path = this.base.path.slice(0, this.base.path.length - 1); - - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { - if (isSpecial(this.url) && (c === 47 || c === 92)) { - if (c === 92) { - this.parseError = true; - } - this.state = "special authority ignore slashes"; - } else if (c === 47) { - this.state = "authority"; - } else { - this.url.username = this.base.username; - this.url.password = this.base.password; - this.url.host = this.base.host; - this.url.port = this.base.port; - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { - if (c === 47 && this.input[this.pointer + 1] === 47) { - this.state = "special authority ignore slashes"; - ++this.pointer; - } else { - this.parseError = true; - this.state = "special authority ignore slashes"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { - if (c !== 47 && c !== 92) { - this.state = "authority"; - --this.pointer; - } else { - this.parseError = true; - } - - return true; -}; - -URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { - if (c === 64) { - this.parseError = true; - if (this.atFlag) { - this.buffer = "%40" + this.buffer; - } - this.atFlag = true; - - // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars - const len = countSymbols(this.buffer); - for (let pointer = 0; pointer < len; ++pointer) { - const codePoint = this.buffer.codePointAt(pointer); - - if (codePoint === 58 && !this.passwordTokenSeenFlag) { - this.passwordTokenSeenFlag = true; - continue; - } - const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode); - if 
(this.passwordTokenSeenFlag) { - this.url.password += encodedCodePoints; - } else { - this.url.username += encodedCodePoints; - } - } - this.buffer = ""; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - if (this.atFlag && this.buffer === "") { - this.parseError = true; - return failure; - } - this.pointer -= countSymbols(this.buffer) + 1; - this.buffer = ""; - this.state = "host"; - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse hostname"] = -URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { - if (this.stateOverride && this.url.scheme === "file") { - --this.pointer; - this.state = "file host"; - } else if (c === 58 && !this.arrFlag) { - if (this.buffer === "") { - this.parseError = true; - return failure; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "port"; - if (this.stateOverride === "hostname") { - return false; - } - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92)) { - --this.pointer; - if (isSpecial(this.url) && this.buffer === "") { - this.parseError = true; - return failure; - } else if (this.stateOverride && this.buffer === "" && - (includesCredentials(this.url) || this.url.port !== null)) { - this.parseError = true; - return false; - } - - const host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - - this.url.host = host; - this.buffer = ""; - this.state = "path start"; - if (this.stateOverride) { - return false; - } - } else { - if (c === 91) { - this.arrFlag = true; - } else if (c === 93) { - this.arrFlag = false; - } - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { - if (isASCIIDigit(c)) { - this.buffer += cStr; - } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || - (isSpecial(this.url) && c === 92) || - this.stateOverride) { - if (this.buffer !== "") { - const port = parseInt(this.buffer); - if (port > Math.pow(2, 16) - 1) { - this.parseError = true; - return failure; - } - this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; - this.buffer = ""; - } - if (this.stateOverride) { - return false; - } - this.state = "path start"; - --this.pointer; - } else { - this.parseError = true; - return failure; - } - - return true; -}; - -const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); - -URLStateMachine.prototype["parse file"] = function parseFile(c) { - this.url.scheme = "file"; - - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file slash"; - } else if (this.base !== null && this.base.scheme === "file") { - if (isNaN(c)) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - } else if (c === 63) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - this.url.query = this.base.query; - this.url.fragment = ""; - this.state = "fragment"; - } else { - if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points - !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || - (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points - !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { - this.url.host = this.base.host; - this.url.path = this.base.path.slice(); - shortenPath(this.url); - } else { - this.parseError = true; - } - - this.state = "path"; - --this.pointer; - } - } else { - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { - if (c === 47 || c === 92) { - if (c === 92) { - this.parseError = true; - } - this.state = "file host"; - } else { - if (this.base !== null && this.base.scheme === "file") { - if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { - this.url.path.push(this.base.path[0]); - } else { - this.url.host = this.base.host; - } - } - this.state = "path"; - --this.pointer; - } - - return true; -}; - -URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { - if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { - --this.pointer; - if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { - this.parseError = true; - this.state = "path"; - } else if (this.buffer === "") { - this.url.host = ""; - if (this.stateOverride) { - return false; - } - this.state = "path start"; - } else { - let host = parseHost(this.buffer, isSpecial(this.url)); - if (host === failure) { - return failure; - } - if (host === "localhost") { - host = ""; - } - this.url.host = host; - - if (this.stateOverride) { - return false; - } - - this.buffer = ""; - this.state = "path start"; - } - } else { - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { - if (isSpecial(this.url)) { - if (c === 92) { - this.parseError = true; - } - this.state = "path"; - - if (c !== 47 && c !== 92) { - --this.pointer; - } - } else if (!this.stateOverride && c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (!this.stateOverride && c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else if (c !== undefined) { - this.state = "path"; - if (c !== 47) { - --this.pointer; - } - } - - return true; -}; - -URLStateMachine.prototype["parse path"] = function parsePath(c) { - if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || - 
(!this.stateOverride && (c === 63 || c === 35))) { - if (isSpecial(this.url) && c === 92) { - this.parseError = true; - } - - if (isDoubleDot(this.buffer)) { - shortenPath(this.url); - if (c !== 47 && !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } - } else if (isSingleDot(this.buffer) && c !== 47 && - !(isSpecial(this.url) && c === 92)) { - this.url.path.push(""); - } else if (!isSingleDot(this.buffer)) { - if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { - if (this.url.host !== "" && this.url.host !== null) { - this.parseError = true; - this.url.host = ""; - } - this.buffer = this.buffer[0] + ":"; - } - this.url.path.push(this.buffer); - } - this.buffer = ""; - if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { - while (this.url.path.length > 1 && this.url.path[0] === "") { - this.parseError = true; - this.url.path.shift(); - } - } - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += percentEncodeChar(c, isPathPercentEncode); - } - - return true; -}; - -URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { - if (c === 63) { - this.url.query = ""; - this.state = "query"; - } else if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } else { - // TODO: Add: not a URL code point - if (!isNaN(c) && c !== 37) { - this.parseError = true; - } - - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - if (!isNaN(c)) { - this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); - } - } - - return true; -}; - -URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { - if (isNaN(c) || (!this.stateOverride && c === 35)) { - if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { - this.encodingOverride = "utf-8"; - } - - const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead - for (let i = 0; i < buffer.length; ++i) { - if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || - buffer[i] === 0x3C || buffer[i] === 0x3E) { - this.url.query += percentEncode(buffer[i]); - } else { - this.url.query += String.fromCodePoint(buffer[i]); - } - } - - this.buffer = ""; - if (c === 35) { - this.url.fragment = ""; - this.state = "fragment"; - } - } else { - // TODO: If c is not a URL code point and not "%", parse error. - if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.buffer += cStr; - } - - return true; -}; - -URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { - if (isNaN(c)) { // do nothing - } else if (c === 0x0) { - this.parseError = true; - } else { - // TODO: If c is not a URL code point and not "%", parse error. 
- if (c === 37 && - (!isASCIIHex(this.input[this.pointer + 1]) || - !isASCIIHex(this.input[this.pointer + 2]))) { - this.parseError = true; - } - - this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); - } - - return true; -}; - -function serializeURL(url, excludeFragment) { - let output = url.scheme + ":"; - if (url.host !== null) { - output += "//"; - - if (url.username !== "" || url.password !== "") { - output += url.username; - if (url.password !== "") { - output += ":" + url.password; - } - output += "@"; - } - - output += serializeHost(url.host); - - if (url.port !== null) { - output += ":" + url.port; - } - } else if (url.host === null && url.scheme === "file") { - output += "//"; - } - - if (url.cannotBeABaseURL) { - output += url.path[0]; - } else { - for (const string of url.path) { - output += "/" + string; - } - } - - if (url.query !== null) { - output += "?" + url.query; - } - - if (!excludeFragment && url.fragment !== null) { - output += "#" + url.fragment; - } - - return output; -} - -function serializeOrigin(tuple) { - let result = tuple.scheme + "://"; - result += serializeHost(tuple.host); - - if (tuple.port !== null) { - result += ":" + tuple.port; - } - - return result; -} - -module.exports.serializeURL = serializeURL; - -module.exports.serializeURLOrigin = function (url) { - // https://url.spec.whatwg.org/#concept-url-origin - switch (url.scheme) { - case "blob": - try { - return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); - } catch (e) { - // serializing an opaque origin returns "null" - return "null"; - } - case "ftp": - case "gopher": - case "http": - case "https": - case "ws": - case "wss": - return serializeOrigin({ - scheme: url.scheme, - host: url.host, - port: url.port - }); - case "file": - // spec says "exercise to the reader", chrome says "file://" - return "file://"; - default: - // serializing an opaque origin returns "null" - return "null"; - } -}; - -module.exports.basicURLParse = function (input, options) { - if (options === undefined) { - options = {}; - } - - const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); - if (usm.failure) { - return "failure"; - } - - return usm.url; -}; - -module.exports.setTheUsername = function (url, username) { - url.username = ""; - const decoded = punycode.ucs2.decode(username); - for (let i = 0; i < decoded.length; ++i) { - url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.setThePassword = function (url, password) { - url.password = ""; - const decoded = punycode.ucs2.decode(password); - for (let i = 0; i < decoded.length; ++i) { - url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); - } -}; - -module.exports.serializeHost = serializeHost; - -module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; - -module.exports.serializeInteger = function (integer) { - return String(integer); -}; - -module.exports.parseURL = function (input, options) { - if (options === undefined) { - options = {}; - } - - // We don't handle blobs, so this just delegates: - return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); -}; + +const punycode = __nccwpck_require__(5477); +const tr46 = __nccwpck_require__(4256); + +const specialSchemes = { + ftp: 21, + file: null, + gopher: 70, + http: 80, + https: 443, + ws: 80, + wss: 443 +}; + +const failure = Symbol("failure"); + +function 
countSymbols(str) { + return punycode.ucs2.decode(str).length; +} + +function at(input, idx) { + const c = input[idx]; + return isNaN(c) ? undefined : String.fromCodePoint(c); +} + +function isASCIIDigit(c) { + return c >= 0x30 && c <= 0x39; +} + +function isASCIIAlpha(c) { + return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A); +} + +function isASCIIAlphanumeric(c) { + return isASCIIAlpha(c) || isASCIIDigit(c); +} + +function isASCIIHex(c) { + return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66); +} + +function isSingleDot(buffer) { + return buffer === "." || buffer.toLowerCase() === "%2e"; +} + +function isDoubleDot(buffer) { + buffer = buffer.toLowerCase(); + return buffer === ".." || buffer === "%2e." || buffer === ".%2e" || buffer === "%2e%2e"; +} + +function isWindowsDriveLetterCodePoints(cp1, cp2) { + return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124); +} + +function isWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === ":" || string[1] === "|"); +} + +function isNormalizedWindowsDriveLetterString(string) { + return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === ":"; +} + +function containsForbiddenHostCodePoint(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|%|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function containsForbiddenHostCodePointExcludingPercent(string) { + return string.search(/\u0000|\u0009|\u000A|\u000D|\u0020|#|\/|:|\?|@|\[|\\|\]/) !== -1; +} + +function isSpecialScheme(scheme) { + return specialSchemes[scheme] !== undefined; +} + +function isSpecial(url) { + return isSpecialScheme(url.scheme); +} + +function defaultPort(scheme) { + return specialSchemes[scheme]; +} + +function percentEncode(c) { + let hex = c.toString(16).toUpperCase(); + if (hex.length === 1) { + hex = "0" + hex; + } + + return "%" + hex; +} + +function utf8PercentEncode(c) { + const buf = new Buffer(c); + + let str = ""; + + for (let i = 0; i < buf.length; ++i) { + str += percentEncode(buf[i]); + } + + return str; +} + +function utf8PercentDecode(str) { + const input = new Buffer(str); + const output = []; + for (let i = 0; i < input.length; ++i) { + if (input[i] !== 37) { + output.push(input[i]); + } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) { + output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16)); + i += 2; + } else { + output.push(input[i]); + } + } + return new Buffer(output).toString(); +} + +function isC0ControlPercentEncode(c) { + return c <= 0x1F || c > 0x7E; +} + +const extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]); +function isPathPercentEncode(c) { + return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c); +} + +const extraUserinfoPercentEncodeSet = + new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]); +function isUserinfoPercentEncode(c) { + return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c); +} + +function percentEncodeChar(c, encodeSetPredicate) { + const cStr = String.fromCodePoint(c); + + if (encodeSetPredicate(c)) { + return utf8PercentEncode(cStr); + } + + return cStr; +} + +function parseIPv4Number(input) { + let R = 10; + + if (input.length >= 2 && input.charAt(0) === "0" && input.charAt(1).toLowerCase() === "x") { + input = input.substring(2); + R = 16; + } else if (input.length >= 2 && input.charAt(0) === "0") { + input = input.substring(1); + R = 8; + } + + if (input === "") { + return 0; + } + + const regex = R 
=== 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/); + if (regex.test(input)) { + return failure; + } + + return parseInt(input, R); +} + +function parseIPv4(input) { + const parts = input.split("."); + if (parts[parts.length - 1] === "") { + if (parts.length > 1) { + parts.pop(); + } + } + + if (parts.length > 4) { + return input; + } + + const numbers = []; + for (const part of parts) { + if (part === "") { + return input; + } + const n = parseIPv4Number(part); + if (n === failure) { + return input; + } + + numbers.push(n); + } + + for (let i = 0; i < numbers.length - 1; ++i) { + if (numbers[i] > 255) { + return failure; + } + } + if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) { + return failure; + } + + let ipv4 = numbers.pop(); + let counter = 0; + + for (const n of numbers) { + ipv4 += n * Math.pow(256, 3 - counter); + ++counter; + } + + return ipv4; +} + +function serializeIPv4(address) { + let output = ""; + let n = address; + + for (let i = 1; i <= 4; ++i) { + output = String(n % 256) + output; + if (i !== 4) { + output = "." + output; + } + n = Math.floor(n / 256); + } + + return output; +} + +function parseIPv6(input) { + const address = [0, 0, 0, 0, 0, 0, 0, 0]; + let pieceIndex = 0; + let compress = null; + let pointer = 0; + + input = punycode.ucs2.decode(input); + + if (input[pointer] === 58) { + if (input[pointer + 1] !== 58) { + return failure; + } + + pointer += 2; + ++pieceIndex; + compress = pieceIndex; + } + + while (pointer < input.length) { + if (pieceIndex === 8) { + return failure; + } + + if (input[pointer] === 58) { + if (compress !== null) { + return failure; + } + ++pointer; + ++pieceIndex; + compress = pieceIndex; + continue; + } + + let value = 0; + let length = 0; + + while (length < 4 && isASCIIHex(input[pointer])) { + value = value * 0x10 + parseInt(at(input, pointer), 16); + ++pointer; + ++length; + } + + if (input[pointer] === 46) { + if (length === 0) { + return failure; + } + + pointer -= length; + + if (pieceIndex > 6) { + return failure; + } + + let numbersSeen = 0; + + while (input[pointer] !== undefined) { + let ipv4Piece = null; + + if (numbersSeen > 0) { + if (input[pointer] === 46 && numbersSeen < 4) { + ++pointer; + } else { + return failure; + } + } + + if (!isASCIIDigit(input[pointer])) { + return failure; + } + + while (isASCIIDigit(input[pointer])) { + const number = parseInt(at(input, pointer)); + if (ipv4Piece === null) { + ipv4Piece = number; + } else if (ipv4Piece === 0) { + return failure; + } else { + ipv4Piece = ipv4Piece * 10 + number; + } + if (ipv4Piece > 255) { + return failure; + } + ++pointer; + } + + address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece; + + ++numbersSeen; + + if (numbersSeen === 2 || numbersSeen === 4) { + ++pieceIndex; + } + } + + if (numbersSeen !== 4) { + return failure; + } + + break; + } else if (input[pointer] === 58) { + ++pointer; + if (input[pointer] === undefined) { + return failure; + } + } else if (input[pointer] !== undefined) { + return failure; + } + + address[pieceIndex] = value; + ++pieceIndex; + } + + if (compress !== null) { + let swaps = pieceIndex - compress; + pieceIndex = 7; + while (pieceIndex !== 0 && swaps > 0) { + const temp = address[compress + swaps - 1]; + address[compress + swaps - 1] = address[pieceIndex]; + address[pieceIndex] = temp; + --pieceIndex; + --swaps; + } + } else if (compress === null && pieceIndex !== 8) { + return failure; + } + + return address; +} + +function serializeIPv6(address) { + let output = ""; + const seqResult = 
findLongestZeroSequence(address); + const compress = seqResult.idx; + let ignore0 = false; + + for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) { + if (ignore0 && address[pieceIndex] === 0) { + continue; + } else if (ignore0) { + ignore0 = false; + } + + if (compress === pieceIndex) { + const separator = pieceIndex === 0 ? "::" : ":"; + output += separator; + ignore0 = true; + continue; + } + + output += address[pieceIndex].toString(16); + + if (pieceIndex !== 7) { + output += ":"; + } + } + + return output; +} + +function parseHost(input, isSpecialArg) { + if (input[0] === "[") { + if (input[input.length - 1] !== "]") { + return failure; + } + + return parseIPv6(input.substring(1, input.length - 1)); + } + + if (!isSpecialArg) { + return parseOpaqueHost(input); + } + + const domain = utf8PercentDecode(input); + const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false); + if (asciiDomain === null) { + return failure; + } + + if (containsForbiddenHostCodePoint(asciiDomain)) { + return failure; + } + + const ipv4Host = parseIPv4(asciiDomain); + if (typeof ipv4Host === "number" || ipv4Host === failure) { + return ipv4Host; + } + + return asciiDomain; +} + +function parseOpaqueHost(input) { + if (containsForbiddenHostCodePointExcludingPercent(input)) { + return failure; + } + + let output = ""; + const decoded = punycode.ucs2.decode(input); + for (let i = 0; i < decoded.length; ++i) { + output += percentEncodeChar(decoded[i], isC0ControlPercentEncode); + } + return output; +} + +function findLongestZeroSequence(arr) { + let maxIdx = null; + let maxLen = 1; // only find elements > 1 + let currStart = null; + let currLen = 0; + + for (let i = 0; i < arr.length; ++i) { + if (arr[i] !== 0) { + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + currStart = null; + currLen = 0; + } else { + if (currStart === null) { + currStart = i; + } + ++currLen; + } + } + + // if trailing zeros + if (currLen > maxLen) { + maxIdx = currStart; + maxLen = currLen; + } + + return { + idx: maxIdx, + len: maxLen + }; +} + +function serializeHost(host) { + if (typeof host === "number") { + return serializeIPv4(host); + } + + // IPv6 serializer + if (host instanceof Array) { + return "[" + serializeIPv6(host) + "]"; + } + + return host; +} + +function trimControlChars(url) { + return url.replace(/^[\u0000-\u001F\u0020]+|[\u0000-\u001F\u0020]+$/g, ""); +} + +function trimTabAndNewline(url) { + return url.replace(/\u0009|\u000A|\u000D/g, ""); +} + +function shortenPath(url) { + const path = url.path; + if (path.length === 0) { + return; + } + if (url.scheme === "file" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) { + return; + } + + path.pop(); +} + +function includesCredentials(url) { + return url.username !== "" || url.password !== ""; +} + +function cannotHaveAUsernamePasswordPort(url) { + return url.host === null || url.host === "" || url.cannotBeABaseURL || url.scheme === "file"; +} + +function isNormalizedWindowsDriveLetter(string) { + return /^[A-Za-z]:$/.test(string); +} + +function URLStateMachine(input, base, encodingOverride, url, stateOverride) { + this.pointer = 0; + this.input = input; + this.base = base || null; + this.encodingOverride = encodingOverride || "utf-8"; + this.stateOverride = stateOverride; + this.url = url; + this.failure = false; + this.parseError = false; + + if (!this.url) { + this.url = { + scheme: "", + username: "", + password: "", + host: null, + port: null, + path: [], + query: null, + fragment: 
null, + + cannotBeABaseURL: false + }; + + const res = trimControlChars(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + } + + const res = trimTabAndNewline(this.input); + if (res !== this.input) { + this.parseError = true; + } + this.input = res; + + this.state = stateOverride || "scheme start"; + + this.buffer = ""; + this.atFlag = false; + this.arrFlag = false; + this.passwordTokenSeenFlag = false; + + this.input = punycode.ucs2.decode(this.input); + + for (; this.pointer <= this.input.length; ++this.pointer) { + const c = this.input[this.pointer]; + const cStr = isNaN(c) ? undefined : String.fromCodePoint(c); + + // exec state machine + const ret = this["parse " + this.state](c, cStr); + if (!ret) { + break; // terminate algorithm + } else if (ret === failure) { + this.failure = true; + break; + } + } +} + +URLStateMachine.prototype["parse scheme start"] = function parseSchemeStart(c, cStr) { + if (isASCIIAlpha(c)) { + this.buffer += cStr.toLowerCase(); + this.state = "scheme"; + } else if (!this.stateOverride) { + this.state = "no scheme"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse scheme"] = function parseScheme(c, cStr) { + if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) { + this.buffer += cStr.toLowerCase(); + } else if (c === 58) { + if (this.stateOverride) { + if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) { + return false; + } + + if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) { + return false; + } + + if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === "file") { + return false; + } + + if (this.url.scheme === "file" && (this.url.host === "" || this.url.host === null)) { + return false; + } + } + this.url.scheme = this.buffer; + this.buffer = ""; + if (this.stateOverride) { + return false; + } + if (this.url.scheme === "file") { + if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) { + this.parseError = true; + } + this.state = "file"; + } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) { + this.state = "special relative or authority"; + } else if (isSpecial(this.url)) { + this.state = "special authority slashes"; + } else if (this.input[this.pointer + 1] === 47) { + this.state = "path or authority"; + ++this.pointer; + } else { + this.url.cannotBeABaseURL = true; + this.url.path.push(""); + this.state = "cannot-be-a-base-URL path"; + } + } else if (!this.stateOverride) { + this.buffer = ""; + this.state = "no scheme"; + this.pointer = -1; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +URLStateMachine.prototype["parse no scheme"] = function parseNoScheme(c) { + if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) { + return failure; + } else if (this.base.cannotBeABaseURL && c === 35) { + this.url.scheme = this.base.scheme; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.url.cannotBeABaseURL = true; + this.state = "fragment"; + } else if (this.base.scheme === "file") { + this.state = "file"; + --this.pointer; + } else { + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special relative or authority"] = function parseSpecialRelativeOrAuthority(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore 
slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "relative"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse path or authority"] = function parsePathOrAuthority(c) { + if (c === 47) { + this.state = "authority"; + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative"] = function parseRelative(c) { + this.url.scheme = this.base.scheme; + if (isNaN(c)) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 47) { + this.state = "relative slash"; + } else if (c === 63) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else if (isSpecial(this.url) && c === 92) { + this.parseError = true; + this.state = "relative slash"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.url.path = this.base.path.slice(0, this.base.path.length - 1); + + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse relative slash"] = function parseRelativeSlash(c) { + if (isSpecial(this.url) && (c === 47 || c === 92)) { + if (c === 92) { + this.parseError = true; + } + this.state = "special authority ignore slashes"; + } else if (c === 47) { + this.state = "authority"; + } else { + this.url.username = this.base.username; + this.url.password = this.base.password; + this.url.host = this.base.host; + this.url.port = this.base.port; + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority slashes"] = function parseSpecialAuthoritySlashes(c) { + if (c === 47 && this.input[this.pointer + 1] === 47) { + this.state = "special authority ignore slashes"; + ++this.pointer; + } else { + this.parseError = true; + this.state = "special authority ignore slashes"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse special authority ignore slashes"] = function parseSpecialAuthorityIgnoreSlashes(c) { + if (c !== 47 && c !== 92) { + this.state = "authority"; + --this.pointer; + } else { + this.parseError = true; + } + + return true; +}; + +URLStateMachine.prototype["parse authority"] = function parseAuthority(c, cStr) { + if (c === 64) { + this.parseError = true; + if (this.atFlag) { + this.buffer = "%40" + this.buffer; + } + this.atFlag = true; + + // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars + const len = countSymbols(this.buffer); + for (let pointer = 0; pointer < len; ++pointer) { + const codePoint = this.buffer.codePointAt(pointer); + + if (codePoint === 58 && !this.passwordTokenSeenFlag) { + this.passwordTokenSeenFlag = true; + continue; + } + const encodedCodePoints = percentEncodeChar(codePoint, 
isUserinfoPercentEncode); + if (this.passwordTokenSeenFlag) { + this.url.password += encodedCodePoints; + } else { + this.url.username += encodedCodePoints; + } + } + this.buffer = ""; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + if (this.atFlag && this.buffer === "") { + this.parseError = true; + return failure; + } + this.pointer -= countSymbols(this.buffer) + 1; + this.buffer = ""; + this.state = "host"; + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse hostname"] = +URLStateMachine.prototype["parse host"] = function parseHostName(c, cStr) { + if (this.stateOverride && this.url.scheme === "file") { + --this.pointer; + this.state = "file host"; + } else if (c === 58 && !this.arrFlag) { + if (this.buffer === "") { + this.parseError = true; + return failure; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "port"; + if (this.stateOverride === "hostname") { + return false; + } + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92)) { + --this.pointer; + if (isSpecial(this.url) && this.buffer === "") { + this.parseError = true; + return failure; + } else if (this.stateOverride && this.buffer === "" && + (includesCredentials(this.url) || this.url.port !== null)) { + this.parseError = true; + return false; + } + + const host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + + this.url.host = host; + this.buffer = ""; + this.state = "path start"; + if (this.stateOverride) { + return false; + } + } else { + if (c === 91) { + this.arrFlag = true; + } else if (c === 93) { + this.arrFlag = false; + } + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse port"] = function parsePort(c, cStr) { + if (isASCIIDigit(c)) { + this.buffer += cStr; + } else if (isNaN(c) || c === 47 || c === 63 || c === 35 || + (isSpecial(this.url) && c === 92) || + this.stateOverride) { + if (this.buffer !== "") { + const port = parseInt(this.buffer); + if (port > Math.pow(2, 16) - 1) { + this.parseError = true; + return failure; + } + this.url.port = port === defaultPort(this.url.scheme) ? 
null : port; + this.buffer = ""; + } + if (this.stateOverride) { + return false; + } + this.state = "path start"; + --this.pointer; + } else { + this.parseError = true; + return failure; + } + + return true; +}; + +const fileOtherwiseCodePoints = new Set([47, 92, 63, 35]); + +URLStateMachine.prototype["parse file"] = function parseFile(c) { + this.url.scheme = "file"; + + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file slash"; + } else if (this.base !== null && this.base.scheme === "file") { + if (isNaN(c)) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + } else if (c === 63) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + this.url.query = this.base.query; + this.url.fragment = ""; + this.state = "fragment"; + } else { + if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points + !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) || + (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points + !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) { + this.url.host = this.base.host; + this.url.path = this.base.path.slice(); + shortenPath(this.url); + } else { + this.parseError = true; + } + + this.state = "path"; + --this.pointer; + } + } else { + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file slash"] = function parseFileSlash(c) { + if (c === 47 || c === 92) { + if (c === 92) { + this.parseError = true; + } + this.state = "file host"; + } else { + if (this.base !== null && this.base.scheme === "file") { + if (isNormalizedWindowsDriveLetterString(this.base.path[0])) { + this.url.path.push(this.base.path[0]); + } else { + this.url.host = this.base.host; + } + } + this.state = "path"; + --this.pointer; + } + + return true; +}; + +URLStateMachine.prototype["parse file host"] = function parseFileHost(c, cStr) { + if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) { + --this.pointer; + if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) { + this.parseError = true; + this.state = "path"; + } else if (this.buffer === "") { + this.url.host = ""; + if (this.stateOverride) { + return false; + } + this.state = "path start"; + } else { + let host = parseHost(this.buffer, isSpecial(this.url)); + if (host === failure) { + return failure; + } + if (host === "localhost") { + host = ""; + } + this.url.host = host; + + if (this.stateOverride) { + return false; + } + + this.buffer = ""; + this.state = "path start"; + } + } else { + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse path start"] = function parsePathStart(c) { + if (isSpecial(this.url)) { + if (c === 92) { + this.parseError = true; + } + this.state = "path"; + + if (c !== 47 && c !== 92) { + --this.pointer; + } + } else if (!this.stateOverride && c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (!this.stateOverride && c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else if (c !== undefined) { + this.state = "path"; + if (c !== 47) { + --this.pointer; + } + } + + return true; +}; + +URLStateMachine.prototype["parse path"] = function parsePath(c) { + if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) || + 
(!this.stateOverride && (c === 63 || c === 35))) { + if (isSpecial(this.url) && c === 92) { + this.parseError = true; + } + + if (isDoubleDot(this.buffer)) { + shortenPath(this.url); + if (c !== 47 && !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } + } else if (isSingleDot(this.buffer) && c !== 47 && + !(isSpecial(this.url) && c === 92)) { + this.url.path.push(""); + } else if (!isSingleDot(this.buffer)) { + if (this.url.scheme === "file" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) { + if (this.url.host !== "" && this.url.host !== null) { + this.parseError = true; + this.url.host = ""; + } + this.buffer = this.buffer[0] + ":"; + } + this.url.path.push(this.buffer); + } + this.buffer = ""; + if (this.url.scheme === "file" && (c === undefined || c === 63 || c === 35)) { + while (this.url.path.length > 1 && this.url.path[0] === "") { + this.parseError = true; + this.url.path.shift(); + } + } + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += percentEncodeChar(c, isPathPercentEncode); + } + + return true; +}; + +URLStateMachine.prototype["parse cannot-be-a-base-URL path"] = function parseCannotBeABaseURLPath(c) { + if (c === 63) { + this.url.query = ""; + this.state = "query"; + } else if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } else { + // TODO: Add: not a URL code point + if (!isNaN(c) && c !== 37) { + this.parseError = true; + } + + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + if (!isNaN(c)) { + this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode); + } + } + + return true; +}; + +URLStateMachine.prototype["parse query"] = function parseQuery(c, cStr) { + if (isNaN(c) || (!this.stateOverride && c === 35)) { + if (!isSpecial(this.url) || this.url.scheme === "ws" || this.url.scheme === "wss") { + this.encodingOverride = "utf-8"; + } + + const buffer = new Buffer(this.buffer); // TODO: Use encoding override instead + for (let i = 0; i < buffer.length; ++i) { + if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 || + buffer[i] === 0x3C || buffer[i] === 0x3E) { + this.url.query += percentEncode(buffer[i]); + } else { + this.url.query += String.fromCodePoint(buffer[i]); + } + } + + this.buffer = ""; + if (c === 35) { + this.url.fragment = ""; + this.state = "fragment"; + } + } else { + // TODO: If c is not a URL code point and not "%", parse error. + if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.buffer += cStr; + } + + return true; +}; + +URLStateMachine.prototype["parse fragment"] = function parseFragment(c) { + if (isNaN(c)) { // do nothing + } else if (c === 0x0) { + this.parseError = true; + } else { + // TODO: If c is not a URL code point and not "%", parse error. 
+ if (c === 37 && + (!isASCIIHex(this.input[this.pointer + 1]) || + !isASCIIHex(this.input[this.pointer + 2]))) { + this.parseError = true; + } + + this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode); + } + + return true; +}; + +function serializeURL(url, excludeFragment) { + let output = url.scheme + ":"; + if (url.host !== null) { + output += "//"; + + if (url.username !== "" || url.password !== "") { + output += url.username; + if (url.password !== "") { + output += ":" + url.password; + } + output += "@"; + } + + output += serializeHost(url.host); + + if (url.port !== null) { + output += ":" + url.port; + } + } else if (url.host === null && url.scheme === "file") { + output += "//"; + } + + if (url.cannotBeABaseURL) { + output += url.path[0]; + } else { + for (const string of url.path) { + output += "/" + string; + } + } + + if (url.query !== null) { + output += "?" + url.query; + } + + if (!excludeFragment && url.fragment !== null) { + output += "#" + url.fragment; + } + + return output; +} + +function serializeOrigin(tuple) { + let result = tuple.scheme + "://"; + result += serializeHost(tuple.host); + + if (tuple.port !== null) { + result += ":" + tuple.port; + } + + return result; +} + +module.exports.serializeURL = serializeURL; + +module.exports.serializeURLOrigin = function (url) { + // https://url.spec.whatwg.org/#concept-url-origin + switch (url.scheme) { + case "blob": + try { + return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0])); + } catch (e) { + // serializing an opaque origin returns "null" + return "null"; + } + case "ftp": + case "gopher": + case "http": + case "https": + case "ws": + case "wss": + return serializeOrigin({ + scheme: url.scheme, + host: url.host, + port: url.port + }); + case "file": + // spec says "exercise to the reader", chrome says "file://" + return "file://"; + default: + // serializing an opaque origin returns "null" + return "null"; + } +}; + +module.exports.basicURLParse = function (input, options) { + if (options === undefined) { + options = {}; + } + + const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride); + if (usm.failure) { + return "failure"; + } + + return usm.url; +}; + +module.exports.setTheUsername = function (url, username) { + url.username = ""; + const decoded = punycode.ucs2.decode(username); + for (let i = 0; i < decoded.length; ++i) { + url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.setThePassword = function (url, password) { + url.password = ""; + const decoded = punycode.ucs2.decode(password); + for (let i = 0; i < decoded.length; ++i) { + url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode); + } +}; + +module.exports.serializeHost = serializeHost; + +module.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort; + +module.exports.serializeInteger = function (integer) { + return String(integer); +}; + +module.exports.parseURL = function (input, options) { + if (options === undefined) { + options = {}; + } + + // We don't handle blobs, so this just delegates: + return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride }); +}; /***/ }), @@ -22913,6 +22963,8575 @@ exports.VERSION = void 0; exports.VERSION = '4.20.1'; // x-release-please-version //# sourceMappingURL=version.js.map +/***/ }), + +/***/ 8109: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use 
strict"; + + +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var resolveBlockMap = __nccwpck_require__(2986); +var resolveBlockSeq = __nccwpck_require__(2289); +var resolveFlowCollection = __nccwpck_require__(45); + +function resolveCollection(CN, ctx, token, onError, tagName, tag) { + const coll = token.type === 'block-map' + ? resolveBlockMap.resolveBlockMap(CN, ctx, token, onError, tag) + : token.type === 'block-seq' + ? resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError, tag) + : resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError, tag); + const Coll = coll.constructor; + // If we got a tagName matching the class, or the tag name is '!', + // then use the tagName from the node class used to create it. + if (tagName === '!' || tagName === Coll.tagName) { + coll.tag = Coll.tagName; + return coll; + } + if (tagName) + coll.tag = tagName; + return coll; +} +function composeCollection(CN, ctx, token, props, onError) { + const tagToken = props.tag; + const tagName = !tagToken + ? null + : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)); + if (token.type === 'block-seq') { + const { anchor, newlineAfterProp: nl } = props; + const lastProp = anchor && tagToken + ? anchor.offset > tagToken.offset + ? anchor + : tagToken + : (anchor ?? tagToken); + if (lastProp && (!nl || nl.offset < lastProp.offset)) { + const message = 'Missing newline after block sequence props'; + onError(lastProp, 'MISSING_CHAR', message); + } + } + const expType = token.type === 'block-map' + ? 'map' + : token.type === 'block-seq' + ? 'seq' + : token.start.source === '{' + ? 'map' + : 'seq'; + // shortcut: check if it's a generic YAMLMap or YAMLSeq + // before jumping into the custom tag logic. + if (!tagToken || + !tagName || + tagName === '!' || + (tagName === YAMLMap.YAMLMap.tagName && expType === 'map') || + (tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq')) { + return resolveCollection(CN, ctx, token, onError, tagName); + } + let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType); + if (!tag) { + const kt = ctx.schema.knownTags[tagName]; + if (kt && kt.collection === expType) { + ctx.schema.tags.push(Object.assign({}, kt, { default: false })); + tag = kt; + } + else { + if (kt?.collection) { + onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true); + } + else { + onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true); + } + return resolveCollection(CN, ctx, token, onError, tagName); + } + } + const coll = resolveCollection(CN, ctx, token, onError, tagName, tag); + const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll; + const node = identity.isNode(res) + ? 
res + : new Scalar.Scalar(res); + node.range = coll.range; + node.tag = tagName; + if (tag?.format) + node.format = tag.format; + return node; +} + +exports.composeCollection = composeCollection; + + +/***/ }), + +/***/ 5050: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Document = __nccwpck_require__(42); +var composeNode = __nccwpck_require__(8676); +var resolveEnd = __nccwpck_require__(1250); +var resolveProps = __nccwpck_require__(6985); + +function composeDoc(options, directives, { offset, start, value, end }, onError) { + const opts = Object.assign({ _directives: directives }, options); + const doc = new Document.Document(undefined, opts); + const ctx = { + atKey: false, + atRoot: true, + directives: doc.directives, + options: doc.options, + schema: doc.schema + }; + const props = resolveProps.resolveProps(start, { + indicator: 'doc-start', + next: value ?? end?.[0], + offset, + onError, + parentIndent: 0, + startOnNewline: true + }); + if (props.found) { + doc.directives.docStart = true; + if (value && + (value.type === 'block-map' || value.type === 'block-seq') && + !props.hasNewline) + onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker'); + } + // @ts-expect-error If Contents is set, let's trust the user + doc.contents = value + ? composeNode.composeNode(ctx, value, props, onError) + : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError); + const contentEnd = doc.contents.range[2]; + const re = resolveEnd.resolveEnd(end, contentEnd, false, onError); + if (re.comment) + doc.comment = re.comment; + doc.range = [offset, contentEnd, re.offset]; + return doc; +} + +exports.composeDoc = composeDoc; + + +/***/ }), + +/***/ 8676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Alias = __nccwpck_require__(5639); +var identity = __nccwpck_require__(5589); +var composeCollection = __nccwpck_require__(8109); +var composeScalar = __nccwpck_require__(4766); +var resolveEnd = __nccwpck_require__(1250); +var utilEmptyScalarPosition = __nccwpck_require__(8781); + +const CN = { composeNode, composeEmptyNode }; +function composeNode(ctx, token, props, onError) { + const atKey = ctx.atKey; + const { spaceBefore, comment, anchor, tag } = props; + let node; + let isSrcToken = true; + switch (token.type) { + case 'alias': + node = composeAlias(ctx, token, onError); + if (anchor || tag) + onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties'); + break; + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + case 'block-scalar': + node = composeScalar.composeScalar(ctx, token, tag, onError); + if (anchor) + node.anchor = anchor.source.substring(1); + break; + case 'block-map': + case 'block-seq': + case 'flow-collection': + node = composeCollection.composeCollection(CN, ctx, token, props, onError); + if (anchor) + node.anchor = anchor.source.substring(1); + break; + default: { + const message = token.type === 'error' + ? 
token.message + : `Unsupported token (type: ${token.type})`; + onError(token, 'UNEXPECTED_TOKEN', message); + node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError); + isSrcToken = false; + } + } + if (anchor && node.anchor === '') + onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); + if (atKey && + ctx.options.stringKeys && + (!identity.isScalar(node) || + typeof node.value !== 'string' || + (node.tag && node.tag !== 'tag:yaml.org,2002:str'))) { + const msg = 'With stringKeys, all keys must be strings'; + onError(tag ?? token, 'NON_STRING_KEY', msg); + } + if (spaceBefore) + node.spaceBefore = true; + if (comment) { + if (token.type === 'scalar' && token.source === '') + node.comment = comment; + else + node.commentBefore = comment; + } + // @ts-expect-error Type checking misses meaning of isSrcToken + if (ctx.options.keepSourceTokens && isSrcToken) + node.srcToken = token; + return node; +} +function composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) { + const token = { + type: 'scalar', + offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos), + indent: -1, + source: '' + }; + const node = composeScalar.composeScalar(ctx, token, tag, onError); + if (anchor) { + node.anchor = anchor.source.substring(1); + if (node.anchor === '') + onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string'); + } + if (spaceBefore) + node.spaceBefore = true; + if (comment) { + node.comment = comment; + node.range[2] = end; + } + return node; +} +function composeAlias({ options }, { offset, source, end }, onError) { + const alias = new Alias.Alias(source.substring(1)); + if (alias.source === '') + onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string'); + if (alias.source.endsWith(':')) + onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true); + const valueEnd = offset + source.length; + const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError); + alias.range = [offset, valueEnd, re.offset]; + if (re.comment) + alias.comment = re.comment; + return alias; +} + +exports.composeEmptyNode = composeEmptyNode; +exports.composeNode = composeNode; + + +/***/ }), + +/***/ 4766: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var resolveBlockScalar = __nccwpck_require__(9485); +var resolveFlowScalar = __nccwpck_require__(7578); + +function composeScalar(ctx, token, tagToken, onError) { + const { value, type, comment, range } = token.type === 'block-scalar' + ? resolveBlockScalar.resolveBlockScalar(ctx, token, onError) + : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError); + const tagName = tagToken + ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg)) + : null; + let tag; + if (ctx.options.stringKeys && ctx.atKey) { + tag = ctx.schema[identity.SCALAR]; + } + else if (tagName) + tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError); + else if (token.type === 'scalar') + tag = findScalarTagByTest(ctx, value, token, onError); + else + tag = ctx.schema[identity.SCALAR]; + let scalar; + try { + const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options); + scalar = identity.isScalar(res) ? res : new Scalar.Scalar(res); + } + catch (error) { + const msg = error instanceof Error ? 
error.message : String(error); + onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg); + scalar = new Scalar.Scalar(value); + } + scalar.range = range; + scalar.source = value; + if (type) + scalar.type = type; + if (tagName) + scalar.tag = tagName; + if (tag.format) + scalar.format = tag.format; + if (comment) + scalar.comment = comment; + return scalar; +} +function findScalarTagByName(schema, value, tagName, tagToken, onError) { + if (tagName === '!') + return schema[identity.SCALAR]; // non-specific tag + const matchWithTest = []; + for (const tag of schema.tags) { + if (!tag.collection && tag.tag === tagName) { + if (tag.default && tag.test) + matchWithTest.push(tag); + else + return tag; + } + } + for (const tag of matchWithTest) + if (tag.test?.test(value)) + return tag; + const kt = schema.knownTags[tagName]; + if (kt && !kt.collection) { + // Ensure that the known tag is available for stringifying, + // but does not get used by default. + schema.tags.push(Object.assign({}, kt, { default: false, test: undefined })); + return kt; + } + onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str'); + return schema[identity.SCALAR]; +} +function findScalarTagByTest({ atKey, directives, schema }, value, token, onError) { + const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) && + tag.test?.test(value)) || schema[identity.SCALAR]; + if (schema.compat) { + const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ?? + schema[identity.SCALAR]; + if (tag.tag !== compat.tag) { + const ts = directives.tagString(tag.tag); + const cs = directives.tagString(compat.tag); + const msg = `Value may be parsed as either ${ts} or ${cs}`; + onError(token, 'TAG_RESOLVE_FAILED', msg, true); + } + } + return tag; +} + +exports.composeScalar = composeScalar; + + +/***/ }), + +/***/ 9493: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var directives = __nccwpck_require__(5400); +var Document = __nccwpck_require__(42); +var errors = __nccwpck_require__(4236); +var identity = __nccwpck_require__(5589); +var composeDoc = __nccwpck_require__(5050); +var resolveEnd = __nccwpck_require__(1250); + +function getErrorPos(src) { + if (typeof src === 'number') + return [src, src + 1]; + if (Array.isArray(src)) + return src.length === 2 ? src : [src[0], src[1]]; + const { offset, source } = src; + return [offset, offset + (typeof source === 'string' ? source.length : 1)]; +} +function parsePrelude(prelude) { + let comment = ''; + let atComment = false; + let afterEmptyLine = false; + for (let i = 0; i < prelude.length; ++i) { + const source = prelude[i]; + switch (source[0]) { + case '#': + comment += + (comment === '' ? '' : afterEmptyLine ? '\n\n' : '\n') + + (source.substring(1) || ' '); + atComment = true; + afterEmptyLine = false; + break; + case '%': + if (prelude[i + 1]?.[0] !== '#') + i += 1; + atComment = false; + break; + default: + // This may be wrong after doc-end, but in that case it doesn't matter + if (!atComment) + afterEmptyLine = true; + atComment = false; + } + } + return { comment, afterEmptyLine }; +} +/** + * Compose a stream of CST nodes into a stream of YAML Documents. + * + * ```ts + * import { Composer, Parser } from 'yaml' + * + * const src: string = ... 
+ * const tokens = new Parser().parse(src) + * const docs = new Composer().compose(tokens) + * ``` + */ +class Composer { + constructor(options = {}) { + this.doc = null; + this.atDirectives = false; + this.prelude = []; + this.errors = []; + this.warnings = []; + this.onError = (source, code, message, warning) => { + const pos = getErrorPos(source); + if (warning) + this.warnings.push(new errors.YAMLWarning(pos, code, message)); + else + this.errors.push(new errors.YAMLParseError(pos, code, message)); + }; + // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing + this.directives = new directives.Directives({ version: options.version || '1.2' }); + this.options = options; + } + decorate(doc, afterDoc) { + const { comment, afterEmptyLine } = parsePrelude(this.prelude); + //console.log({ dc: doc.comment, prelude, comment }) + if (comment) { + const dc = doc.contents; + if (afterDoc) { + doc.comment = doc.comment ? `${doc.comment}\n${comment}` : comment; + } + else if (afterEmptyLine || doc.directives.docStart || !dc) { + doc.commentBefore = comment; + } + else if (identity.isCollection(dc) && !dc.flow && dc.items.length > 0) { + let it = dc.items[0]; + if (identity.isPair(it)) + it = it.key; + const cb = it.commentBefore; + it.commentBefore = cb ? `${comment}\n${cb}` : comment; + } + else { + const cb = dc.commentBefore; + dc.commentBefore = cb ? `${comment}\n${cb}` : comment; + } + } + if (afterDoc) { + Array.prototype.push.apply(doc.errors, this.errors); + Array.prototype.push.apply(doc.warnings, this.warnings); + } + else { + doc.errors = this.errors; + doc.warnings = this.warnings; + } + this.prelude = []; + this.errors = []; + this.warnings = []; + } + /** + * Current stream status information. + * + * Mostly useful at the end of input for an empty stream. + */ + streamInfo() { + return { + comment: parsePrelude(this.prelude).comment, + directives: this.directives, + errors: this.errors, + warnings: this.warnings + }; + } + /** + * Compose tokens into documents. + * + * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. + * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. + */ + *compose(tokens, forceDoc = false, endOffset = -1) { + for (const token of tokens) + yield* this.next(token); + yield* this.end(forceDoc, endOffset); + } + /** Advance the composer by one CST token. */ + *next(token) { + if (process.env.LOG_STREAM) + console.dir(token, { depth: null }); + switch (token.type) { + case 'directive': + this.directives.add(token.source, (offset, message, warning) => { + const pos = getErrorPos(token); + pos[0] += offset; + this.onError(pos, 'BAD_DIRECTIVE', message, warning); + }); + this.prelude.push(token.source); + this.atDirectives = true; + break; + case 'document': { + const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError); + if (this.atDirectives && !doc.directives.docStart) + this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line'); + this.decorate(doc, false); + if (this.doc) + yield this.doc; + this.doc = doc; + this.atDirectives = false; + break; + } + case 'byte-order-mark': + case 'space': + break; + case 'comment': + case 'newline': + this.prelude.push(token.source); + break; + case 'error': { + const msg = token.source + ? 
`${token.message}: ${JSON.stringify(token.source)}` + : token.message; + const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg); + if (this.atDirectives || !this.doc) + this.errors.push(error); + else + this.doc.errors.push(error); + break; + } + case 'doc-end': { + if (!this.doc) { + const msg = 'Unexpected doc-end without preceding document'; + this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg)); + break; + } + this.doc.directives.docEnd = true; + const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError); + this.decorate(this.doc, true); + if (end.comment) { + const dc = this.doc.comment; + this.doc.comment = dc ? `${dc}\n${end.comment}` : end.comment; + } + this.doc.range[2] = end.offset; + break; + } + default: + this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`)); + } + } + /** + * Call at end of input to yield any remaining document. + * + * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document. + * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly. + */ + *end(forceDoc = false, endOffset = -1) { + if (this.doc) { + this.decorate(this.doc, true); + yield this.doc; + this.doc = null; + } + else if (forceDoc) { + const opts = Object.assign({ _directives: this.directives }, this.options); + const doc = new Document.Document(undefined, opts); + if (this.atDirectives) + this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line'); + doc.range = [0, endOffset, endOffset]; + this.decorate(doc, false); + yield doc; + } + } +} + +exports.Composer = Composer; + + +/***/ }), + +/***/ 2986: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Pair = __nccwpck_require__(246); +var YAMLMap = __nccwpck_require__(6011); +var resolveProps = __nccwpck_require__(6985); +var utilContainsNewline = __nccwpck_require__(976); +var utilFlowIndentCheck = __nccwpck_require__(3669); +var utilMapIncludes = __nccwpck_require__(6899); + +const startColMsg = 'All mapping items must start at the same column'; +function resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) { + const NodeClass = tag?.nodeClass ?? YAMLMap.YAMLMap; + const map = new NodeClass(ctx.schema); + if (ctx.atRoot) + ctx.atRoot = false; + let offset = bm.offset; + let commentEnd = null; + for (const collItem of bm.items) { + const { start, key, sep, value } = collItem; + // key properties + const keyProps = resolveProps.resolveProps(start, { + indicator: 'explicit-key-ind', + next: key ?? 
sep?.[0], + offset, + onError, + parentIndent: bm.indent, + startOnNewline: true + }); + const implicitKey = !keyProps.found; + if (implicitKey) { + if (key) { + if (key.type === 'block-seq') + onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key'); + else if ('indent' in key && key.indent !== bm.indent) + onError(offset, 'BAD_INDENT', startColMsg); + } + if (!keyProps.anchor && !keyProps.tag && !sep) { + commentEnd = keyProps.end; + if (keyProps.comment) { + if (map.comment) + map.comment += '\n' + keyProps.comment; + else + map.comment = keyProps.comment; + } + continue; + } + if (keyProps.newlineAfterProp || utilContainsNewline.containsNewline(key)) { + onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line'); + } + } + else if (keyProps.found?.indent !== bm.indent) { + onError(offset, 'BAD_INDENT', startColMsg); + } + // key value + ctx.atKey = true; + const keyStart = keyProps.end; + const keyNode = key + ? composeNode(ctx, key, keyProps, onError) + : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError); + if (ctx.schema.compat) + utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError); + ctx.atKey = false; + if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) + onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); + // value properties + const valueProps = resolveProps.resolveProps(sep ?? [], { + indicator: 'map-value-ind', + next: value, + offset: keyNode.range[2], + onError, + parentIndent: bm.indent, + startOnNewline: !key || key.type === 'block-scalar' + }); + offset = valueProps.end; + if (valueProps.found) { + if (implicitKey) { + if (value?.type === 'block-map' && !valueProps.hasNewline) + onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings'); + if (ctx.options.strict && + keyProps.start < valueProps.found.offset - 1024) + onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key'); + } + // value value + const valueNode = value + ? composeNode(ctx, value, valueProps, onError) + : composeEmptyNode(ctx, offset, sep, null, valueProps, onError); + if (ctx.schema.compat) + utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError); + offset = valueNode.range[2]; + const pair = new Pair.Pair(keyNode, valueNode); + if (ctx.options.keepSourceTokens) + pair.srcToken = collItem; + map.items.push(pair); + } + else { + // key with no value + if (implicitKey) + onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values'); + if (valueProps.comment) { + if (keyNode.comment) + keyNode.comment += '\n' + valueProps.comment; + else + keyNode.comment = valueProps.comment; + } + const pair = new Pair.Pair(keyNode); + if (ctx.options.keepSourceTokens) + pair.srcToken = collItem; + map.items.push(pair); + } + } + if (commentEnd && commentEnd < offset) + onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content'); + map.range = [bm.offset, offset, commentEnd ?? 
offset]; + return map; +} + +exports.resolveBlockMap = resolveBlockMap; + + +/***/ }), + +/***/ 9485: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +function resolveBlockScalar(ctx, scalar, onError) { + const start = scalar.offset; + const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError); + if (!header) + return { value: '', type: null, comment: '', range: [start, start, start] }; + const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL; + const lines = scalar.source ? splitLines(scalar.source) : []; + // determine the end of content & start of chomping + let chompStart = lines.length; + for (let i = lines.length - 1; i >= 0; --i) { + const content = lines[i][1]; + if (content === '' || content === '\r') + chompStart = i; + else + break; + } + // shortcut for empty contents + if (chompStart === 0) { + const value = header.chomp === '+' && lines.length > 0 + ? '\n'.repeat(Math.max(1, lines.length - 1)) + : ''; + let end = start + header.length; + if (scalar.source) + end += scalar.source.length; + return { value, type, comment: header.comment, range: [start, end, end] }; + } + // find the indentation level to trim from start + let trimIndent = scalar.indent + header.indent; + let offset = scalar.offset + header.length; + let contentStart = 0; + for (let i = 0; i < chompStart; ++i) { + const [indent, content] = lines[i]; + if (content === '' || content === '\r') { + if (header.indent === 0 && indent.length > trimIndent) + trimIndent = indent.length; + } + else { + if (indent.length < trimIndent) { + const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator'; + onError(offset + indent.length, 'MISSING_CHAR', message); + } + if (header.indent === 0) + trimIndent = indent.length; + contentStart = i; + if (trimIndent === 0 && !ctx.atRoot) { + const message = 'Block scalar values in collections must be indented'; + onError(offset, 'BAD_INDENT', message); + } + break; + } + offset += indent.length + content.length + 1; + } + // include trailing more-indented empty lines in content + for (let i = lines.length - 1; i >= chompStart; --i) { + if (lines[i][0].length > trimIndent) + chompStart = i + 1; + } + let value = ''; + let sep = ''; + let prevMoreIndented = false; + // leading whitespace is kept intact + for (let i = 0; i < contentStart; ++i) + value += lines[i][0].slice(trimIndent) + '\n'; + for (let i = contentStart; i < chompStart; ++i) { + let [indent, content] = lines[i]; + offset += indent.length + content.length + 1; + const crlf = content[content.length - 1] === '\r'; + if (crlf) + content = content.slice(0, -1); + /* istanbul ignore if already caught in lexer */ + if (content && indent.length < trimIndent) { + const src = header.indent + ? 'explicit indentation indicator' + : 'first line'; + const message = `Block scalar lines must not be less indented than their ${src}`; + onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message); + indent = ''; + } + if (type === Scalar.Scalar.BLOCK_LITERAL) { + value += sep + indent.slice(trimIndent) + content; + sep = '\n'; + } + else if (indent.length > trimIndent || content[0] === '\t') { + // more-indented content within a folded block + if (sep === ' ') + sep = '\n'; + else if (!prevMoreIndented && sep === '\n') + sep = '\n\n'; + value += sep + indent.slice(trimIndent) + content; + sep = '\n'; + prevMoreIndented = true; + } + else if (content === '') { + // empty line + if (sep === '\n') + value += '\n'; + else + sep = '\n'; + } + else { + value += sep + content; + sep = ' '; + prevMoreIndented = false; + } + } + switch (header.chomp) { + case '-': + break; + case '+': + for (let i = chompStart; i < lines.length; ++i) + value += '\n' + lines[i][0].slice(trimIndent); + if (value[value.length - 1] !== '\n') + value += '\n'; + break; + default: + value += '\n'; + } + const end = start + header.length + scalar.source.length; + return { value, type, comment: header.comment, range: [start, end, end] }; +} +function parseBlockScalarHeader({ offset, props }, strict, onError) { + /* istanbul ignore if should not happen */ + if (props[0].type !== 'block-scalar-header') { + onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found'); + return null; + } + const { source } = props[0]; + const mode = source[0]; + let indent = 0; + let chomp = ''; + let error = -1; + for (let i = 1; i < source.length; ++i) { + const ch = source[i]; + if (!chomp && (ch === '-' || ch === '+')) + chomp = ch; + else { + const n = Number(ch); + if (!indent && n) + indent = n; + else if (error === -1) + error = offset + i; + } + } + if (error !== -1) + onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`); + let hasSpace = false; + let comment = ''; + let length = source.length; + for (let i = 1; i < props.length; ++i) { + const token = props[i]; + switch (token.type) { + case 'space': + hasSpace = true; + // fallthrough + case 'newline': + length += token.source.length; + break; + case 'comment': + if (strict && !hasSpace) { + const message = 'Comments must be separated from other tokens by white space characters'; + onError(token, 'MISSING_CHAR', message); + } + length += token.source.length; + comment = token.source.substring(1); + break; + case 'error': + onError(token, 'UNEXPECTED_TOKEN', token.message); + length += token.source.length; + break; + /* istanbul ignore next should not happen */ + default: { + const message = `Unexpected token in block scalar header: ${token.type}`; + onError(token, 'UNEXPECTED_TOKEN', message); + const ts = token.source; + if (ts && typeof ts === 'string') + length += ts.length; + } + } + } + return { mode, indent, chomp, comment, length }; +} +/** @returns Array of lines split up as `[indent, content]` */ +function splitLines(source) { + const split = source.split(/\n( *)/); + const first = split[0]; + const m = first.match(/^( *)/); + const line0 = m?.[1] + ? 
[m[1], first.slice(m[1].length)] + : ['', first]; + const lines = [line0]; + for (let i = 1; i < split.length; i += 2) + lines.push([split[i], split[i + 1]]); + return lines; +} + +exports.resolveBlockScalar = resolveBlockScalar; + + +/***/ }), + +/***/ 2289: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var YAMLSeq = __nccwpck_require__(5161); +var resolveProps = __nccwpck_require__(6985); +var utilFlowIndentCheck = __nccwpck_require__(3669); + +function resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) { + const NodeClass = tag?.nodeClass ?? YAMLSeq.YAMLSeq; + const seq = new NodeClass(ctx.schema); + if (ctx.atRoot) + ctx.atRoot = false; + if (ctx.atKey) + ctx.atKey = false; + let offset = bs.offset; + let commentEnd = null; + for (const { start, value } of bs.items) { + const props = resolveProps.resolveProps(start, { + indicator: 'seq-item-ind', + next: value, + offset, + onError, + parentIndent: bs.indent, + startOnNewline: true + }); + if (!props.found) { + if (props.anchor || props.tag || value) { + if (value && value.type === 'block-seq') + onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column'); + else + onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator'); + } + else { + commentEnd = props.end; + if (props.comment) + seq.comment = props.comment; + continue; + } + } + const node = value + ? composeNode(ctx, value, props, onError) + : composeEmptyNode(ctx, props.end, start, null, props, onError); + if (ctx.schema.compat) + utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError); + offset = node.range[2]; + seq.items.push(node); + } + seq.range = [bs.offset, offset, commentEnd ?? offset]; + return seq; +} + +exports.resolveBlockSeq = resolveBlockSeq; + + +/***/ }), + +/***/ 1250: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function resolveEnd(end, offset, reqSpace, onError) { + let comment = ''; + if (end) { + let hasSpace = false; + let sep = ''; + for (const token of end) { + const { source, type } = token; + switch (type) { + case 'space': + hasSpace = true; + break; + case 'comment': { + if (reqSpace && !hasSpace) + onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); + const cb = source.substring(1) || ' '; + if (!comment) + comment = cb; + else + comment += sep + cb; + sep = ''; + break; + } + case 'newline': + if (comment) + sep += source; + hasSpace = true; + break; + default: + onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`); + } + offset += source.length; + } + } + return { comment, offset }; +} + +exports.resolveEnd = resolveEnd; + + +/***/ }), + +/***/ 45: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var resolveEnd = __nccwpck_require__(1250); +var resolveProps = __nccwpck_require__(6985); +var utilContainsNewline = __nccwpck_require__(976); +var utilMapIncludes = __nccwpck_require__(6899); + +const blockMsg = 'Block collections are not allowed within flow collections'; +const isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq'); +function resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) { + const isMap = fc.start.source === '{'; + const fcName = isMap ? 
'flow map' : 'flow sequence'; + const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap.YAMLMap : YAMLSeq.YAMLSeq)); + const coll = new NodeClass(ctx.schema); + coll.flow = true; + const atRoot = ctx.atRoot; + if (atRoot) + ctx.atRoot = false; + if (ctx.atKey) + ctx.atKey = false; + let offset = fc.offset + fc.start.source.length; + for (let i = 0; i < fc.items.length; ++i) { + const collItem = fc.items[i]; + const { start, key, sep, value } = collItem; + const props = resolveProps.resolveProps(start, { + flow: fcName, + indicator: 'explicit-key-ind', + next: key ?? sep?.[0], + offset, + onError, + parentIndent: fc.indent, + startOnNewline: false + }); + if (!props.found) { + if (!props.anchor && !props.tag && !sep && !value) { + if (i === 0 && props.comma) + onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); + else if (i < fc.items.length - 1) + onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`); + if (props.comment) { + if (coll.comment) + coll.comment += '\n' + props.comment; + else + coll.comment = props.comment; + } + offset = props.end; + continue; + } + if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key)) + onError(key, // checked by containsNewline() + 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); + } + if (i === 0) { + if (props.comma) + onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`); + } + else { + if (!props.comma) + onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`); + if (props.comment) { + let prevItemComment = ''; + loop: for (const st of start) { + switch (st.type) { + case 'comma': + case 'space': + break; + case 'comment': + prevItemComment = st.source.substring(1); + break loop; + default: + break loop; + } + } + if (prevItemComment) { + let prev = coll.items[coll.items.length - 1]; + if (identity.isPair(prev)) + prev = prev.value ?? prev.key; + if (prev.comment) + prev.comment += '\n' + prevItemComment; + else + prev.comment = prevItemComment; + props.comment = props.comment.substring(prevItemComment.length + 1); + } + } + } + if (!isMap && !sep && !props.found) { + // item is a value in a seq + // → key & sep are empty, start does not include ? or : + const valueNode = value + ? composeNode(ctx, value, props, onError) + : composeEmptyNode(ctx, props.end, sep, null, props, onError); + coll.items.push(valueNode); + offset = valueNode.range[2]; + if (isBlock(value)) + onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); + } + else { + // item is a key+value pair + // key value + ctx.atKey = true; + const keyStart = props.end; + const keyNode = key + ? composeNode(ctx, key, props, onError) + : composeEmptyNode(ctx, keyStart, start, null, props, onError); + if (isBlock(key)) + onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg); + ctx.atKey = false; + // value properties + const valueProps = resolveProps.resolveProps(sep ?? 
[], { + flow: fcName, + indicator: 'map-value-ind', + next: value, + offset: keyNode.range[2], + onError, + parentIndent: fc.indent, + startOnNewline: false + }); + if (valueProps.found) { + if (!isMap && !props.found && ctx.options.strict) { + if (sep) + for (const st of sep) { + if (st === valueProps.found) + break; + if (st.type === 'newline') { + onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line'); + break; + } + } + if (props.start < valueProps.found.offset - 1024) + onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key'); + } + } + else if (value) { + if ('source' in value && value.source && value.source[0] === ':') + onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`); + else + onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`); + } + // value value + const valueNode = value + ? composeNode(ctx, value, valueProps, onError) + : valueProps.found + ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError) + : null; + if (valueNode) { + if (isBlock(value)) + onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg); + } + else if (valueProps.comment) { + if (keyNode.comment) + keyNode.comment += '\n' + valueProps.comment; + else + keyNode.comment = valueProps.comment; + } + const pair = new Pair.Pair(keyNode, valueNode); + if (ctx.options.keepSourceTokens) + pair.srcToken = collItem; + if (isMap) { + const map = coll; + if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode)) + onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique'); + map.items.push(pair); + } + else { + const map = new YAMLMap.YAMLMap(ctx.schema); + map.flow = true; + map.items.push(pair); + const endRange = (valueNode ?? keyNode).range; + map.range = [keyNode.range[0], endRange[1], endRange[2]]; + coll.items.push(map); + } + offset = valueNode ? valueNode.range[2] : valueProps.end; + } + } + const expectedEnd = isMap ? '}' : ']'; + const [ce, ...ee] = fc.end; + let cePos = offset; + if (ce && ce.source === expectedEnd) + cePos = ce.offset + ce.source.length; + else { + const name = fcName[0].toUpperCase() + fcName.substring(1); + const msg = atRoot + ? `${name} must end with a ${expectedEnd}` + : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`; + onError(offset, atRoot ? 
'MISSING_CHAR' : 'BAD_INDENT', msg); + if (ce && ce.source.length !== 1) + ee.unshift(ce); + } + if (ee.length > 0) { + const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError); + if (end.comment) { + if (coll.comment) + coll.comment += '\n' + end.comment; + else + coll.comment = end.comment; + } + coll.range = [fc.offset, cePos, end.offset]; + } + else { + coll.range = [fc.offset, cePos, cePos]; + } + return coll; +} + +exports.resolveFlowCollection = resolveFlowCollection; + + +/***/ }), + +/***/ 7578: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var resolveEnd = __nccwpck_require__(1250); + +function resolveFlowScalar(scalar, strict, onError) { + const { offset, type, source, end } = scalar; + let _type; + let value; + const _onError = (rel, code, msg) => onError(offset + rel, code, msg); + switch (type) { + case 'scalar': + _type = Scalar.Scalar.PLAIN; + value = plainValue(source, _onError); + break; + case 'single-quoted-scalar': + _type = Scalar.Scalar.QUOTE_SINGLE; + value = singleQuotedValue(source, _onError); + break; + case 'double-quoted-scalar': + _type = Scalar.Scalar.QUOTE_DOUBLE; + value = doubleQuotedValue(source, _onError); + break; + /* istanbul ignore next should not happen */ + default: + onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`); + return { + value: '', + type: null, + comment: '', + range: [offset, offset + source.length, offset + source.length] + }; + } + const valueEnd = offset + source.length; + const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError); + return { + value, + type: _type, + comment: re.comment, + range: [offset, valueEnd, re.offset] + }; +} +function plainValue(source, onError) { + let badChar = ''; + switch (source[0]) { + /* istanbul ignore next should not happen */ + case '\t': + badChar = 'a tab character'; + break; + case ',': + badChar = 'flow indicator character ,'; + break; + case '%': + badChar = 'directive indicator character %'; + break; + case '|': + case '>': { + badChar = `block scalar indicator ${source[0]}`; + break; + } + case '@': + case '`': { + badChar = `reserved character ${source[0]}`; + break; + } + } + if (badChar) + onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`); + return foldLines(source); +} +function singleQuotedValue(source, onError) { + if (source[source.length - 1] !== "'" || source.length === 1) + onError(source.length, 'MISSING_CHAR', "Missing closing 'quote"); + return foldLines(source.slice(1, -1)).replace(/''/g, "'"); +} +function foldLines(source) { + /** + * The negative lookbehind here and in the `re` RegExp is to + * prevent causing a polynomial search time in certain cases. + * + * The try-catch is for Safari, which doesn't support this yet: + * https://caniuse.com/js-regexp-lookbehind + */ + let first, line; + try { + first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch; + } + else { + res += ch; + } + } + if (source[source.length - 1] !== '"' || source.length === 1) + onError(source.length, 'MISSING_CHAR', 'Missing closing "quote'); + return res; +} +/** + * Fold a single newline into a space, multiple newlines to N - 1 newlines. 
+ * Presumes `source[offset] === '\n'` + */ +function foldNewline(source, offset) { + let fold = ''; + let ch = source[offset + 1]; + while (ch === ' ' || ch === '\t' || ch === '\n' || ch === '\r') { + if (ch === '\r' && source[offset + 2] !== '\n') + break; + if (ch === '\n') + fold += '\n'; + offset += 1; + ch = source[offset + 1]; + } + if (!fold) + fold = ' '; + return { fold, offset }; +} +const escapeCodes = { + '0': '\0', // null character + a: '\x07', // bell character + b: '\b', // backspace + e: '\x1b', // escape character + f: '\f', // form feed + n: '\n', // line feed + r: '\r', // carriage return + t: '\t', // horizontal tab + v: '\v', // vertical tab + N: '\u0085', // Unicode next line + _: '\u00a0', // Unicode non-breaking space + L: '\u2028', // Unicode line separator + P: '\u2029', // Unicode paragraph separator + ' ': ' ', + '"': '"', + '/': '/', + '\\': '\\', + '\t': '\t' +}; +function parseCharCode(source, offset, length, onError) { + const cc = source.substr(offset, length); + const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc); + const code = ok ? parseInt(cc, 16) : NaN; + if (isNaN(code)) { + const raw = source.substr(offset - 2, length + 2); + onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`); + return raw; + } + return String.fromCodePoint(code); +} + +exports.resolveFlowScalar = resolveFlowScalar; + + +/***/ }), + +/***/ 6985: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) { + let spaceBefore = false; + let atNewline = startOnNewline; + let hasSpace = startOnNewline; + let comment = ''; + let commentSep = ''; + let hasNewline = false; + let reqSpace = false; + let tab = null; + let anchor = null; + let tag = null; + let newlineAfterProp = null; + let comma = null; + let found = null; + let start = null; + for (const token of tokens) { + if (reqSpace) { + if (token.type !== 'space' && + token.type !== 'newline' && + token.type !== 'comma') + onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); + reqSpace = false; + } + if (tab) { + if (atNewline && token.type !== 'comment' && token.type !== 'newline') { + onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); + } + tab = null; + } + switch (token.type) { + case 'space': + // At the doc level, tabs at line start may be parsed + // as leading white space rather than indentation. + // In a flow collection, only the parser handles indent. 
+ if (!flow && + (indicator !== 'doc-start' || next?.type !== 'flow-collection') && + token.source.includes('\t')) { + tab = token; + } + hasSpace = true; + break; + case 'comment': { + if (!hasSpace) + onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters'); + const cb = token.source.substring(1) || ' '; + if (!comment) + comment = cb; + else + comment += commentSep + cb; + commentSep = ''; + atNewline = false; + break; + } + case 'newline': + if (atNewline) { + if (comment) + comment += token.source; + else + spaceBefore = true; + } + else + commentSep += token.source; + atNewline = true; + hasNewline = true; + if (anchor || tag) + newlineAfterProp = token; + hasSpace = true; + break; + case 'anchor': + if (anchor) + onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor'); + if (token.source.endsWith(':')) + onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true); + anchor = token; + if (start === null) + start = token.offset; + atNewline = false; + hasSpace = false; + reqSpace = true; + break; + case 'tag': { + if (tag) + onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag'); + tag = token; + if (start === null) + start = token.offset; + atNewline = false; + hasSpace = false; + reqSpace = true; + break; + } + case indicator: + // Could here handle preceding comments differently + if (anchor || tag) + onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`); + if (found) + onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`); + found = token; + atNewline = + indicator === 'seq-item-ind' || indicator === 'explicit-key-ind'; + hasSpace = false; + break; + case 'comma': + if (flow) { + if (comma) + onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`); + comma = token; + atNewline = false; + hasSpace = false; + break; + } + // else fallthrough + default: + onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`); + atNewline = false; + hasSpace = false; + } + } + const last = tokens[tokens.length - 1]; + const end = last ? last.offset + last.source.length : offset; + if (reqSpace && + next && + next.type !== 'space' && + next.type !== 'newline' && + next.type !== 'comma' && + (next.type !== 'scalar' || next.source !== '')) { + onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space'); + } + if (tab && + ((atNewline && tab.indent <= parentIndent) || + next?.type === 'block-map' || + next?.type === 'block-seq')) + onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation'); + return { + comma, + found, + spaceBefore, + comment, + hasNewline, + anchor, + tag, + newlineAfterProp, + end, + start: start ?? 
end + }; +} + +exports.resolveProps = resolveProps; + + +/***/ }), + +/***/ 976: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function containsNewline(key) { + if (!key) + return null; + switch (key.type) { + case 'alias': + case 'scalar': + case 'double-quoted-scalar': + case 'single-quoted-scalar': + if (key.source.includes('\n')) + return true; + if (key.end) + for (const st of key.end) + if (st.type === 'newline') + return true; + return false; + case 'flow-collection': + for (const it of key.items) { + for (const st of it.start) + if (st.type === 'newline') + return true; + if (it.sep) + for (const st of it.sep) + if (st.type === 'newline') + return true; + if (containsNewline(it.key) || containsNewline(it.value)) + return true; + } + return false; + default: + return true; + } +} + +exports.containsNewline = containsNewline; + + +/***/ }), + +/***/ 8781: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function emptyScalarPosition(offset, before, pos) { + if (before) { + if (pos === null) + pos = before.length; + for (let i = pos - 1; i >= 0; --i) { + let st = before[i]; + switch (st.type) { + case 'space': + case 'comment': + case 'newline': + offset -= st.source.length; + continue; + } + // Technically, an empty scalar is immediately after the last non-empty + // node, but it's more useful to place it after any whitespace. + st = before[++i]; + while (st?.type === 'space') { + offset += st.source.length; + st = before[++i]; + } + break; + } + } + return offset; +} + +exports.emptyScalarPosition = emptyScalarPosition; + + +/***/ }), + +/***/ 3669: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var utilContainsNewline = __nccwpck_require__(976); + +function flowIndentCheck(indent, fc, onError) { + if (fc?.type === 'flow-collection') { + const end = fc.end[0]; + if (end.indent === indent && + (end.source === ']' || end.source === '}') && + utilContainsNewline.containsNewline(fc)) { + const msg = 'Flow end indicator should be more indented than parent'; + onError(end, 'BAD_INDENT', msg, true); + } + } +} + +exports.flowIndentCheck = flowIndentCheck; + + +/***/ }), + +/***/ 6899: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); + +function mapIncludes(ctx, items, search) { + const { uniqueKeys } = ctx.options; + if (uniqueKeys === false) + return false; + const isEqual = typeof uniqueKeys === 'function' + ? 
uniqueKeys + : (a, b) => a === b || (identity.isScalar(a) && identity.isScalar(b) && a.value === b.value); + return items.some(pair => isEqual(pair.key, search)); +} + +exports.mapIncludes = mapIncludes; + + +/***/ }), + +/***/ 42: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Alias = __nccwpck_require__(5639); +var Collection = __nccwpck_require__(3466); +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var toJS = __nccwpck_require__(2463); +var Schema = __nccwpck_require__(6831); +var stringifyDocument = __nccwpck_require__(5225); +var anchors = __nccwpck_require__(8459); +var applyReviver = __nccwpck_require__(3412); +var createNode = __nccwpck_require__(9652); +var directives = __nccwpck_require__(5400); + +class Document { + constructor(value, replacer, options) { + /** A comment before this Document */ + this.commentBefore = null; + /** A comment immediately after this Document */ + this.comment = null; + /** Errors encountered during parsing. */ + this.errors = []; + /** Warnings encountered during parsing. */ + this.warnings = []; + Object.defineProperty(this, identity.NODE_TYPE, { value: identity.DOC }); + let _replacer = null; + if (typeof replacer === 'function' || Array.isArray(replacer)) { + _replacer = replacer; + } + else if (options === undefined && replacer) { + options = replacer; + replacer = undefined; + } + const opt = Object.assign({ + intAsBigInt: false, + keepSourceTokens: false, + logLevel: 'warn', + prettyErrors: true, + strict: true, + stringKeys: false, + uniqueKeys: true, + version: '1.2' + }, options); + this.options = opt; + let { version } = opt; + if (options?._directives) { + this.directives = options._directives.atDocument(); + if (this.directives.yaml.explicit) + version = this.directives.yaml.version; + } + else + this.directives = new directives.Directives({ version }); + this.setSchema(version, options); + // @ts-expect-error We can't really know that this matches Contents. + this.contents = + value === undefined ? null : this.createNode(value, _replacer, options); + } + /** + * Create a deep copy of this Document and its contents. + * + * Custom Node values that inherit from `Object` still refer to their original instances. + */ + clone() { + const copy = Object.create(Document.prototype, { + [identity.NODE_TYPE]: { value: identity.DOC } + }); + copy.commentBefore = this.commentBefore; + copy.comment = this.comment; + copy.errors = this.errors.slice(); + copy.warnings = this.warnings.slice(); + copy.options = Object.assign({}, this.options); + if (this.directives) + copy.directives = this.directives.clone(); + copy.schema = this.schema.clone(); + // @ts-expect-error We can't really know that this matches Contents. + copy.contents = identity.isNode(this.contents) + ? this.contents.clone(copy.schema) + : this.contents; + if (this.range) + copy.range = this.range.slice(); + return copy; + } + /** Adds a value to the document. */ + add(value) { + if (assertCollection(this.contents)) + this.contents.add(value); + } + /** Adds a value to the document. */ + addIn(path, value) { + if (assertCollection(this.contents)) + this.contents.addIn(path, value); + } + /** + * Create a new `Alias` node, ensuring that the target `node` has the required anchor. + * + * If `node` already has an anchor, `name` is ignored. 
+ * Otherwise, the `node.anchor` value will be set to `name`, + * or if an anchor with that name is already present in the document, + * `name` will be used as a prefix for a new unique anchor. + * If `name` is undefined, the generated anchor will use 'a' as a prefix. + */ + createAlias(node, name) { + if (!node.anchor) { + const prev = anchors.anchorNames(this); + node.anchor = + // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing + !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name; + } + return new Alias.Alias(node.anchor); + } + createNode(value, replacer, options) { + let _replacer = undefined; + if (typeof replacer === 'function') { + value = replacer.call({ '': value }, '', value); + _replacer = replacer; + } + else if (Array.isArray(replacer)) { + const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number; + const asStr = replacer.filter(keyToStr).map(String); + if (asStr.length > 0) + replacer = replacer.concat(asStr); + _replacer = replacer; + } + else if (options === undefined && replacer) { + options = replacer; + replacer = undefined; + } + const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {}; + const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, + // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing + anchorPrefix || 'a'); + const ctx = { + aliasDuplicateObjects: aliasDuplicateObjects ?? true, + keepUndefined: keepUndefined ?? false, + onAnchor, + onTagObj, + replacer: _replacer, + schema: this.schema, + sourceObjects + }; + const node = createNode.createNode(value, tag, ctx); + if (flow && identity.isCollection(node)) + node.flow = true; + setAnchors(); + return node; + } + /** + * Convert a key and a value into a `Pair` using the current schema, + * recursively wrapping all values as `Scalar` or `Collection` nodes. + */ + createPair(key, value, options = {}) { + const k = this.createNode(key, null, options); + const v = this.createNode(value, null, options); + return new Pair.Pair(k, v); + } + /** + * Removes a value from the document. + * @returns `true` if the item was found and removed. + */ + delete(key) { + return assertCollection(this.contents) ? this.contents.delete(key) : false; + } + /** + * Removes a value from the document. + * @returns `true` if the item was found and removed. + */ + deleteIn(path) { + if (Collection.isEmptyPath(path)) { + if (this.contents == null) + return false; + // @ts-expect-error Presumed impossible if Strict extends false + this.contents = null; + return true; + } + return assertCollection(this.contents) + ? this.contents.deleteIn(path) + : false; + } + /** + * Returns item at `key`, or `undefined` if not found. By default unwraps + * scalar values from their surrounding node; to disable set `keepScalar` to + * `true` (collections are always returned intact). + */ + get(key, keepScalar) { + return identity.isCollection(this.contents) + ? this.contents.get(key, keepScalar) + : undefined; + } + /** + * Returns item at `path`, or `undefined` if not found. By default unwraps + * scalar values from their surrounding node; to disable set `keepScalar` to + * `true` (collections are always returned intact). + */ + getIn(path, keepScalar) { + if (Collection.isEmptyPath(path)) + return !keepScalar && identity.isScalar(this.contents) + ? this.contents.value + : this.contents; + return identity.isCollection(this.contents) + ? 
this.contents.getIn(path, keepScalar) + : undefined; + } + /** + * Checks if the document includes a value with the key `key`. + */ + has(key) { + return identity.isCollection(this.contents) ? this.contents.has(key) : false; + } + /** + * Checks if the document includes a value at `path`. + */ + hasIn(path) { + if (Collection.isEmptyPath(path)) + return this.contents !== undefined; + return identity.isCollection(this.contents) ? this.contents.hasIn(path) : false; + } + /** + * Sets a value in this document. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + */ + set(key, value) { + if (this.contents == null) { + // @ts-expect-error We can't really know that this matches Contents. + this.contents = Collection.collectionFromPath(this.schema, [key], value); + } + else if (assertCollection(this.contents)) { + this.contents.set(key, value); + } + } + /** + * Sets a value in this document. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + */ + setIn(path, value) { + if (Collection.isEmptyPath(path)) { + // @ts-expect-error We can't really know that this matches Contents. + this.contents = value; + } + else if (this.contents == null) { + // @ts-expect-error We can't really know that this matches Contents. + this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value); + } + else if (assertCollection(this.contents)) { + this.contents.setIn(path, value); + } + } + /** + * Change the YAML version and schema used by the document. + * A `null` version disables support for directives, explicit tags, anchors, and aliases. + * It also requires the `schema` option to be given as a `Schema` instance value. + * + * Overrides all previously set schema options. + */ + setSchema(version, options = {}) { + if (typeof version === 'number') + version = String(version); + let opt; + switch (version) { + case '1.1': + if (this.directives) + this.directives.yaml.version = '1.1'; + else + this.directives = new directives.Directives({ version: '1.1' }); + opt = { resolveKnownTags: false, schema: 'yaml-1.1' }; + break; + case '1.2': + case 'next': + if (this.directives) + this.directives.yaml.version = version; + else + this.directives = new directives.Directives({ version }); + opt = { resolveKnownTags: true, schema: 'core' }; + break; + case null: + if (this.directives) + delete this.directives; + opt = null; + break; + default: { + const sv = JSON.stringify(version); + throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`); + } + } + // Not using `instanceof Schema` to allow for duck typing + if (options.schema instanceof Object) + this.schema = options.schema; + else if (opt) + this.schema = new Schema.Schema(Object.assign(opt, options)); + else + throw new Error(`With a null YAML version, the { schema: Schema } option is required`); + } + // json & jsonArg are only used from toJSON() + toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { + const ctx = { + anchors: new Map(), + doc: this, + keep: !json, + mapAsMap: mapAsMap === true, + mapKeyWarned: false, + maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 + }; + const res = toJS.toJS(this.contents, jsonArg ?? '', ctx); + if (typeof onAnchor === 'function') + for (const { count, res } of ctx.anchors.values()) + onAnchor(res, count); + return typeof reviver === 'function' + ? 
applyReviver.applyReviver(reviver, { '': res }, '', res) + : res; + } + /** + * A JSON representation of the document `contents`. + * + * @param jsonArg Used by `JSON.stringify` to indicate the array index or + * property name. + */ + toJSON(jsonArg, onAnchor) { + return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor }); + } + /** A YAML representation of the document. */ + toString(options = {}) { + if (this.errors.length > 0) + throw new Error('Document with errors cannot be stringified'); + if ('indent' in options && + (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) { + const s = JSON.stringify(options.indent); + throw new Error(`"indent" option must be a positive integer, not ${s}`); + } + return stringifyDocument.stringifyDocument(this, options); + } +} +function assertCollection(contents) { + if (identity.isCollection(contents)) + return true; + throw new Error('Expected a YAML collection as document contents'); +} + +exports.Document = Document; + + +/***/ }), + +/***/ 8459: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var visit = __nccwpck_require__(6796); + +/** + * Verify that the input string is a valid anchor. + * + * Will throw on errors. + */ +function anchorIsValid(anchor) { + if (/[\x00-\x19\s,[\]{}]/.test(anchor)) { + const sa = JSON.stringify(anchor); + const msg = `Anchor must not contain whitespace or control characters: ${sa}`; + throw new Error(msg); + } + return true; +} +function anchorNames(root) { + const anchors = new Set(); + visit.visit(root, { + Value(_key, node) { + if (node.anchor) + anchors.add(node.anchor); + } + }); + return anchors; +} +/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */ +function findNewAnchor(prefix, exclude) { + for (let i = 1; true; ++i) { + const name = `${prefix}${i}`; + if (!exclude.has(name)) + return name; + } +} +function createNodeAnchors(doc, prefix) { + const aliasObjects = []; + const sourceObjects = new Map(); + let prevAnchors = null; + return { + onAnchor: (source) => { + aliasObjects.push(source); + if (!prevAnchors) + prevAnchors = anchorNames(doc); + const anchor = findNewAnchor(prefix, prevAnchors); + prevAnchors.add(anchor); + return anchor; + }, + /** + * With circular references, the source node is only resolved after all + * of its child nodes are. This is why anchors are set only after all of + * the nodes have been created. + */ + setAnchors: () => { + for (const source of aliasObjects) { + const ref = sourceObjects.get(source); + if (typeof ref === 'object' && + ref.anchor && + (identity.isScalar(ref.node) || identity.isCollection(ref.node))) { + ref.node.anchor = ref.anchor; + } + else { + const error = new Error('Failed to resolve repeated object (this should not happen)'); + error.source = source; + throw error; + } + } + }, + sourceObjects + }; +} + +exports.anchorIsValid = anchorIsValid; +exports.anchorNames = anchorNames; +exports.createNodeAnchors = createNodeAnchors; +exports.findNewAnchor = findNewAnchor; + + +/***/ }), + +/***/ 3412: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec, + * in section 24.5.1.1 "Runtime Semantics: InternalizeJSONProperty" of the + * 2021 edition: https://tc39.es/ecma262/#sec-json.parse + * + * Includes extensions for handling Map and Set objects. 
+ */ +function applyReviver(reviver, obj, key, val) { + if (val && typeof val === 'object') { + if (Array.isArray(val)) { + for (let i = 0, len = val.length; i < len; ++i) { + const v0 = val[i]; + const v1 = applyReviver(reviver, val, String(i), v0); + // eslint-disable-next-line @typescript-eslint/no-array-delete + if (v1 === undefined) + delete val[i]; + else if (v1 !== v0) + val[i] = v1; + } + } + else if (val instanceof Map) { + for (const k of Array.from(val.keys())) { + const v0 = val.get(k); + const v1 = applyReviver(reviver, val, k, v0); + if (v1 === undefined) + val.delete(k); + else if (v1 !== v0) + val.set(k, v1); + } + } + else if (val instanceof Set) { + for (const v0 of Array.from(val)) { + const v1 = applyReviver(reviver, val, v0, v0); + if (v1 === undefined) + val.delete(v0); + else if (v1 !== v0) { + val.delete(v0); + val.add(v1); + } + } + } + else { + for (const [k, v0] of Object.entries(val)) { + const v1 = applyReviver(reviver, val, k, v0); + if (v1 === undefined) + delete val[k]; + else if (v1 !== v0) + val[k] = v1; + } + } + } + return reviver.call(obj, key, val); +} + +exports.applyReviver = applyReviver; + + +/***/ }), + +/***/ 9652: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Alias = __nccwpck_require__(5639); +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); + +const defaultTagPrefix = 'tag:yaml.org,2002:'; +function findTagObject(value, tagName, tags) { + if (tagName) { + const match = tags.filter(t => t.tag === tagName); + const tagObj = match.find(t => !t.format) ?? match[0]; + if (!tagObj) + throw new Error(`Tag ${tagName} not found`); + return tagObj; + } + return tags.find(t => t.identify?.(value) && !t.format); +} +function createNode(value, tagName, ctx) { + if (identity.isDocument(value)) + value = value.contents; + if (identity.isNode(value)) + return value; + if (identity.isPair(value)) { + const map = ctx.schema[identity.MAP].createNode?.(ctx.schema, null, ctx); + map.items.push(value); + return map; + } + if (value instanceof String || + value instanceof Number || + value instanceof Boolean || + (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere + ) { + // https://tc39.es/ecma262/#sec-serializejsonproperty + value = value.valueOf(); + } + const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx; + // Detect duplicate references to the same object & use Alias nodes for all + // after first. The `ref` wrapper allows for circular references to resolve. + let ref = undefined; + if (aliasDuplicateObjects && value && typeof value === 'object') { + ref = sourceObjects.get(value); + if (ref) { + if (!ref.anchor) + ref.anchor = onAnchor(value); + return new Alias.Alias(ref.anchor); + } + else { + ref = { anchor: null, node: null }; + sourceObjects.set(value, ref); + } + } + if (tagName?.startsWith('!!')) + tagName = defaultTagPrefix + tagName.slice(2); + let tagObj = findTagObject(value, tagName, schema.tags); + if (!tagObj) { + if (value && typeof value.toJSON === 'function') { + // eslint-disable-next-line @typescript-eslint/no-unsafe-call + value = value.toJSON(); + } + if (!value || typeof value !== 'object') { + const node = new Scalar.Scalar(value); + if (ref) + ref.node = node; + return node; + } + tagObj = + value instanceof Map + ? schema[identity.MAP] + : Symbol.iterator in Object(value) + ? 
schema[identity.SEQ] + : schema[identity.MAP]; + } + if (onTagObj) { + onTagObj(tagObj); + delete ctx.onTagObj; + } + const node = tagObj?.createNode + ? tagObj.createNode(ctx.schema, value, ctx) + : typeof tagObj?.nodeClass?.from === 'function' + ? tagObj.nodeClass.from(ctx.schema, value, ctx) + : new Scalar.Scalar(value); + if (tagName) + node.tag = tagName; + else if (!tagObj.default) + node.tag = tagObj.tag; + if (ref) + ref.node = node; + return node; +} + +exports.createNode = createNode; + + +/***/ }), + +/***/ 5400: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var visit = __nccwpck_require__(6796); + +const escapeChars = { + '!': '%21', + ',': '%2C', + '[': '%5B', + ']': '%5D', + '{': '%7B', + '}': '%7D' +}; +const escapeTagName = (tn) => tn.replace(/[!,[\]{}]/g, ch => escapeChars[ch]); +class Directives { + constructor(yaml, tags) { + /** + * The directives-end/doc-start marker `---`. If `null`, a marker may still be + * included in the document's stringified representation. + */ + this.docStart = null; + /** The doc-end marker `...`. */ + this.docEnd = false; + this.yaml = Object.assign({}, Directives.defaultYaml, yaml); + this.tags = Object.assign({}, Directives.defaultTags, tags); + } + clone() { + const copy = new Directives(this.yaml, this.tags); + copy.docStart = this.docStart; + return copy; + } + /** + * During parsing, get a Directives instance for the current document and + * update the stream state according to the current version's spec. + */ + atDocument() { + const res = new Directives(this.yaml, this.tags); + switch (this.yaml.version) { + case '1.1': + this.atNextDocument = true; + break; + case '1.2': + this.atNextDocument = false; + this.yaml = { + explicit: Directives.defaultYaml.explicit, + version: '1.2' + }; + this.tags = Object.assign({}, Directives.defaultTags); + break; + } + return res; + } + /** + * @param onError - May be called even if the action was successful + * @returns `true` on success + */ + add(line, onError) { + if (this.atNextDocument) { + this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' }; + this.tags = Object.assign({}, Directives.defaultTags); + this.atNextDocument = false; + } + const parts = line.trim().split(/[ \t]+/); + const name = parts.shift(); + switch (name) { + case '%TAG': { + if (parts.length !== 2) { + onError(0, '%TAG directive should contain exactly two parts'); + if (parts.length < 2) + return false; + } + const [handle, prefix] = parts; + this.tags[handle] = prefix; + return true; + } + case '%YAML': { + this.yaml.explicit = true; + if (parts.length !== 1) { + onError(0, '%YAML directive should contain exactly one part'); + return false; + } + const [version] = parts; + if (version === '1.1' || version === '1.2') { + this.yaml.version = version; + return true; + } + else { + const isValid = /^\d+\.\d+$/.test(version); + onError(6, `Unsupported YAML version ${version}`, isValid); + return false; + } + } + default: + onError(0, `Unknown directive ${name}`, true); + return false; + } + } + /** + * Resolves a tag, matching handles to those defined in %TAG directives. + * + * @returns Resolved tag, which may also be the non-specific tag `'!'` or a + * `'!local'` tag, or `null` if unresolvable. 
+ */ + tagName(source, onError) { + if (source === '!') + return '!'; // non-specific tag + if (source[0] !== '!') { + onError(`Not a valid tag: ${source}`); + return null; + } + if (source[1] === '<') { + const verbatim = source.slice(2, -1); + if (verbatim === '!' || verbatim === '!!') { + onError(`Verbatim tags aren't resolved, so ${source} is invalid.`); + return null; + } + if (source[source.length - 1] !== '>') + onError('Verbatim tags must end with a >'); + return verbatim; + } + const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s); + if (!suffix) + onError(`The ${source} tag has no suffix`); + const prefix = this.tags[handle]; + if (prefix) { + try { + return prefix + decodeURIComponent(suffix); + } + catch (error) { + onError(String(error)); + return null; + } + } + if (handle === '!') + return source; // local tag + onError(`Could not resolve tag: ${source}`); + return null; + } + /** + * Given a fully resolved tag, returns its printable string form, + * taking into account current tag prefixes and defaults. + */ + tagString(tag) { + for (const [handle, prefix] of Object.entries(this.tags)) { + if (tag.startsWith(prefix)) + return handle + escapeTagName(tag.substring(prefix.length)); + } + return tag[0] === '!' ? tag : `!<${tag}>`; + } + toString(doc) { + const lines = this.yaml.explicit + ? [`%YAML ${this.yaml.version || '1.2'}`] + : []; + const tagEntries = Object.entries(this.tags); + let tagNames; + if (doc && tagEntries.length > 0 && identity.isNode(doc.contents)) { + const tags = {}; + visit.visit(doc.contents, (_key, node) => { + if (identity.isNode(node) && node.tag) + tags[node.tag] = true; + }); + tagNames = Object.keys(tags); + } + else + tagNames = []; + for (const [handle, prefix] of tagEntries) { + if (handle === '!!' && prefix === 'tag:yaml.org,2002:') + continue; + if (!doc || tagNames.some(tn => tn.startsWith(prefix))) + lines.push(`%TAG ${handle} ${prefix}`); + } + return lines.join('\n'); + } +} +Directives.defaultYaml = { explicit: false, version: '1.2' }; +Directives.defaultTags = { '!!': 'tag:yaml.org,2002:' }; + +exports.Directives = Directives; + + +/***/ }), + +/***/ 4236: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +class YAMLError extends Error { + constructor(name, pos, code, message) { + super(); + this.name = name; + this.code = code; + this.message = message; + this.pos = pos; + } +} +class YAMLParseError extends YAMLError { + constructor(pos, code, message) { + super('YAMLParseError', pos, code, message); + } +} +class YAMLWarning extends YAMLError { + constructor(pos, code, message) { + super('YAMLWarning', pos, code, message); + } +} +const prettifyError = (src, lc) => (error) => { + if (error.pos[0] === -1) + return; + error.linePos = error.pos.map(pos => lc.linePos(pos)); + const { line, col } = error.linePos[0]; + error.message += ` at line ${line}, column ${col}`; + let ci = col - 1; + let lineStr = src + .substring(lc.lineStarts[line - 1], lc.lineStarts[line]) + .replace(/[\n\r]+$/, ''); + // Trim to max 80 chars, keeping col position near the middle + if (ci >= 60 && lineStr.length > 80) { + const trimStart = Math.min(ci - 39, lineStr.length - 79); + lineStr = '…' + lineStr.substring(trimStart); + ci -= trimStart - 1; + } + if (lineStr.length > 80) + lineStr = lineStr.substring(0, 79) + '…'; + // Include previous line in context if pointing at line start + if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) { + // Regexp won't match if start is trimmed + let prev = src.substring(lc.lineStarts[line - 2], 
lc.lineStarts[line - 1]); + if (prev.length > 80) + prev = prev.substring(0, 79) + '…\n'; + lineStr = prev + lineStr; + } + if (/[^ ]/.test(lineStr)) { + let count = 1; + const end = error.linePos[1]; + if (end && end.line === line && end.col > col) { + count = Math.max(1, Math.min(end.col - col, 80 - ci)); + } + const pointer = ' '.repeat(ci) + '^'.repeat(count); + error.message += `:\n\n${lineStr}\n${pointer}\n`; + } +}; + +exports.YAMLError = YAMLError; +exports.YAMLParseError = YAMLParseError; +exports.YAMLWarning = YAMLWarning; +exports.prettifyError = prettifyError; + + +/***/ }), + +/***/ 4083: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var composer = __nccwpck_require__(9493); +var Document = __nccwpck_require__(42); +var Schema = __nccwpck_require__(6831); +var errors = __nccwpck_require__(4236); +var Alias = __nccwpck_require__(5639); +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var Scalar = __nccwpck_require__(9338); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var cst = __nccwpck_require__(9169); +var lexer = __nccwpck_require__(5976); +var lineCounter = __nccwpck_require__(1929); +var parser = __nccwpck_require__(3328); +var publicApi = __nccwpck_require__(8649); +var visit = __nccwpck_require__(6796); + + + +exports.Composer = composer.Composer; +exports.Document = Document.Document; +exports.Schema = Schema.Schema; +exports.YAMLError = errors.YAMLError; +exports.YAMLParseError = errors.YAMLParseError; +exports.YAMLWarning = errors.YAMLWarning; +exports.Alias = Alias.Alias; +exports.isAlias = identity.isAlias; +exports.isCollection = identity.isCollection; +exports.isDocument = identity.isDocument; +exports.isMap = identity.isMap; +exports.isNode = identity.isNode; +exports.isPair = identity.isPair; +exports.isScalar = identity.isScalar; +exports.isSeq = identity.isSeq; +exports.Pair = Pair.Pair; +exports.Scalar = Scalar.Scalar; +exports.YAMLMap = YAMLMap.YAMLMap; +exports.YAMLSeq = YAMLSeq.YAMLSeq; +exports.CST = cst; +exports.Lexer = lexer.Lexer; +exports.LineCounter = lineCounter.LineCounter; +exports.Parser = parser.Parser; +exports.parse = publicApi.parse; +exports.parseAllDocuments = publicApi.parseAllDocuments; +exports.parseDocument = publicApi.parseDocument; +exports.stringify = publicApi.stringify; +exports.visit = visit.visit; +exports.visitAsync = visit.visitAsync; + + +/***/ }), + +/***/ 6909: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function debug(logLevel, ...messages) { + if (logLevel === 'debug') + console.log(...messages); +} +function warn(logLevel, warning) { + if (logLevel === 'debug' || logLevel === 'warn') { + if (typeof process !== 'undefined' && process.emitWarning) + process.emitWarning(warning); + else + console.warn(warning); + } +} + +exports.debug = debug; +exports.warn = warn; + + +/***/ }), + +/***/ 5639: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var anchors = __nccwpck_require__(8459); +var visit = __nccwpck_require__(6796); +var identity = __nccwpck_require__(5589); +var Node = __nccwpck_require__(1399); +var toJS = __nccwpck_require__(2463); + +class Alias extends Node.NodeBase { + constructor(source) { + super(identity.ALIAS); + this.source = source; + Object.defineProperty(this, 'tag', { + set() { + throw new Error('Alias nodes cannot have tags'); + } + }); + } + /** + * Resolve the value of this alias within `doc`, finding the last + * 
instance of the `source` anchor before this node. + */ + resolve(doc) { + let found = undefined; + visit.visit(doc, { + Node: (_key, node) => { + if (node === this) + return visit.visit.BREAK; + if (node.anchor === this.source) + found = node; + } + }); + return found; + } + toJSON(_arg, ctx) { + if (!ctx) + return { source: this.source }; + const { anchors, doc, maxAliasCount } = ctx; + const source = this.resolve(doc); + if (!source) { + const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; + throw new ReferenceError(msg); + } + let data = anchors.get(source); + if (!data) { + // Resolve anchors for Node.prototype.toJS() + toJS.toJS(source, null, ctx); + data = anchors.get(source); + } + /* istanbul ignore if */ + if (!data || data.res === undefined) { + const msg = 'This should not happen: Alias anchor was not resolved?'; + throw new ReferenceError(msg); + } + if (maxAliasCount >= 0) { + data.count += 1; + if (data.aliasCount === 0) + data.aliasCount = getAliasCount(doc, source, anchors); + if (data.count * data.aliasCount > maxAliasCount) { + const msg = 'Excessive alias count indicates a resource exhaustion attack'; + throw new ReferenceError(msg); + } + } + return data.res; + } + toString(ctx, _onComment, _onChompKeep) { + const src = `*${this.source}`; + if (ctx) { + anchors.anchorIsValid(this.source); + if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) { + const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`; + throw new Error(msg); + } + if (ctx.implicitKey) + return `${src} `; + } + return src; + } +} +function getAliasCount(doc, node, anchors) { + if (identity.isAlias(node)) { + const source = node.resolve(doc); + const anchor = anchors && source && anchors.get(source); + return anchor ? anchor.count * anchor.aliasCount : 0; + } + else if (identity.isCollection(node)) { + let count = 0; + for (const item of node.items) { + const c = getAliasCount(doc, item, anchors); + if (c > count) + count = c; + } + return count; + } + else if (identity.isPair(node)) { + const kc = getAliasCount(doc, node.key, anchors); + const vc = getAliasCount(doc, node.value, anchors); + return Math.max(kc, vc); + } + return 1; +} + +exports.Alias = Alias; + + +/***/ }), + +/***/ 3466: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var createNode = __nccwpck_require__(9652); +var identity = __nccwpck_require__(5589); +var Node = __nccwpck_require__(1399); + +function collectionFromPath(schema, path, value) { + let v = value; + for (let i = path.length - 1; i >= 0; --i) { + const k = path[i]; + if (typeof k === 'number' && Number.isInteger(k) && k >= 0) { + const a = []; + a[k] = v; + v = a; + } + else { + v = new Map([[k, v]]); + } + } + return createNode.createNode(v, undefined, { + aliasDuplicateObjects: false, + keepUndefined: false, + onAnchor: () => { + throw new Error('This should not happen, please report a bug.'); + }, + schema, + sourceObjects: new Map() + }); +} +// Type guard is intentionally a little wrong so as to be more useful, +// as it does not cover untypable empty non-string iterables (e.g. []). 
+const isEmptyPath = (path) => path == null || + (typeof path === 'object' && !!path[Symbol.iterator]().next().done); +class Collection extends Node.NodeBase { + constructor(type, schema) { + super(type); + Object.defineProperty(this, 'schema', { + value: schema, + configurable: true, + enumerable: false, + writable: true + }); + } + /** + * Create a copy of this collection. + * + * @param schema - If defined, overwrites the original's schema + */ + clone(schema) { + const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); + if (schema) + copy.schema = schema; + copy.items = copy.items.map(it => identity.isNode(it) || identity.isPair(it) ? it.clone(schema) : it); + if (this.range) + copy.range = this.range.slice(); + return copy; + } + /** + * Adds a value to the collection. For `!!map` and `!!omap` the value must + * be a Pair instance or a `{ key, value }` object, which may not have a key + * that already exists in the map. + */ + addIn(path, value) { + if (isEmptyPath(path)) + this.add(value); + else { + const [key, ...rest] = path; + const node = this.get(key, true); + if (identity.isCollection(node)) + node.addIn(rest, value); + else if (node === undefined && this.schema) + this.set(key, collectionFromPath(this.schema, rest, value)); + else + throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + } + /** + * Removes a value from the collection. + * @returns `true` if the item was found and removed. + */ + deleteIn(path) { + const [key, ...rest] = path; + if (rest.length === 0) + return this.delete(key); + const node = this.get(key, true); + if (identity.isCollection(node)) + return node.deleteIn(rest); + else + throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`); + } + /** + * Returns item at `key`, or `undefined` if not found. By default unwraps + * scalar values from their surrounding node; to disable set `keepScalar` to + * `true` (collections are always returned intact). + */ + getIn(path, keepScalar) { + const [key, ...rest] = path; + const node = this.get(key, true); + if (rest.length === 0) + return !keepScalar && identity.isScalar(node) ? node.value : node; + else + return identity.isCollection(node) ? node.getIn(rest, keepScalar) : undefined; + } + hasAllNullValues(allowScalar) { + return this.items.every(node => { + if (!identity.isPair(node)) + return false; + const n = node.value; + return (n == null || + (allowScalar && + identity.isScalar(n) && + n.value == null && + !n.commentBefore && + !n.comment && + !n.tag)); + }); + } + /** + * Checks if the collection includes a value with the key `key`. + */ + hasIn(path) { + const [key, ...rest] = path; + if (rest.length === 0) + return this.has(key); + const node = this.get(key, true); + return identity.isCollection(node) ? node.hasIn(rest) : false; + } + /** + * Sets a value in this collection. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + */ + setIn(path, value) { + const [key, ...rest] = path; + if (rest.length === 0) { + this.set(key, value); + } + else { + const node = this.get(key, true); + if (identity.isCollection(node)) + node.setIn(rest, value); + else if (node === undefined && this.schema) + this.set(key, collectionFromPath(this.schema, rest, value)); + else + throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`); + } + } +} + +exports.Collection = Collection; +exports.collectionFromPath = collectionFromPath; +exports.isEmptyPath = isEmptyPath; + + +/***/ }), + +/***/ 1399: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var applyReviver = __nccwpck_require__(3412); +var identity = __nccwpck_require__(5589); +var toJS = __nccwpck_require__(2463); + +class NodeBase { + constructor(type) { + Object.defineProperty(this, identity.NODE_TYPE, { value: type }); + } + /** Create a copy of this node. */ + clone() { + const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this)); + if (this.range) + copy.range = this.range.slice(); + return copy; + } + /** A plain JavaScript representation of this node. */ + toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) { + if (!identity.isDocument(doc)) + throw new TypeError('A document argument is required'); + const ctx = { + anchors: new Map(), + doc, + keep: true, + mapAsMap: mapAsMap === true, + mapKeyWarned: false, + maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100 + }; + const res = toJS.toJS(this, '', ctx); + if (typeof onAnchor === 'function') + for (const { count, res } of ctx.anchors.values()) + onAnchor(res, count); + return typeof reviver === 'function' + ? applyReviver.applyReviver(reviver, { '': res }, '', res) + : res; + } +} + +exports.NodeBase = NodeBase; + + +/***/ }), + +/***/ 246: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var createNode = __nccwpck_require__(9652); +var stringifyPair = __nccwpck_require__(4875); +var addPairToJSMap = __nccwpck_require__(4676); +var identity = __nccwpck_require__(5589); + +function createPair(key, value, ctx) { + const k = createNode.createNode(key, undefined, ctx); + const v = createNode.createNode(value, undefined, ctx); + return new Pair(k, v); +} +class Pair { + constructor(key, value = null) { + Object.defineProperty(this, identity.NODE_TYPE, { value: identity.PAIR }); + this.key = key; + this.value = value; + } + clone(schema) { + let { key, value } = this; + if (identity.isNode(key)) + key = key.clone(schema); + if (identity.isNode(value)) + value = value.clone(schema); + return new Pair(key, value); + } + toJSON(_, ctx) { + const pair = ctx?.mapAsMap ? new Map() : {}; + return addPairToJSMap.addPairToJSMap(ctx, pair, this); + } + toString(ctx, onComment, onChompKeep) { + return ctx?.doc + ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep) + : JSON.stringify(this); + } +} + +exports.Pair = Pair; +exports.createPair = createPair; + + +/***/ }), + +/***/ 9338: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Node = __nccwpck_require__(1399); +var toJS = __nccwpck_require__(2463); + +const isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object'); +class Scalar extends Node.NodeBase { + constructor(value) { + super(identity.SCALAR); + this.value = value; + } + toJSON(arg, ctx) { + return ctx?.keep ? 
this.value : toJS.toJS(this.value, arg, ctx); + } + toString() { + return String(this.value); + } +} +Scalar.BLOCK_FOLDED = 'BLOCK_FOLDED'; +Scalar.BLOCK_LITERAL = 'BLOCK_LITERAL'; +Scalar.PLAIN = 'PLAIN'; +Scalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE'; +Scalar.QUOTE_SINGLE = 'QUOTE_SINGLE'; + +exports.Scalar = Scalar; +exports.isScalarValue = isScalarValue; + + +/***/ }), + +/***/ 6011: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyCollection = __nccwpck_require__(2466); +var addPairToJSMap = __nccwpck_require__(4676); +var Collection = __nccwpck_require__(3466); +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var Scalar = __nccwpck_require__(9338); + +function findPair(items, key) { + const k = identity.isScalar(key) ? key.value : key; + for (const it of items) { + if (identity.isPair(it)) { + if (it.key === key || it.key === k) + return it; + if (identity.isScalar(it.key) && it.key.value === k) + return it; + } + } + return undefined; +} +class YAMLMap extends Collection.Collection { + static get tagName() { + return 'tag:yaml.org,2002:map'; + } + constructor(schema) { + super(identity.MAP, schema); + this.items = []; + } + /** + * A generic collection parsing method that can be extended + * to other node classes that inherit from YAMLMap + */ + static from(schema, obj, ctx) { + const { keepUndefined, replacer } = ctx; + const map = new this(schema); + const add = (key, value) => { + if (typeof replacer === 'function') + value = replacer.call(obj, key, value); + else if (Array.isArray(replacer) && !replacer.includes(key)) + return; + if (value !== undefined || keepUndefined) + map.items.push(Pair.createPair(key, value, ctx)); + }; + if (obj instanceof Map) { + for (const [key, value] of obj) + add(key, value); + } + else if (obj && typeof obj === 'object') { + for (const key of Object.keys(obj)) + add(key, obj[key]); + } + if (typeof schema.sortMapEntries === 'function') { + map.items.sort(schema.sortMapEntries); + } + return map; + } + /** + * Adds a value to the collection. + * + * @param overwrite - If not set `true`, using a key that is already in the + * collection will throw. Otherwise, overwrites the previous value. + */ + add(pair, overwrite) { + let _pair; + if (identity.isPair(pair)) + _pair = pair; + else if (!pair || typeof pair !== 'object' || !('key' in pair)) { + // In TypeScript, this never happens. + _pair = new Pair.Pair(pair, pair?.value); + } + else + _pair = new Pair.Pair(pair.key, pair.value); + const prev = findPair(this.items, _pair.key); + const sortEntries = this.schema?.sortMapEntries; + if (prev) { + if (!overwrite) + throw new Error(`Key ${_pair.key} already set`); + // For scalars, keep the old node & its comments and anchors + if (identity.isScalar(prev.value) && Scalar.isScalarValue(_pair.value)) + prev.value.value = _pair.value; + else + prev.value = _pair.value; + } + else if (sortEntries) { + const i = this.items.findIndex(item => sortEntries(_pair, item) < 0); + if (i === -1) + this.items.push(_pair); + else + this.items.splice(i, 0, _pair); + } + else { + this.items.push(_pair); + } + } + delete(key) { + const it = findPair(this.items, key); + if (!it) + return false; + const del = this.items.splice(this.items.indexOf(it), 1); + return del.length > 0; + } + get(key, keepScalar) { + const it = findPair(this.items, key); + const node = it?.value; + return (!keepScalar && identity.isScalar(node) ? node.value : node) ?? 
undefined; + } + has(key) { + return !!findPair(this.items, key); + } + set(key, value) { + this.add(new Pair.Pair(key, value), true); + } + /** + * @param ctx - Conversion context, originally set in Document#toJS() + * @param {Class} Type - If set, forces the returned collection type + * @returns Instance of Type, Map, or Object + */ + toJSON(_, ctx, Type) { + const map = Type ? new Type() : ctx?.mapAsMap ? new Map() : {}; + if (ctx?.onCreate) + ctx.onCreate(map); + for (const item of this.items) + addPairToJSMap.addPairToJSMap(ctx, map, item); + return map; + } + toString(ctx, onComment, onChompKeep) { + if (!ctx) + return JSON.stringify(this); + for (const item of this.items) { + if (!identity.isPair(item)) + throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`); + } + if (!ctx.allNullValues && this.hasAllNullValues(false)) + ctx = Object.assign({}, ctx, { allNullValues: true }); + return stringifyCollection.stringifyCollection(this, ctx, { + blockItemPrefix: '', + flowChars: { start: '{', end: '}' }, + itemIndent: ctx.indent || '', + onChompKeep, + onComment + }); + } +} + +exports.YAMLMap = YAMLMap; +exports.findPair = findPair; + + +/***/ }), + +/***/ 5161: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var createNode = __nccwpck_require__(9652); +var stringifyCollection = __nccwpck_require__(2466); +var Collection = __nccwpck_require__(3466); +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var toJS = __nccwpck_require__(2463); + +class YAMLSeq extends Collection.Collection { + static get tagName() { + return 'tag:yaml.org,2002:seq'; + } + constructor(schema) { + super(identity.SEQ, schema); + this.items = []; + } + add(value) { + this.items.push(value); + } + /** + * Removes a value from the collection. + * + * `key` must contain a representation of an integer for this to succeed. + * It may be wrapped in a `Scalar`. + * + * @returns `true` if the item was found and removed. + */ + delete(key) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') + return false; + const del = this.items.splice(idx, 1); + return del.length > 0; + } + get(key, keepScalar) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') + return undefined; + const it = this.items[idx]; + return !keepScalar && identity.isScalar(it) ? it.value : it; + } + /** + * Checks if the collection includes a value with the key `key`. + * + * `key` must contain a representation of an integer for this to succeed. + * It may be wrapped in a `Scalar`. + */ + has(key) { + const idx = asItemIndex(key); + return typeof idx === 'number' && idx < this.items.length; + } + /** + * Sets a value in this collection. For `!!set`, `value` needs to be a + * boolean to add/remove the item from the set. + * + * If `key` does not contain a representation of an integer, this will throw. + * It may be wrapped in a `Scalar`. 
+ */ + set(key, value) { + const idx = asItemIndex(key); + if (typeof idx !== 'number') + throw new Error(`Expected a valid index, not ${key}.`); + const prev = this.items[idx]; + if (identity.isScalar(prev) && Scalar.isScalarValue(value)) + prev.value = value; + else + this.items[idx] = value; + } + toJSON(_, ctx) { + const seq = []; + if (ctx?.onCreate) + ctx.onCreate(seq); + let i = 0; + for (const item of this.items) + seq.push(toJS.toJS(item, String(i++), ctx)); + return seq; + } + toString(ctx, onComment, onChompKeep) { + if (!ctx) + return JSON.stringify(this); + return stringifyCollection.stringifyCollection(this, ctx, { + blockItemPrefix: '- ', + flowChars: { start: '[', end: ']' }, + itemIndent: (ctx.indent || '') + ' ', + onChompKeep, + onComment + }); + } + static from(schema, obj, ctx) { + const { replacer } = ctx; + const seq = new this(schema); + if (obj && Symbol.iterator in Object(obj)) { + let i = 0; + for (let it of obj) { + if (typeof replacer === 'function') { + const key = obj instanceof Set ? it : String(i++); + it = replacer.call(obj, key, it); + } + seq.items.push(createNode.createNode(it, undefined, ctx)); + } + } + return seq; + } +} +function asItemIndex(key) { + let idx = identity.isScalar(key) ? key.value : key; + if (idx && typeof idx === 'string') + idx = Number(idx); + return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0 + ? idx + : null; +} + +exports.YAMLSeq = YAMLSeq; + + +/***/ }), + +/***/ 4676: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var log = __nccwpck_require__(6909); +var merge = __nccwpck_require__(9614); +var stringify = __nccwpck_require__(8409); +var identity = __nccwpck_require__(5589); +var toJS = __nccwpck_require__(2463); + +function addPairToJSMap(ctx, map, { key, value }) { + if (identity.isNode(key) && key.addToJSMap) + key.addToJSMap(ctx, map, value); + // TODO: Should drop this special case for bare << handling + else if (merge.isMergeKey(ctx, key)) + merge.addMergeToJSMap(ctx, map, value); + else { + const jsKey = toJS.toJS(key, '', ctx); + if (map instanceof Map) { + map.set(jsKey, toJS.toJS(value, jsKey, ctx)); + } + else if (map instanceof Set) { + map.add(jsKey); + } + else { + const stringKey = stringifyKey(key, jsKey, ctx); + const jsValue = toJS.toJS(value, stringKey, ctx); + if (stringKey in map) + Object.defineProperty(map, stringKey, { + value: jsValue, + writable: true, + enumerable: true, + configurable: true + }); + else + map[stringKey] = jsValue; + } + } + return map; +} +function stringifyKey(key, jsKey, ctx) { + if (jsKey === null) + return ''; + if (typeof jsKey !== 'object') + return String(jsKey); + if (identity.isNode(key) && ctx?.doc) { + const strCtx = stringify.createStringifyContext(ctx.doc, {}); + strCtx.anchors = new Set(); + for (const node of ctx.anchors.keys()) + strCtx.anchors.add(node.anchor); + strCtx.inFlow = true; + strCtx.inStringifyKey = true; + const strKey = key.toString(strCtx); + if (!ctx.mapKeyWarned) { + let jsonStr = JSON.stringify(strKey); + if (jsonStr.length > 40) + jsonStr = jsonStr.substring(0, 36) + '..."'; + log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. 
Set mapAsMap: true to use object keys.`); + ctx.mapKeyWarned = true; + } + return strKey; + } + return JSON.stringify(jsKey); +} + +exports.addPairToJSMap = addPairToJSMap; + + +/***/ }), + +/***/ 5589: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const ALIAS = Symbol.for('yaml.alias'); +const DOC = Symbol.for('yaml.document'); +const MAP = Symbol.for('yaml.map'); +const PAIR = Symbol.for('yaml.pair'); +const SCALAR = Symbol.for('yaml.scalar'); +const SEQ = Symbol.for('yaml.seq'); +const NODE_TYPE = Symbol.for('yaml.node.type'); +const isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS; +const isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC; +const isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP; +const isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR; +const isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR; +const isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ; +function isCollection(node) { + if (node && typeof node === 'object') + switch (node[NODE_TYPE]) { + case MAP: + case SEQ: + return true; + } + return false; +} +function isNode(node) { + if (node && typeof node === 'object') + switch (node[NODE_TYPE]) { + case ALIAS: + case MAP: + case SCALAR: + case SEQ: + return true; + } + return false; +} +const hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor; + +exports.ALIAS = ALIAS; +exports.DOC = DOC; +exports.MAP = MAP; +exports.NODE_TYPE = NODE_TYPE; +exports.PAIR = PAIR; +exports.SCALAR = SCALAR; +exports.SEQ = SEQ; +exports.hasAnchor = hasAnchor; +exports.isAlias = isAlias; +exports.isCollection = isCollection; +exports.isDocument = isDocument; +exports.isMap = isMap; +exports.isNode = isNode; +exports.isPair = isPair; +exports.isScalar = isScalar; +exports.isSeq = isSeq; + + +/***/ }), + +/***/ 2463: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); + +/** + * Recursively convert any node or its contents to native JavaScript + * + * @param value - The input value + * @param arg - If `value` defines a `toJSON()` method, use this + * as its first argument + * @param ctx - Conversion context, originally set in Document#toJS(). If + * `{ keep: true }` is not set, output should be suitable for JSON + * stringification. 
+ */ +function toJS(value, arg, ctx) { + // eslint-disable-next-line @typescript-eslint/no-unsafe-return + if (Array.isArray(value)) + return value.map((v, i) => toJS(v, String(i), ctx)); + if (value && typeof value.toJSON === 'function') { + // eslint-disable-next-line @typescript-eslint/no-unsafe-call + if (!ctx || !identity.hasAnchor(value)) + return value.toJSON(arg, ctx); + const data = { aliasCount: 0, count: 1, res: undefined }; + ctx.anchors.set(value, data); + ctx.onCreate = res => { + data.res = res; + delete ctx.onCreate; + }; + const res = value.toJSON(arg, ctx); + if (ctx.onCreate) + ctx.onCreate(res); + return res; + } + if (typeof value === 'bigint' && !ctx?.keep) + return Number(value); + return value; +} + +exports.toJS = toJS; + + +/***/ }), + +/***/ 9027: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var resolveBlockScalar = __nccwpck_require__(9485); +var resolveFlowScalar = __nccwpck_require__(7578); +var errors = __nccwpck_require__(4236); +var stringifyString = __nccwpck_require__(6226); + +function resolveAsScalar(token, strict = true, onError) { + if (token) { + const _onError = (pos, code, message) => { + const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset; + if (onError) + onError(offset, code, message); + else + throw new errors.YAMLParseError([offset, offset + 1], code, message); + }; + switch (token.type) { + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + return resolveFlowScalar.resolveFlowScalar(token, strict, _onError); + case 'block-scalar': + return resolveBlockScalar.resolveBlockScalar({ options: { strict } }, token, _onError); + } + } + return null; +} +/** + * Create a new scalar token with `value` + * + * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, + * as this function does not support any schema operations and won't check for such conflicts. + * + * @param value The string representation of the value, which will have its content properly indented. + * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added. + * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. + * @param context.indent The indent level of the token. + * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value. + * @param context.offset The offset position of the token. + * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. + */ +function createScalarToken(value, context) { + const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context; + const source = stringifyString.stringifyString({ type, value }, { + implicitKey, + indent: indent > 0 ? ' '.repeat(indent) : '', + inFlow, + options: { blockQuote: true, lineWidth: -1 } + }); + const end = context.end ?? 
[ + { type: 'newline', offset: -1, indent, source: '\n' } + ]; + switch (source[0]) { + case '|': + case '>': { + const he = source.indexOf('\n'); + const head = source.substring(0, he); + const body = source.substring(he + 1) + '\n'; + const props = [ + { type: 'block-scalar-header', offset, indent, source: head } + ]; + if (!addEndtoBlockProps(props, end)) + props.push({ type: 'newline', offset: -1, indent, source: '\n' }); + return { type: 'block-scalar', offset, indent, props, source: body }; + } + case '"': + return { type: 'double-quoted-scalar', offset, indent, source, end }; + case "'": + return { type: 'single-quoted-scalar', offset, indent, source, end }; + default: + return { type: 'scalar', offset, indent, source, end }; + } +} +/** + * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have. + * + * Best efforts are made to retain any comments previously associated with the `token`, + * though all contents within a collection's `items` will be overwritten. + * + * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`, + * as this function does not support any schema operations and won't check for such conflicts. + * + * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key. + * @param value The string representation of the value, which will have its content properly indented. + * @param context.afterKey In most cases, values after a key should have an additional level of indentation. + * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value. + * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value. + * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`. + */ +function setScalarValue(token, value, context = {}) { + let { afterKey = false, implicitKey = false, inFlow = false, type } = context; + let indent = 'indent' in token ? token.indent : null; + if (afterKey && typeof indent === 'number') + indent += 2; + if (!type) + switch (token.type) { + case 'single-quoted-scalar': + type = 'QUOTE_SINGLE'; + break; + case 'double-quoted-scalar': + type = 'QUOTE_DOUBLE'; + break; + case 'block-scalar': { + const header = token.props[0]; + if (header.type !== 'block-scalar-header') + throw new Error('Invalid block scalar header'); + type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL'; + break; + } + default: + type = 'PLAIN'; + } + const source = stringifyString.stringifyString({ type, value }, { + implicitKey: implicitKey || indent === null, + indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '', + inFlow, + options: { blockQuote: true, lineWidth: -1 } + }); + switch (source[0]) { + case '|': + case '>': + setBlockScalarValue(token, source); + break; + case '"': + setFlowScalarValue(token, source, 'double-quoted-scalar'); + break; + case "'": + setFlowScalarValue(token, source, 'single-quoted-scalar'); + break; + default: + setFlowScalarValue(token, source, 'scalar'); + } +} +function setBlockScalarValue(token, source) { + const he = source.indexOf('\n'); + const head = source.substring(0, he); + const body = source.substring(he + 1) + '\n'; + if (token.type === 'block-scalar') { + const header = token.props[0]; + if (header.type !== 'block-scalar-header') + throw new Error('Invalid block scalar header'); + header.source = head; + token.source = body; + } + else { + const { offset } = token; + const indent = 'indent' in token ? token.indent : -1; + const props = [ + { type: 'block-scalar-header', offset, indent, source: head } + ]; + if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined)) + props.push({ type: 'newline', offset: -1, indent, source: '\n' }); + for (const key of Object.keys(token)) + if (key !== 'type' && key !== 'offset') + delete token[key]; + Object.assign(token, { type: 'block-scalar', indent, props, source: body }); + } +} +/** @returns `true` if last token is a newline */ +function addEndtoBlockProps(props, end) { + if (end) + for (const st of end) + switch (st.type) { + case 'space': + case 'comment': + props.push(st); + break; + case 'newline': + props.push(st); + return true; + } + return false; +} +function setFlowScalarValue(token, source, type) { + switch (token.type) { + case 'scalar': + case 'double-quoted-scalar': + case 'single-quoted-scalar': + token.type = type; + token.source = source; + break; + case 'block-scalar': { + const end = token.props.slice(1); + let oa = source.length; + if (token.props[0].type === 'block-scalar-header') + oa -= token.props[0].source.length; + for (const tok of end) + tok.offset += oa; + delete token.props; + Object.assign(token, { type, source, end }); + break; + } + case 'block-map': + case 'block-seq': { + const offset = token.offset + source.length; + const nl = { type: 'newline', offset, indent: token.indent, source: '\n' }; + delete token.items; + Object.assign(token, { type, source, end: [nl] }); + break; + } + default: { + const indent = 'indent' in token ? token.indent : -1; + const end = 'end' in token && Array.isArray(token.end) + ? token.end.filter(st => st.type === 'space' || + st.type === 'comment' || + st.type === 'newline') + : []; + for (const key of Object.keys(token)) + if (key !== 'type' && key !== 'offset') + delete token[key]; + Object.assign(token, { type, indent, source, end }); + } + } +} + +exports.createScalarToken = createScalarToken; +exports.resolveAsScalar = resolveAsScalar; +exports.setScalarValue = setScalarValue; + + +/***/ }), + +/***/ 6307: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Stringify a CST document, token, or collection item + * + * Fair warning: This applies no validation whatsoever, and + * simply concatenates the sources in their logical order. + */ +const stringify = (cst) => 'type' in cst ? 
stringifyToken(cst) : stringifyItem(cst); +function stringifyToken(token) { + switch (token.type) { + case 'block-scalar': { + let res = ''; + for (const tok of token.props) + res += stringifyToken(tok); + return res + token.source; + } + case 'block-map': + case 'block-seq': { + let res = ''; + for (const item of token.items) + res += stringifyItem(item); + return res; + } + case 'flow-collection': { + let res = token.start.source; + for (const item of token.items) + res += stringifyItem(item); + for (const st of token.end) + res += st.source; + return res; + } + case 'document': { + let res = stringifyItem(token); + if (token.end) + for (const st of token.end) + res += st.source; + return res; + } + default: { + let res = token.source; + if ('end' in token && token.end) + for (const st of token.end) + res += st.source; + return res; + } + } +} +function stringifyItem({ start, key, sep, value }) { + let res = ''; + for (const st of start) + res += st.source; + if (key) + res += stringifyToken(key); + if (sep) + for (const st of sep) + res += st.source; + if (value) + res += stringifyToken(value); + return res; +} + +exports.stringify = stringify; + + +/***/ }), + +/***/ 8497: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const BREAK = Symbol('break visit'); +const SKIP = Symbol('skip children'); +const REMOVE = Symbol('remove item'); +/** + * Apply a visitor to a CST document or item. + * + * Walks through the tree (depth-first) starting from the root, calling a + * `visitor` function with two arguments when entering each item: + * - `item`: The current item, which included the following members: + * - `start: SourceToken[]` – Source tokens before the key or value, + * possibly including its anchor or tag. + * - `key?: Token | null` – Set for pair values. May then be `null`, if + * the key before the `:` separator is empty. + * - `sep?: SourceToken[]` – Source tokens between the key and the value, + * which should include the `:` map value indicator if `value` is set. + * - `value?: Token` – The value of a sequence item, or of a map pair. + * - `path`: The steps from the root to the current node, as an array of + * `['key' | 'value', number]` tuples. + * + * The return value of the visitor may be used to control the traversal: + * - `undefined` (default): Do nothing and continue + * - `visit.SKIP`: Do not visit the children of this token, continue with + * next sibling + * - `visit.BREAK`: Terminate traversal completely + * - `visit.REMOVE`: Remove the current item, then continue with the next one + * - `number`: Set the index of the next step. This is useful especially if + * the index of the current token has changed. + * - `function`: Define the next visitor for this item. After the original + * visitor is called on item entry, next visitors are called after handling + * a non-empty `key` and when exiting the item. + */ +function visit(cst, visitor) { + if ('type' in cst && cst.type === 'document') + cst = { start: cst.start, value: cst.value }; + _visit(Object.freeze([]), cst, visitor); +} +// Without the `as symbol` casts, TS declares these in the `visit` +// namespace using `var`, but then complains about that because +// `unique symbol` must be `const`. 
+/** Terminate visit traversal completely */ +visit.BREAK = BREAK; +/** Do not visit the children of the current item */ +visit.SKIP = SKIP; +/** Remove the current item */ +visit.REMOVE = REMOVE; +/** Find the item at `path` from `cst` as the root */ +visit.itemAtPath = (cst, path) => { + let item = cst; + for (const [field, index] of path) { + const tok = item?.[field]; + if (tok && 'items' in tok) { + item = tok.items[index]; + } + else + return undefined; + } + return item; +}; +/** + * Get the immediate parent collection of the item at `path` from `cst` as the root. + * + * Throws an error if the collection is not found, which should never happen if the item itself exists. + */ +visit.parentCollection = (cst, path) => { + const parent = visit.itemAtPath(cst, path.slice(0, -1)); + const field = path[path.length - 1][0]; + const coll = parent?.[field]; + if (coll && 'items' in coll) + return coll; + throw new Error('Parent collection not found'); +}; +function _visit(path, item, visitor) { + let ctrl = visitor(item, path); + if (typeof ctrl === 'symbol') + return ctrl; + for (const field of ['key', 'value']) { + const token = item[field]; + if (token && 'items' in token) { + for (let i = 0; i < token.items.length; ++i) { + const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor); + if (typeof ci === 'number') + i = ci - 1; + else if (ci === BREAK) + return BREAK; + else if (ci === REMOVE) { + token.items.splice(i, 1); + i -= 1; + } + } + if (typeof ctrl === 'function' && field === 'key') + ctrl = ctrl(item, path); + } + } + return typeof ctrl === 'function' ? ctrl(item, path) : ctrl; +} + +exports.visit = visit; + + +/***/ }), + +/***/ 9169: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var cstScalar = __nccwpck_require__(9027); +var cstStringify = __nccwpck_require__(6307); +var cstVisit = __nccwpck_require__(8497); + +/** The byte order mark */ +const BOM = '\u{FEFF}'; +/** Start of doc-mode */ +const DOCUMENT = '\x02'; // C0: Start of Text +/** Unexpected end of flow-mode */ +const FLOW_END = '\x18'; // C0: Cancel +/** Next token is a scalar value */ +const SCALAR = '\x1f'; // C0: Unit Separator +/** @returns `true` if `token` is a flow or block collection */ +const isCollection = (token) => !!token && 'items' in token; +/** @returns `true` if `token` is a flow or block scalar; not an alias */ +const isScalar = (token) => !!token && + (token.type === 'scalar' || + token.type === 'single-quoted-scalar' || + token.type === 'double-quoted-scalar' || + token.type === 'block-scalar'); +/* istanbul ignore next */ +/** Get a printable representation of a lexer token */ +function prettyToken(token) { + switch (token) { + case BOM: + return ''; + case DOCUMENT: + return ''; + case FLOW_END: + return ''; + case SCALAR: + return ''; + default: + return JSON.stringify(token); + } +} +/** Identify the type of a lexer token. May return `null` for unknown tokens. 
*/ +function tokenType(source) { + switch (source) { + case BOM: + return 'byte-order-mark'; + case DOCUMENT: + return 'doc-mode'; + case FLOW_END: + return 'flow-error-end'; + case SCALAR: + return 'scalar'; + case '---': + return 'doc-start'; + case '...': + return 'doc-end'; + case '': + case '\n': + case '\r\n': + return 'newline'; + case '-': + return 'seq-item-ind'; + case '?': + return 'explicit-key-ind'; + case ':': + return 'map-value-ind'; + case '{': + return 'flow-map-start'; + case '}': + return 'flow-map-end'; + case '[': + return 'flow-seq-start'; + case ']': + return 'flow-seq-end'; + case ',': + return 'comma'; + } + switch (source[0]) { + case ' ': + case '\t': + return 'space'; + case '#': + return 'comment'; + case '%': + return 'directive-line'; + case '*': + return 'alias'; + case '&': + return 'anchor'; + case '!': + return 'tag'; + case "'": + return 'single-quoted-scalar'; + case '"': + return 'double-quoted-scalar'; + case '|': + case '>': + return 'block-scalar-header'; + } + return null; +} + +exports.createScalarToken = cstScalar.createScalarToken; +exports.resolveAsScalar = cstScalar.resolveAsScalar; +exports.setScalarValue = cstScalar.setScalarValue; +exports.stringify = cstStringify.stringify; +exports.visit = cstVisit.visit; +exports.BOM = BOM; +exports.DOCUMENT = DOCUMENT; +exports.FLOW_END = FLOW_END; +exports.SCALAR = SCALAR; +exports.isCollection = isCollection; +exports.isScalar = isScalar; +exports.prettyToken = prettyToken; +exports.tokenType = tokenType; + + +/***/ }), + +/***/ 5976: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var cst = __nccwpck_require__(9169); + +/* +START -> stream + +stream + directive -> line-end -> stream + indent + line-end -> stream + [else] -> line-start + +line-end + comment -> line-end + newline -> . + input-end -> END + +line-start + doc-start -> doc + doc-end -> stream + [else] -> indent -> block-start + +block-start + seq-item-start -> block-start + explicit-key-start -> block-start + map-value-start -> block-start + [else] -> doc + +doc + line-end -> line-start + spaces -> doc + anchor -> doc + tag -> doc + flow-start -> flow -> doc + flow-end -> error -> doc + seq-item-start -> error -> doc + explicit-key-start -> error -> doc + map-value-start -> doc + alias -> doc + quote-start -> quoted-scalar -> doc + block-scalar-header -> line-end -> block-scalar(min) -> line-start + [else] -> plain-scalar(false, min) -> doc + +flow + line-end -> flow + spaces -> flow + anchor -> flow + tag -> flow + flow-start -> flow -> flow + flow-end -> . + seq-item-start -> error -> flow + explicit-key-start -> flow + map-value-start -> flow + alias -> flow + quote-start -> quoted-scalar -> flow + comma -> flow + [else] -> plain-scalar(true, 0) -> flow + +quoted-scalar + quote-end -> . + [else] -> quoted-scalar + +block-scalar(min) + newline + peek(indent < min) -> . + [else] -> block-scalar(min) + +plain-scalar(is-flow, min) + scalar-end(is-flow) -> . + peek(newline + (indent < min)) -> . 
+ [else] -> plain-scalar(min) +*/ +function isEmpty(ch) { + switch (ch) { + case undefined: + case ' ': + case '\n': + case '\r': + case '\t': + return true; + default: + return false; + } +} +const hexDigits = new Set('0123456789ABCDEFabcdef'); +const tagChars = new Set("0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()"); +const flowIndicatorChars = new Set(',[]{}'); +const invalidAnchorChars = new Set(' ,[]{}\n\r\t'); +const isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch); +/** + * Splits an input string into lexical tokens, i.e. smaller strings that are + * easily identifiable by `tokens.tokenType()`. + * + * Lexing starts always in a "stream" context. Incomplete input may be buffered + * until a complete token can be emitted. + * + * In addition to slices of the original input, the following control characters + * may also be emitted: + * + * - `\x02` (Start of Text): A document starts with the next token + * - `\x18` (Cancel): Unexpected end of flow-mode (indicates an error) + * - `\x1f` (Unit Separator): Next token is a scalar value + * - `\u{FEFF}` (Byte order mark): Emitted separately outside documents + */ +class Lexer { + constructor() { + /** + * Flag indicating whether the end of the current buffer marks the end of + * all input + */ + this.atEnd = false; + /** + * Explicit indent set in block scalar header, as an offset from the current + * minimum indent, so e.g. set to 1 from a header `|2+`. Set to -1 if not + * explicitly set. + */ + this.blockScalarIndent = -1; + /** + * Block scalars that include a + (keep) chomping indicator in their header + * include trailing empty lines, which are otherwise excluded from the + * scalar's contents. + */ + this.blockScalarKeep = false; + /** Current input */ + this.buffer = ''; + /** + * Flag noting whether the map value indicator : can immediately follow this + * node within a flow context. + */ + this.flowKey = false; + /** Count of surrounding flow collection levels. */ + this.flowLevel = 0; + /** + * Minimum level of indentation required for next lines to be parsed as a + * part of the current scalar value. + */ + this.indentNext = 0; + /** Indentation level of the current line. */ + this.indentValue = 0; + /** Position of the next \n character. */ + this.lineEndPos = null; + /** Stores the state of the lexer if reaching the end of incpomplete input */ + this.next = null; + /** A pointer to `buffer`; the current position of the lexer. */ + this.pos = 0; + } + /** + * Generate YAML tokens from the `source` string. If `incomplete`, + * a part of the last line may be left as a buffer for the next call. + * + * @returns A generator of lexical tokens + */ + *lex(source, incomplete = false) { + if (source) { + if (typeof source !== 'string') + throw TypeError('source is not a string'); + this.buffer = this.buffer ? this.buffer + source : source; + this.lineEndPos = null; + } + this.atEnd = !incomplete; + let next = this.next ?? 
'stream'; + while (next && (incomplete || this.hasChars(1))) + next = yield* this.parseNext(next); + } + atLineEnd() { + let i = this.pos; + let ch = this.buffer[i]; + while (ch === ' ' || ch === '\t') + ch = this.buffer[++i]; + if (!ch || ch === '#' || ch === '\n') + return true; + if (ch === '\r') + return this.buffer[i + 1] === '\n'; + return false; + } + charAt(n) { + return this.buffer[this.pos + n]; + } + continueScalar(offset) { + let ch = this.buffer[offset]; + if (this.indentNext > 0) { + let indent = 0; + while (ch === ' ') + ch = this.buffer[++indent + offset]; + if (ch === '\r') { + const next = this.buffer[indent + offset + 1]; + if (next === '\n' || (!next && !this.atEnd)) + return offset + indent + 1; + } + return ch === '\n' || indent >= this.indentNext || (!ch && !this.atEnd) + ? offset + indent + : -1; + } + if (ch === '-' || ch === '.') { + const dt = this.buffer.substr(offset, 3); + if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3])) + return -1; + } + return offset; + } + getLine() { + let end = this.lineEndPos; + if (typeof end !== 'number' || (end !== -1 && end < this.pos)) { + end = this.buffer.indexOf('\n', this.pos); + this.lineEndPos = end; + } + if (end === -1) + return this.atEnd ? this.buffer.substring(this.pos) : null; + if (this.buffer[end - 1] === '\r') + end -= 1; + return this.buffer.substring(this.pos, end); + } + hasChars(n) { + return this.pos + n <= this.buffer.length; + } + setNext(state) { + this.buffer = this.buffer.substring(this.pos); + this.pos = 0; + this.lineEndPos = null; + this.next = state; + return null; + } + peek(n) { + return this.buffer.substr(this.pos, n); + } + *parseNext(next) { + switch (next) { + case 'stream': + return yield* this.parseStream(); + case 'line-start': + return yield* this.parseLineStart(); + case 'block-start': + return yield* this.parseBlockStart(); + case 'doc': + return yield* this.parseDocument(); + case 'flow': + return yield* this.parseFlowCollection(); + case 'quoted-scalar': + return yield* this.parseQuotedScalar(); + case 'block-scalar': + return yield* this.parseBlockScalar(); + case 'plain-scalar': + return yield* this.parsePlainScalar(); + } + } + *parseStream() { + let line = this.getLine(); + if (line === null) + return this.setNext('stream'); + if (line[0] === cst.BOM) { + yield* this.pushCount(1); + line = line.substring(1); + } + if (line[0] === '%') { + let dirEnd = line.length; + let cs = line.indexOf('#'); + while (cs !== -1) { + const ch = line[cs - 1]; + if (ch === ' ' || ch === '\t') { + dirEnd = cs - 1; + break; + } + else { + cs = line.indexOf('#', cs + 1); + } + } + while (true) { + const ch = line[dirEnd - 1]; + if (ch === ' ' || ch === '\t') + dirEnd -= 1; + else + break; + } + const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true)); + yield* this.pushCount(line.length - n); // possible comment + this.pushNewline(); + return 'stream'; + } + if (this.atLineEnd()) { + const sp = yield* this.pushSpaces(true); + yield* this.pushCount(line.length - sp); + yield* this.pushNewline(); + return 'stream'; + } + yield cst.DOCUMENT; + return yield* this.parseLineStart(); + } + *parseLineStart() { + const ch = this.charAt(0); + if (!ch && !this.atEnd) + return this.setNext('line-start'); + if (ch === '-' || ch === '.') { + if (!this.atEnd && !this.hasChars(4)) + return this.setNext('line-start'); + const s = this.peek(3); + if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) { + yield* this.pushCount(3); + this.indentValue = 0; + this.indentNext = 0; + 
return s === '---' ? 'doc' : 'stream'; + } + } + this.indentValue = yield* this.pushSpaces(false); + if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1))) + this.indentNext = this.indentValue; + return yield* this.parseBlockStart(); + } + *parseBlockStart() { + const [ch0, ch1] = this.peek(2); + if (!ch1 && !this.atEnd) + return this.setNext('block-start'); + if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) { + const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true)); + this.indentNext = this.indentValue + 1; + this.indentValue += n; + return yield* this.parseBlockStart(); + } + return 'doc'; + } + *parseDocument() { + yield* this.pushSpaces(true); + const line = this.getLine(); + if (line === null) + return this.setNext('doc'); + let n = yield* this.pushIndicators(); + switch (line[n]) { + case '#': + yield* this.pushCount(line.length - n); + // fallthrough + case undefined: + yield* this.pushNewline(); + return yield* this.parseLineStart(); + case '{': + case '[': + yield* this.pushCount(1); + this.flowKey = false; + this.flowLevel = 1; + return 'flow'; + case '}': + case ']': + // this is an error + yield* this.pushCount(1); + return 'doc'; + case '*': + yield* this.pushUntil(isNotAnchorChar); + return 'doc'; + case '"': + case "'": + return yield* this.parseQuotedScalar(); + case '|': + case '>': + n += yield* this.parseBlockScalarHeader(); + n += yield* this.pushSpaces(true); + yield* this.pushCount(line.length - n); + yield* this.pushNewline(); + return yield* this.parseBlockScalar(); + default: + return yield* this.parsePlainScalar(); + } + } + *parseFlowCollection() { + let nl, sp; + let indent = -1; + do { + nl = yield* this.pushNewline(); + if (nl > 0) { + sp = yield* this.pushSpaces(false); + this.indentValue = indent = sp; + } + else { + sp = 0; + } + sp += yield* this.pushSpaces(true); + } while (nl + sp > 0); + const line = this.getLine(); + if (line === null) + return this.setNext('flow'); + if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') || + (indent === 0 && + (line.startsWith('---') || line.startsWith('...')) && + isEmpty(line[3]))) { + // Allowing for the terminal ] or } at the same (rather than greater) + // indent level as the initial [ or { is technically invalid, but + // failing here would be surprising to users. + const atFlowEndMarker = indent === this.indentNext - 1 && + this.flowLevel === 1 && + (line[0] === ']' || line[0] === '}'); + if (!atFlowEndMarker) { + // this is an error + this.flowLevel = 0; + yield cst.FLOW_END; + return yield* this.parseLineStart(); + } + } + let n = 0; + while (line[n] === ',') { + n += yield* this.pushCount(1); + n += yield* this.pushSpaces(true); + this.flowKey = false; + } + n += yield* this.pushIndicators(); + switch (line[n]) { + case undefined: + return 'flow'; + case '#': + yield* this.pushCount(line.length - n); + return 'flow'; + case '{': + case '[': + yield* this.pushCount(1); + this.flowKey = false; + this.flowLevel += 1; + return 'flow'; + case '}': + case ']': + yield* this.pushCount(1); + this.flowKey = true; + this.flowLevel -= 1; + return this.flowLevel ? 
'flow' : 'doc'; + case '*': + yield* this.pushUntil(isNotAnchorChar); + return 'flow'; + case '"': + case "'": + this.flowKey = true; + return yield* this.parseQuotedScalar(); + case ':': { + const next = this.charAt(1); + if (this.flowKey || isEmpty(next) || next === ',') { + this.flowKey = false; + yield* this.pushCount(1); + yield* this.pushSpaces(true); + return 'flow'; + } + } + // fallthrough + default: + this.flowKey = false; + return yield* this.parsePlainScalar(); + } + } + *parseQuotedScalar() { + const quote = this.charAt(0); + let end = this.buffer.indexOf(quote, this.pos + 1); + if (quote === "'") { + while (end !== -1 && this.buffer[end + 1] === "'") + end = this.buffer.indexOf("'", end + 2); + } + else { + // double-quote + while (end !== -1) { + let n = 0; + while (this.buffer[end - 1 - n] === '\\') + n += 1; + if (n % 2 === 0) + break; + end = this.buffer.indexOf('"', end + 1); + } + } + // Only looking for newlines within the quotes + const qb = this.buffer.substring(0, end); + let nl = qb.indexOf('\n', this.pos); + if (nl !== -1) { + while (nl !== -1) { + const cs = this.continueScalar(nl + 1); + if (cs === -1) + break; + nl = qb.indexOf('\n', cs); + } + if (nl !== -1) { + // this is an error caused by an unexpected unindent + end = nl - (qb[nl - 1] === '\r' ? 2 : 1); + } + } + if (end === -1) { + if (!this.atEnd) + return this.setNext('quoted-scalar'); + end = this.buffer.length; + } + yield* this.pushToIndex(end + 1, false); + return this.flowLevel ? 'flow' : 'doc'; + } + *parseBlockScalarHeader() { + this.blockScalarIndent = -1; + this.blockScalarKeep = false; + let i = this.pos; + while (true) { + const ch = this.buffer[++i]; + if (ch === '+') + this.blockScalarKeep = true; + else if (ch > '0' && ch <= '9') + this.blockScalarIndent = Number(ch) - 1; + else if (ch !== '-') + break; + } + return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#'); + } + *parseBlockScalar() { + let nl = this.pos - 1; // may be -1 if this.pos === 0 + let indent = 0; + let ch; + loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) { + switch (ch) { + case ' ': + indent += 1; + break; + case '\n': + nl = i; + indent = 0; + break; + case '\r': { + const next = this.buffer[i + 1]; + if (!next && !this.atEnd) + return this.setNext('block-scalar'); + if (next === '\n') + break; + } // fallthrough + default: + break loop; + } + } + if (!ch && !this.atEnd) + return this.setNext('block-scalar'); + if (indent >= this.indentNext) { + if (this.blockScalarIndent === -1) + this.indentNext = indent; + else { + this.indentNext = + this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext); + } + do { + const cs = this.continueScalar(nl + 1); + if (cs === -1) + break; + nl = this.buffer.indexOf('\n', cs); + } while (nl !== -1); + if (nl === -1) { + if (!this.atEnd) + return this.setNext('block-scalar'); + nl = this.buffer.length; + } + } + // Trailing insufficiently indented tabs are invalid. + // To catch that during parsing, we include them in the block scalar value. 
+ let i = nl + 1; + ch = this.buffer[i]; + while (ch === ' ') + ch = this.buffer[++i]; + if (ch === '\t') { + while (ch === '\t' || ch === ' ' || ch === '\r' || ch === '\n') + ch = this.buffer[++i]; + nl = i - 1; + } + else if (!this.blockScalarKeep) { + do { + let i = nl - 1; + let ch = this.buffer[i]; + if (ch === '\r') + ch = this.buffer[--i]; + const lastChar = i; // Drop the line if last char not more indented + while (ch === ' ') + ch = this.buffer[--i]; + if (ch === '\n' && i >= this.pos && i + 1 + indent > lastChar) + nl = i; + else + break; + } while (true); + } + yield cst.SCALAR; + yield* this.pushToIndex(nl + 1, true); + return yield* this.parseLineStart(); + } + *parsePlainScalar() { + const inFlow = this.flowLevel > 0; + let end = this.pos - 1; + let i = this.pos - 1; + let ch; + while ((ch = this.buffer[++i])) { + if (ch === ':') { + const next = this.buffer[i + 1]; + if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next))) + break; + end = i; + } + else if (isEmpty(ch)) { + let next = this.buffer[i + 1]; + if (ch === '\r') { + if (next === '\n') { + i += 1; + ch = '\n'; + next = this.buffer[i + 1]; + } + else + end = i; + } + if (next === '#' || (inFlow && flowIndicatorChars.has(next))) + break; + if (ch === '\n') { + const cs = this.continueScalar(i + 1); + if (cs === -1) + break; + i = Math.max(i, cs - 2); // to advance, but still account for ' #' + } + } + else { + if (inFlow && flowIndicatorChars.has(ch)) + break; + end = i; + } + } + if (!ch && !this.atEnd) + return this.setNext('plain-scalar'); + yield cst.SCALAR; + yield* this.pushToIndex(end + 1, true); + return inFlow ? 'flow' : 'doc'; + } + *pushCount(n) { + if (n > 0) { + yield this.buffer.substr(this.pos, n); + this.pos += n; + return n; + } + return 0; + } + *pushToIndex(i, allowEmpty) { + const s = this.buffer.slice(this.pos, i); + if (s) { + yield s; + this.pos += s.length; + return s.length; + } + else if (allowEmpty) + yield ''; + return 0; + } + *pushIndicators() { + switch (this.charAt(0)) { + case '!': + return ((yield* this.pushTag()) + + (yield* this.pushSpaces(true)) + + (yield* this.pushIndicators())); + case '&': + return ((yield* this.pushUntil(isNotAnchorChar)) + + (yield* this.pushSpaces(true)) + + (yield* this.pushIndicators())); + case '-': // this is an error + case '?': // this is an error outside flow collections + case ':': { + const inFlow = this.flowLevel > 0; + const ch1 = this.charAt(1); + if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) { + if (!inFlow) + this.indentNext = this.indentValue + 1; + else if (this.flowKey) + this.flowKey = false; + return ((yield* this.pushCount(1)) + + (yield* this.pushSpaces(true)) + + (yield* this.pushIndicators())); + } + } + } + return 0; + } + *pushTag() { + if (this.charAt(1) === '<') { + let i = this.pos + 2; + let ch = this.buffer[i]; + while (!isEmpty(ch) && ch !== '>') + ch = this.buffer[++i]; + return yield* this.pushToIndex(ch === '>' ? 
i + 1 : i, false); + } + else { + let i = this.pos + 1; + let ch = this.buffer[i]; + while (ch) { + if (tagChars.has(ch)) + ch = this.buffer[++i]; + else if (ch === '%' && + hexDigits.has(this.buffer[i + 1]) && + hexDigits.has(this.buffer[i + 2])) { + ch = this.buffer[(i += 3)]; + } + else + break; + } + return yield* this.pushToIndex(i, false); + } + } + *pushNewline() { + const ch = this.buffer[this.pos]; + if (ch === '\n') + return yield* this.pushCount(1); + else if (ch === '\r' && this.charAt(1) === '\n') + return yield* this.pushCount(2); + else + return 0; + } + *pushSpaces(allowTabs) { + let i = this.pos - 1; + let ch; + do { + ch = this.buffer[++i]; + } while (ch === ' ' || (allowTabs && ch === '\t')); + const n = i - this.pos; + if (n > 0) { + yield this.buffer.substr(this.pos, n); + this.pos = i; + } + return n; + } + *pushUntil(test) { + let i = this.pos; + let ch = this.buffer[i]; + while (!test(ch)) + ch = this.buffer[++i]; + return yield* this.pushToIndex(i, false); + } +} + +exports.Lexer = Lexer; + + +/***/ }), + +/***/ 1929: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Tracks newlines during parsing in order to provide an efficient API for + * determining the one-indexed `{ line, col }` position for any offset + * within the input. + */ +class LineCounter { + constructor() { + this.lineStarts = []; + /** + * Should be called in ascending order. Otherwise, call + * `lineCounter.lineStarts.sort()` before calling `linePos()`. + */ + this.addNewLine = (offset) => this.lineStarts.push(offset); + /** + * Performs a binary search and returns the 1-indexed { line, col } + * position of `offset`. If `line === 0`, `addNewLine` has never been + * called or `offset` is before the first known newline. + */ + this.linePos = (offset) => { + let low = 0; + let high = this.lineStarts.length; + while (low < high) { + const mid = (low + high) >> 1; // Math.floor((low + high) / 2) + if (this.lineStarts[mid] < offset) + low = mid + 1; + else + high = mid; + } + if (this.lineStarts[low] === offset) + return { line: low + 1, col: 1 }; + if (low === 0) + return { line: 0, col: offset }; + const start = this.lineStarts[low - 1]; + return { line: low, col: offset - start + 1 }; + }; + } +} + +exports.LineCounter = LineCounter; + + +/***/ }), + +/***/ 3328: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var cst = __nccwpck_require__(9169); +var lexer = __nccwpck_require__(5976); + +function includesToken(list, type) { + for (let i = 0; i < list.length; ++i) + if (list[i].type === type) + return true; + return false; +} +function findNonEmptyIndex(list) { + for (let i = 0; i < list.length; ++i) { + switch (list[i].type) { + case 'space': + case 'comment': + case 'newline': + break; + default: + return i; + } + } + return -1; +} +function isFlowToken(token) { + switch (token?.type) { + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + case 'flow-collection': + return true; + default: + return false; + } +} +function getPrevProps(parent) { + switch (parent.type) { + case 'document': + return parent.start; + case 'block-map': { + const it = parent.items[parent.items.length - 1]; + return it.sep ?? 
it.start; + } + case 'block-seq': + return parent.items[parent.items.length - 1].start; + /* istanbul ignore next should not happen */ + default: + return []; + } +} +/** Note: May modify input array */ +function getFirstKeyStartProps(prev) { + if (prev.length === 0) + return []; + let i = prev.length; + loop: while (--i >= 0) { + switch (prev[i].type) { + case 'doc-start': + case 'explicit-key-ind': + case 'map-value-ind': + case 'seq-item-ind': + case 'newline': + break loop; + } + } + while (prev[++i]?.type === 'space') { + /* loop */ + } + return prev.splice(i, prev.length); +} +function fixFlowSeqItems(fc) { + if (fc.start.type === 'flow-seq-start') { + for (const it of fc.items) { + if (it.sep && + !it.value && + !includesToken(it.start, 'explicit-key-ind') && + !includesToken(it.sep, 'map-value-ind')) { + if (it.key) + it.value = it.key; + delete it.key; + if (isFlowToken(it.value)) { + if (it.value.end) + Array.prototype.push.apply(it.value.end, it.sep); + else + it.value.end = it.sep; + } + else + Array.prototype.push.apply(it.start, it.sep); + delete it.sep; + } + } + } +} +/** + * A YAML concrete syntax tree (CST) parser + * + * ```ts + * const src: string = ... + * for (const token of new Parser().parse(src)) { + * // token: Token + * } + * ``` + * + * To use the parser with a user-provided lexer: + * + * ```ts + * function* parse(source: string, lexer: Lexer) { + * const parser = new Parser() + * for (const lexeme of lexer.lex(source)) + * yield* parser.next(lexeme) + * yield* parser.end() + * } + * + * const src: string = ... + * const lexer = new Lexer() + * for (const token of parse(src, lexer)) { + * // token: Token + * } + * ``` + */ +class Parser { + /** + * @param onNewLine - If defined, called separately with the start position of + * each new line (in `parse()`, including the start of input). + */ + constructor(onNewLine) { + /** If true, space and sequence indicators count as indentation */ + this.atNewLine = true; + /** If true, next token is a scalar value */ + this.atScalar = false; + /** Current indentation level */ + this.indent = 0; + /** Current offset since the start of parsing */ + this.offset = 0; + /** On the same line with a block map key */ + this.onKeyLine = false; + /** Top indicates the node that's currently being built */ + this.stack = []; + /** The source of the current token, set in parse() */ + this.source = ''; + /** The type of the current token, set in parse() */ + this.type = ''; + // Must be defined after `next()` + this.lexer = new lexer.Lexer(); + this.onNewLine = onNewLine; + } + /** + * Parse `source` as a YAML stream. + * If `incomplete`, a part of the last line may be left as a buffer for the next call. + * + * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens. + * + * @returns A generator of tokens representing each directive, document, and other structure. + */ + *parse(source, incomplete = false) { + if (this.onNewLine && this.offset === 0) + this.onNewLine(0); + for (const lexeme of this.lexer.lex(source, incomplete)) + yield* this.next(lexeme); + if (!incomplete) + yield* this.end(); + } + /** + * Advance the parser by the `source` of one lexical token. 
+ */ + *next(source) { + this.source = source; + if (process.env.LOG_TOKENS) + console.log('|', cst.prettyToken(source)); + if (this.atScalar) { + this.atScalar = false; + yield* this.step(); + this.offset += source.length; + return; + } + const type = cst.tokenType(source); + if (!type) { + const message = `Not a YAML token: ${source}`; + yield* this.pop({ type: 'error', offset: this.offset, message, source }); + this.offset += source.length; + } + else if (type === 'scalar') { + this.atNewLine = false; + this.atScalar = true; + this.type = 'scalar'; + } + else { + this.type = type; + yield* this.step(); + switch (type) { + case 'newline': + this.atNewLine = true; + this.indent = 0; + if (this.onNewLine) + this.onNewLine(this.offset + source.length); + break; + case 'space': + if (this.atNewLine && source[0] === ' ') + this.indent += source.length; + break; + case 'explicit-key-ind': + case 'map-value-ind': + case 'seq-item-ind': + if (this.atNewLine) + this.indent += source.length; + break; + case 'doc-mode': + case 'flow-error-end': + return; + default: + this.atNewLine = false; + } + this.offset += source.length; + } + } + /** Call at end of input to push out any remaining constructions */ + *end() { + while (this.stack.length > 0) + yield* this.pop(); + } + get sourceToken() { + const st = { + type: this.type, + offset: this.offset, + indent: this.indent, + source: this.source + }; + return st; + } + *step() { + const top = this.peek(1); + if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) { + while (this.stack.length > 0) + yield* this.pop(); + this.stack.push({ + type: 'doc-end', + offset: this.offset, + source: this.source + }); + return; + } + if (!top) + return yield* this.stream(); + switch (top.type) { + case 'document': + return yield* this.document(top); + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + return yield* this.scalar(top); + case 'block-scalar': + return yield* this.blockScalar(top); + case 'block-map': + return yield* this.blockMap(top); + case 'block-seq': + return yield* this.blockSequence(top); + case 'flow-collection': + return yield* this.flowCollection(top); + case 'doc-end': + return yield* this.documentEnd(top); + } + /* istanbul ignore next should not happen */ + yield* this.pop(); + } + peek(n) { + return this.stack[this.stack.length - n]; + } + *pop(error) { + const token = error ?? this.stack.pop(); + /* istanbul ignore if should not happen */ + if (!token) { + const message = 'Tried to pop an empty stack'; + yield { type: 'error', offset: this.offset, source: '', message }; + } + else if (this.stack.length === 0) { + yield token; + } + else { + const top = this.peek(1); + if (token.type === 'block-scalar') { + // Block scalars use their parent rather than header indent + token.indent = 'indent' in top ? 
top.indent : 0; + } + else if (token.type === 'flow-collection' && top.type === 'document') { + // Ignore all indent for top-level flow collections + token.indent = 0; + } + if (token.type === 'flow-collection') + fixFlowSeqItems(token); + switch (top.type) { + case 'document': + top.value = token; + break; + case 'block-scalar': + top.props.push(token); // error + break; + case 'block-map': { + const it = top.items[top.items.length - 1]; + if (it.value) { + top.items.push({ start: [], key: token, sep: [] }); + this.onKeyLine = true; + return; + } + else if (it.sep) { + it.value = token; + } + else { + Object.assign(it, { key: token, sep: [] }); + this.onKeyLine = !it.explicitKey; + return; + } + break; + } + case 'block-seq': { + const it = top.items[top.items.length - 1]; + if (it.value) + top.items.push({ start: [], value: token }); + else + it.value = token; + break; + } + case 'flow-collection': { + const it = top.items[top.items.length - 1]; + if (!it || it.value) + top.items.push({ start: [], key: token, sep: [] }); + else if (it.sep) + it.value = token; + else + Object.assign(it, { key: token, sep: [] }); + return; + } + /* istanbul ignore next should not happen */ + default: + yield* this.pop(); + yield* this.pop(token); + } + if ((top.type === 'document' || + top.type === 'block-map' || + top.type === 'block-seq') && + (token.type === 'block-map' || token.type === 'block-seq')) { + const last = token.items[token.items.length - 1]; + if (last && + !last.sep && + !last.value && + last.start.length > 0 && + findNonEmptyIndex(last.start) === -1 && + (token.indent === 0 || + last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) { + if (top.type === 'document') + top.end = last.start; + else + top.items.push({ start: last.start }); + token.items.splice(-1, 1); + } + } + } + } + *stream() { + switch (this.type) { + case 'directive-line': + yield { type: 'directive', offset: this.offset, source: this.source }; + return; + case 'byte-order-mark': + case 'space': + case 'comment': + case 'newline': + yield this.sourceToken; + return; + case 'doc-mode': + case 'doc-start': { + const doc = { + type: 'document', + offset: this.offset, + start: [] + }; + if (this.type === 'doc-start') + doc.start.push(this.sourceToken); + this.stack.push(doc); + return; + } + } + yield { + type: 'error', + offset: this.offset, + message: `Unexpected ${this.type} token in YAML stream`, + source: this.source + }; + } + *document(doc) { + if (doc.value) + return yield* this.lineEnd(doc); + switch (this.type) { + case 'doc-start': { + if (findNonEmptyIndex(doc.start) !== -1) { + yield* this.pop(); + yield* this.step(); + } + else + doc.start.push(this.sourceToken); + return; + } + case 'anchor': + case 'tag': + case 'space': + case 'comment': + case 'newline': + doc.start.push(this.sourceToken); + return; + } + const bv = this.startBlockValue(doc); + if (bv) + this.stack.push(bv); + else { + yield { + type: 'error', + offset: this.offset, + message: `Unexpected ${this.type} token in YAML document`, + source: this.source + }; + } + } + *scalar(scalar) { + if (this.type === 'map-value-ind') { + const prev = getPrevProps(this.peek(2)); + const start = getFirstKeyStartProps(prev); + let sep; + if (scalar.end) { + sep = scalar.end; + sep.push(this.sourceToken); + delete scalar.end; + } + else + sep = [this.sourceToken]; + const map = { + type: 'block-map', + offset: scalar.offset, + indent: scalar.indent, + items: [{ start, key: scalar, sep }] + }; + this.onKeyLine = true; + 
this.stack[this.stack.length - 1] = map; + } + else + yield* this.lineEnd(scalar); + } + *blockScalar(scalar) { + switch (this.type) { + case 'space': + case 'comment': + case 'newline': + scalar.props.push(this.sourceToken); + return; + case 'scalar': + scalar.source = this.source; + // block-scalar source includes trailing newline + this.atNewLine = true; + this.indent = 0; + if (this.onNewLine) { + let nl = this.source.indexOf('\n') + 1; + while (nl !== 0) { + this.onNewLine(this.offset + nl); + nl = this.source.indexOf('\n', nl) + 1; + } + } + yield* this.pop(); + break; + /* istanbul ignore next should not happen */ + default: + yield* this.pop(); + yield* this.step(); + } + } + *blockMap(map) { + const it = map.items[map.items.length - 1]; + // it.sep is true-ish if pair already has key or : separator + switch (this.type) { + case 'newline': + this.onKeyLine = false; + if (it.value) { + const end = 'end' in it.value ? it.value.end : undefined; + const last = Array.isArray(end) ? end[end.length - 1] : undefined; + if (last?.type === 'comment') + end?.push(this.sourceToken); + else + map.items.push({ start: [this.sourceToken] }); + } + else if (it.sep) { + it.sep.push(this.sourceToken); + } + else { + it.start.push(this.sourceToken); + } + return; + case 'space': + case 'comment': + if (it.value) { + map.items.push({ start: [this.sourceToken] }); + } + else if (it.sep) { + it.sep.push(this.sourceToken); + } + else { + if (this.atIndentedComment(it.start, map.indent)) { + const prev = map.items[map.items.length - 2]; + const end = prev?.value?.end; + if (Array.isArray(end)) { + Array.prototype.push.apply(end, it.start); + end.push(this.sourceToken); + map.items.pop(); + return; + } + } + it.start.push(this.sourceToken); + } + return; + } + if (this.indent >= map.indent) { + const atMapIndent = !this.onKeyLine && this.indent === map.indent; + const atNextItem = atMapIndent && + (it.sep || it.explicitKey) && + this.type !== 'seq-item-ind'; + // For empty nodes, assign newline-separated not indented empty tokens to following node + let start = []; + if (atNextItem && it.sep && !it.value) { + const nl = []; + for (let i = 0; i < it.sep.length; ++i) { + const st = it.sep[i]; + switch (st.type) { + case 'newline': + nl.push(i); + break; + case 'space': + break; + case 'comment': + if (st.indent > map.indent) + nl.length = 0; + break; + default: + nl.length = 0; + } + } + if (nl.length >= 2) + start = it.sep.splice(nl[1]); + } + switch (this.type) { + case 'anchor': + case 'tag': + if (atNextItem || it.value) { + start.push(this.sourceToken); + map.items.push({ start }); + this.onKeyLine = true; + } + else if (it.sep) { + it.sep.push(this.sourceToken); + } + else { + it.start.push(this.sourceToken); + } + return; + case 'explicit-key-ind': + if (!it.sep && !it.explicitKey) { + it.start.push(this.sourceToken); + it.explicitKey = true; + } + else if (atNextItem || it.value) { + start.push(this.sourceToken); + map.items.push({ start, explicitKey: true }); + } + else { + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start: [this.sourceToken], explicitKey: true }] + }); + } + this.onKeyLine = true; + return; + case 'map-value-ind': + if (it.explicitKey) { + if (!it.sep) { + if (includesToken(it.start, 'newline')) { + Object.assign(it, { key: null, sep: [this.sourceToken] }); + } + else { + const start = getFirstKeyStartProps(it.start); + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key: 
null, sep: [this.sourceToken] }] + }); + } + } + else if (it.value) { + map.items.push({ start: [], key: null, sep: [this.sourceToken] }); + } + else if (includesToken(it.sep, 'map-value-ind')) { + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key: null, sep: [this.sourceToken] }] + }); + } + else if (isFlowToken(it.key) && + !includesToken(it.sep, 'newline')) { + const start = getFirstKeyStartProps(it.start); + const key = it.key; + const sep = it.sep; + sep.push(this.sourceToken); + // @ts-expect-error type guard is wrong here + delete it.key; + // @ts-expect-error type guard is wrong here + delete it.sep; + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key, sep }] + }); + } + else if (start.length > 0) { + // Not actually at next item + it.sep = it.sep.concat(start, this.sourceToken); + } + else { + it.sep.push(this.sourceToken); + } + } + else { + if (!it.sep) { + Object.assign(it, { key: null, sep: [this.sourceToken] }); + } + else if (it.value || atNextItem) { + map.items.push({ start, key: null, sep: [this.sourceToken] }); + } + else if (includesToken(it.sep, 'map-value-ind')) { + this.stack.push({ + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start: [], key: null, sep: [this.sourceToken] }] + }); + } + else { + it.sep.push(this.sourceToken); + } + } + this.onKeyLine = true; + return; + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': { + const fs = this.flowScalar(this.type); + if (atNextItem || it.value) { + map.items.push({ start, key: fs, sep: [] }); + this.onKeyLine = true; + } + else if (it.sep) { + this.stack.push(fs); + } + else { + Object.assign(it, { key: fs, sep: [] }); + this.onKeyLine = true; + } + return; + } + default: { + const bv = this.startBlockValue(map); + if (bv) { + if (atMapIndent && bv.type !== 'block-seq') { + map.items.push({ start }); + } + this.stack.push(bv); + return; + } + } + } + } + yield* this.pop(); + yield* this.step(); + } + *blockSequence(seq) { + const it = seq.items[seq.items.length - 1]; + switch (this.type) { + case 'newline': + if (it.value) { + const end = 'end' in it.value ? it.value.end : undefined; + const last = Array.isArray(end) ? 
end[end.length - 1] : undefined; + if (last?.type === 'comment') + end?.push(this.sourceToken); + else + seq.items.push({ start: [this.sourceToken] }); + } + else + it.start.push(this.sourceToken); + return; + case 'space': + case 'comment': + if (it.value) + seq.items.push({ start: [this.sourceToken] }); + else { + if (this.atIndentedComment(it.start, seq.indent)) { + const prev = seq.items[seq.items.length - 2]; + const end = prev?.value?.end; + if (Array.isArray(end)) { + Array.prototype.push.apply(end, it.start); + end.push(this.sourceToken); + seq.items.pop(); + return; + } + } + it.start.push(this.sourceToken); + } + return; + case 'anchor': + case 'tag': + if (it.value || this.indent <= seq.indent) + break; + it.start.push(this.sourceToken); + return; + case 'seq-item-ind': + if (this.indent !== seq.indent) + break; + if (it.value || includesToken(it.start, 'seq-item-ind')) + seq.items.push({ start: [this.sourceToken] }); + else + it.start.push(this.sourceToken); + return; + } + if (this.indent > seq.indent) { + const bv = this.startBlockValue(seq); + if (bv) { + this.stack.push(bv); + return; + } + } + yield* this.pop(); + yield* this.step(); + } + *flowCollection(fc) { + const it = fc.items[fc.items.length - 1]; + if (this.type === 'flow-error-end') { + let top; + do { + yield* this.pop(); + top = this.peek(1); + } while (top && top.type === 'flow-collection'); + } + else if (fc.end.length === 0) { + switch (this.type) { + case 'comma': + case 'explicit-key-ind': + if (!it || it.sep) + fc.items.push({ start: [this.sourceToken] }); + else + it.start.push(this.sourceToken); + return; + case 'map-value-ind': + if (!it || it.value) + fc.items.push({ start: [], key: null, sep: [this.sourceToken] }); + else if (it.sep) + it.sep.push(this.sourceToken); + else + Object.assign(it, { key: null, sep: [this.sourceToken] }); + return; + case 'space': + case 'comment': + case 'newline': + case 'anchor': + case 'tag': + if (!it || it.value) + fc.items.push({ start: [this.sourceToken] }); + else if (it.sep) + it.sep.push(this.sourceToken); + else + it.start.push(this.sourceToken); + return; + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': { + const fs = this.flowScalar(this.type); + if (!it || it.value) + fc.items.push({ start: [], key: fs, sep: [] }); + else if (it.sep) + this.stack.push(fs); + else + Object.assign(it, { key: fs, sep: [] }); + return; + } + case 'flow-map-end': + case 'flow-seq-end': + fc.end.push(this.sourceToken); + return; + } + const bv = this.startBlockValue(fc); + /* istanbul ignore else should not happen */ + if (bv) + this.stack.push(bv); + else { + yield* this.pop(); + yield* this.step(); + } + } + else { + const parent = this.peek(2); + if (parent.type === 'block-map' && + ((this.type === 'map-value-ind' && parent.indent === fc.indent) || + (this.type === 'newline' && + !parent.items[parent.items.length - 1].sep))) { + yield* this.pop(); + yield* this.step(); + } + else if (this.type === 'map-value-ind' && + parent.type !== 'flow-collection') { + const prev = getPrevProps(parent); + const start = getFirstKeyStartProps(prev); + fixFlowSeqItems(fc); + const sep = fc.end.splice(1, fc.end.length); + sep.push(this.sourceToken); + const map = { + type: 'block-map', + offset: fc.offset, + indent: fc.indent, + items: [{ start, key: fc, sep }] + }; + this.onKeyLine = true; + this.stack[this.stack.length - 1] = map; + } + else { + yield* this.lineEnd(fc); + } + } + } + flowScalar(type) { + if (this.onNewLine) { + let nl = 
this.source.indexOf('\n') + 1; + while (nl !== 0) { + this.onNewLine(this.offset + nl); + nl = this.source.indexOf('\n', nl) + 1; + } + } + return { + type, + offset: this.offset, + indent: this.indent, + source: this.source + }; + } + startBlockValue(parent) { + switch (this.type) { + case 'alias': + case 'scalar': + case 'single-quoted-scalar': + case 'double-quoted-scalar': + return this.flowScalar(this.type); + case 'block-scalar-header': + return { + type: 'block-scalar', + offset: this.offset, + indent: this.indent, + props: [this.sourceToken], + source: '' + }; + case 'flow-map-start': + case 'flow-seq-start': + return { + type: 'flow-collection', + offset: this.offset, + indent: this.indent, + start: this.sourceToken, + items: [], + end: [] + }; + case 'seq-item-ind': + return { + type: 'block-seq', + offset: this.offset, + indent: this.indent, + items: [{ start: [this.sourceToken] }] + }; + case 'explicit-key-ind': { + this.onKeyLine = true; + const prev = getPrevProps(parent); + const start = getFirstKeyStartProps(prev); + start.push(this.sourceToken); + return { + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, explicitKey: true }] + }; + } + case 'map-value-ind': { + this.onKeyLine = true; + const prev = getPrevProps(parent); + const start = getFirstKeyStartProps(prev); + return { + type: 'block-map', + offset: this.offset, + indent: this.indent, + items: [{ start, key: null, sep: [this.sourceToken] }] + }; + } + } + return null; + } + atIndentedComment(start, indent) { + if (this.type !== 'comment') + return false; + if (this.indent <= indent) + return false; + return start.every(st => st.type === 'newline' || st.type === 'space'); + } + *documentEnd(docEnd) { + if (this.type !== 'doc-mode') { + if (docEnd.end) + docEnd.end.push(this.sourceToken); + else + docEnd.end = [this.sourceToken]; + if (this.type === 'newline') + yield* this.pop(); + } + } + *lineEnd(token) { + switch (this.type) { + case 'comma': + case 'doc-start': + case 'doc-end': + case 'flow-seq-end': + case 'flow-map-end': + case 'map-value-ind': + yield* this.pop(); + yield* this.step(); + break; + case 'newline': + this.onKeyLine = false; + // fallthrough + case 'space': + case 'comment': + default: + // all other values are errors + if (token.end) + token.end.push(this.sourceToken); + else + token.end = [this.sourceToken]; + if (this.type === 'newline') + yield* this.pop(); + } + } +} + +exports.Parser = Parser; + + +/***/ }), + +/***/ 8649: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var composer = __nccwpck_require__(9493); +var Document = __nccwpck_require__(42); +var errors = __nccwpck_require__(4236); +var log = __nccwpck_require__(6909); +var identity = __nccwpck_require__(5589); +var lineCounter = __nccwpck_require__(1929); +var parser = __nccwpck_require__(3328); + +function parseOptions(options) { + const prettyErrors = options.prettyErrors !== false; + const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null; + return { lineCounter: lineCounter$1, prettyErrors }; +} +/** + * Parse the input as a stream of YAML documents. + * + * Documents should be separated from each other by `...` or `---` marker lines. + * + * @returns If an empty `docs` array is returned, it will be of type + * EmptyStream and contain additional stream information. In + * TypeScript, you should use `'empty' in docs` as a type guard for it. 
+ */ +function parseAllDocuments(source, options = {}) { + const { lineCounter, prettyErrors } = parseOptions(options); + const parser$1 = new parser.Parser(lineCounter?.addNewLine); + const composer$1 = new composer.Composer(options); + const docs = Array.from(composer$1.compose(parser$1.parse(source))); + if (prettyErrors && lineCounter) + for (const doc of docs) { + doc.errors.forEach(errors.prettifyError(source, lineCounter)); + doc.warnings.forEach(errors.prettifyError(source, lineCounter)); + } + if (docs.length > 0) + return docs; + return Object.assign([], { empty: true }, composer$1.streamInfo()); +} +/** Parse an input string into a single YAML.Document */ +function parseDocument(source, options = {}) { + const { lineCounter, prettyErrors } = parseOptions(options); + const parser$1 = new parser.Parser(lineCounter?.addNewLine); + const composer$1 = new composer.Composer(options); + // `doc` is always set by compose.end(true) at the very latest + let doc = null; + for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) { + if (!doc) + doc = _doc; + else if (doc.options.logLevel !== 'silent') { + doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()')); + break; + } + } + if (prettyErrors && lineCounter) { + doc.errors.forEach(errors.prettifyError(source, lineCounter)); + doc.warnings.forEach(errors.prettifyError(source, lineCounter)); + } + return doc; +} +function parse(src, reviver, options) { + let _reviver = undefined; + if (typeof reviver === 'function') { + _reviver = reviver; + } + else if (options === undefined && reviver && typeof reviver === 'object') { + options = reviver; + } + const doc = parseDocument(src, options); + if (!doc) + return null; + doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning)); + if (doc.errors.length > 0) { + if (doc.options.logLevel !== 'silent') + throw doc.errors[0]; + else + doc.errors = []; + } + return doc.toJS(Object.assign({ reviver: _reviver }, options)); +} +function stringify(value, replacer, options) { + let _replacer = null; + if (typeof replacer === 'function' || Array.isArray(replacer)) { + _replacer = replacer; + } + else if (options === undefined && replacer) { + options = replacer; + } + if (typeof options === 'string') + options = options.length; + if (typeof options === 'number') { + const indent = Math.round(options); + options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent }; + } + if (value === undefined) { + const { keepUndefined } = options ?? replacer ?? {}; + if (!keepUndefined) + return undefined; + } + if (identity.isDocument(value) && !_replacer) + return value.toString(options); + return new Document.Document(value, _replacer, options).toString(options); +} + +exports.parse = parse; +exports.parseAllDocuments = parseAllDocuments; +exports.parseDocument = parseDocument; +exports.stringify = stringify; + + +/***/ }), + +/***/ 6831: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var map = __nccwpck_require__(83); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var tags = __nccwpck_require__(4138); + +const sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 
1 : 0; +class Schema { + constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) { + this.compat = Array.isArray(compat) + ? tags.getTags(compat, 'compat') + : compat + ? tags.getTags(null, compat) + : null; + this.name = (typeof schema === 'string' && schema) || 'core'; + this.knownTags = resolveKnownTags ? tags.coreKnownTags : {}; + this.tags = tags.getTags(customTags, this.name, merge); + this.toStringOptions = toStringDefaults ?? null; + Object.defineProperty(this, identity.MAP, { value: map.map }); + Object.defineProperty(this, identity.SCALAR, { value: string.string }); + Object.defineProperty(this, identity.SEQ, { value: seq.seq }); + // Used by createMap() + this.sortMapEntries = + typeof sortMapEntries === 'function' + ? sortMapEntries + : sortMapEntries === true + ? sortMapEntriesByKey + : null; + } + clone() { + const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this)); + copy.tags = this.tags.slice(); + return copy; + } +} + +exports.Schema = Schema; + + +/***/ }), + +/***/ 83: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var YAMLMap = __nccwpck_require__(6011); + +const map = { + collection: 'map', + default: true, + nodeClass: YAMLMap.YAMLMap, + tag: 'tag:yaml.org,2002:map', + resolve(map, onError) { + if (!identity.isMap(map)) + onError('Expected a mapping for this tag'); + return map; + }, + createNode: (schema, obj, ctx) => YAMLMap.YAMLMap.from(schema, obj, ctx) +}; + +exports.map = map; + + +/***/ }), + +/***/ 6703: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +const nullTag = { + identify: value => value == null, + createNode: () => new Scalar.Scalar(null), + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^(?:~|[Nn]ull|NULL)?$/, + resolve: () => new Scalar.Scalar(null), + stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source) + ? 
source + : ctx.options.nullStr +}; + +exports.nullTag = nullTag; + + +/***/ }), + +/***/ 1693: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var YAMLSeq = __nccwpck_require__(5161); + +const seq = { + collection: 'seq', + default: true, + nodeClass: YAMLSeq.YAMLSeq, + tag: 'tag:yaml.org,2002:seq', + resolve(seq, onError) { + if (!identity.isSeq(seq)) + onError('Expected a sequence for this tag'); + return seq; + }, + createNode: (schema, obj, ctx) => YAMLSeq.YAMLSeq.from(schema, obj, ctx) +}; + +exports.seq = seq; + + +/***/ }), + +/***/ 2201: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyString = __nccwpck_require__(6226); + +const string = { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: str => str, + stringify(item, ctx, onComment, onChompKeep) { + ctx = Object.assign({ actualString: true }, ctx); + return stringifyString.stringifyString(item, ctx, onComment, onChompKeep); + } +}; + +exports.string = string; + + +/***/ }), + +/***/ 2045: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +const boolTag = { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/, + resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'), + stringify({ source, value }, ctx) { + if (source && boolTag.test.test(source)) { + const sv = source[0] === 't' || source[0] === 'T'; + if (value === sv) + return source; + } + return value ? ctx.options.trueStr : ctx.options.falseStr; + } +}; + +exports.boolTag = boolTag; + + +/***/ }), + +/***/ 6810: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var stringifyNumber = __nccwpck_require__(4174); + +const floatNaN = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, + resolve: str => str.slice(-3).toLowerCase() === 'nan' + ? NaN + : str[0] === '-' + ? Number.NEGATIVE_INFINITY + : Number.POSITIVE_INFINITY, + stringify: stringifyNumber.stringifyNumber +}; +const floatExp = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?(?:\.[0-9]+|[0-9]+(?:\.[0-9]*)?)[eE][-+]?[0-9]+$/, + resolve: str => parseFloat(str), + stringify(node) { + const num = Number(node.value); + return isFinite(num) ? 
num.toExponential() : stringifyNumber.stringifyNumber(node); + } +}; +const float = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:\.[0-9]+|[0-9]+\.[0-9]*)$/, + resolve(str) { + const node = new Scalar.Scalar(parseFloat(str)); + const dot = str.indexOf('.'); + if (dot !== -1 && str[str.length - 1] === '0') + node.minFractionDigits = str.length - dot - 1; + return node; + }, + stringify: stringifyNumber.stringifyNumber +}; + +exports.float = float; +exports.floatExp = floatExp; +exports.floatNaN = floatNaN; + + +/***/ }), + +/***/ 3019: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyNumber = __nccwpck_require__(4174); + +const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); +const intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? BigInt(str) : parseInt(str.substring(offset), radix)); +function intStringify(node, radix, prefix) { + const { value } = node; + if (intIdentify(value) && value >= 0) + return prefix + value.toString(radix); + return stringifyNumber.stringifyNumber(node); +} +const intOct = { + identify: value => intIdentify(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^0o[0-7]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt), + stringify: node => intStringify(node, 8, '0o') +}; +const int = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^[-+]?[0-9]+$/, + resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), + stringify: stringifyNumber.stringifyNumber +}; +const intHex = { + identify: value => intIdentify(value) && value >= 0, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^0x[0-9a-fA-F]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), + stringify: node => intStringify(node, 16, '0x') +}; + +exports.int = int; +exports.intHex = intHex; +exports.intOct = intOct; + + +/***/ }), + +/***/ 27: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var map = __nccwpck_require__(83); +var _null = __nccwpck_require__(6703); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var bool = __nccwpck_require__(2045); +var float = __nccwpck_require__(6810); +var int = __nccwpck_require__(3019); + +const schema = [ + map.map, + seq.seq, + string.string, + _null.nullTag, + bool.boolTag, + int.intOct, + int.int, + int.intHex, + float.floatNaN, + float.floatExp, + float.float +]; + +exports.schema = schema; + + +/***/ }), + +/***/ 4545: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var map = __nccwpck_require__(83); +var seq = __nccwpck_require__(1693); + +function intIdentify(value) { + return typeof value === 'bigint' || Number.isInteger(value); +} +const stringifyJSON = ({ value }) => JSON.stringify(value); +const jsonScalars = [ + { + identify: value => typeof value === 'string', + default: true, + tag: 'tag:yaml.org,2002:str', + resolve: str => str, + stringify: stringifyJSON + }, + { + identify: value => value == null, + createNode: () => new Scalar.Scalar(null), + default: true, + tag: 'tag:yaml.org,2002:null', + test: /^null$/, + resolve: () => null, + stringify: stringifyJSON + }, + { + identify: value => typeof value === 'boolean', + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^true$|^false$/, 
+ resolve: str => str === 'true', + stringify: stringifyJSON + }, + { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^-?(?:0|[1-9][0-9]*)$/, + resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10), + stringify: ({ value }) => intIdentify(value) ? value.toString() : JSON.stringify(value) + }, + { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^-?(?:0|[1-9][0-9]*)(?:\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/, + resolve: str => parseFloat(str), + stringify: stringifyJSON + } +]; +const jsonError = { + default: true, + tag: '', + test: /^/, + resolve(str, onError) { + onError(`Unresolved plain scalar ${JSON.stringify(str)}`); + return str; + } +}; +const schema = [map.map, seq.seq].concat(jsonScalars, jsonError); + +exports.schema = schema; + + +/***/ }), + +/***/ 4138: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var map = __nccwpck_require__(83); +var _null = __nccwpck_require__(6703); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var bool = __nccwpck_require__(2045); +var float = __nccwpck_require__(6810); +var int = __nccwpck_require__(3019); +var schema = __nccwpck_require__(27); +var schema$1 = __nccwpck_require__(4545); +var binary = __nccwpck_require__(5724); +var merge = __nccwpck_require__(9614); +var omap = __nccwpck_require__(8974); +var pairs = __nccwpck_require__(9841); +var schema$2 = __nccwpck_require__(5389); +var set = __nccwpck_require__(7847); +var timestamp = __nccwpck_require__(1156); + +const schemas = new Map([ + ['core', schema.schema], + ['failsafe', [map.map, seq.seq, string.string]], + ['json', schema$1.schema], + ['yaml11', schema$2.schema], + ['yaml-1.1', schema$2.schema] +]); +const tagsByName = { + binary: binary.binary, + bool: bool.boolTag, + float: float.float, + floatExp: float.floatExp, + floatNaN: float.floatNaN, + floatTime: timestamp.floatTime, + int: int.int, + intHex: int.intHex, + intOct: int.intOct, + intTime: timestamp.intTime, + map: map.map, + merge: merge.merge, + null: _null.nullTag, + omap: omap.omap, + pairs: pairs.pairs, + seq: seq.seq, + set: set.set, + timestamp: timestamp.timestamp +}; +const coreKnownTags = { + 'tag:yaml.org,2002:binary': binary.binary, + 'tag:yaml.org,2002:merge': merge.merge, + 'tag:yaml.org,2002:omap': omap.omap, + 'tag:yaml.org,2002:pairs': pairs.pairs, + 'tag:yaml.org,2002:set': set.set, + 'tag:yaml.org,2002:timestamp': timestamp.timestamp +}; +function getTags(customTags, schemaName, addMergeTag) { + const schemaTags = schemas.get(schemaName); + if (schemaTags && !customTags) { + return addMergeTag && !schemaTags.includes(merge.merge) + ? schemaTags.concat(merge.merge) + : schemaTags.slice(); + } + let tags = schemaTags; + if (!tags) { + if (Array.isArray(customTags)) + tags = []; + else { + const keys = Array.from(schemas.keys()) + .filter(key => key !== 'yaml11') + .map(key => JSON.stringify(key)) + .join(', '); + throw new Error(`Unknown schema "${schemaName}"; use one of ${keys} or define customTags array`); + } + } + if (Array.isArray(customTags)) { + for (const tag of customTags) + tags = tags.concat(tag); + } + else if (typeof customTags === 'function') { + tags = customTags(tags.slice()); + } + if (addMergeTag) + tags = tags.concat(merge.merge); + return tags.reduce((tags, tag) => { + const tagObj = typeof tag === 'string' ? 
tagsByName[tag] : tag; + if (!tagObj) { + const tagName = JSON.stringify(tag); + const keys = Object.keys(tagsByName) + .map(key => JSON.stringify(key)) + .join(', '); + throw new Error(`Unknown custom tag ${tagName}; use one of ${keys}`); + } + if (!tags.includes(tagObj)) + tags.push(tagObj); + return tags; + }, []); +} + +exports.coreKnownTags = coreKnownTags; +exports.getTags = getTags; + + +/***/ }), + +/***/ 5724: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var stringifyString = __nccwpck_require__(6226); + +const binary = { + identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array + default: false, + tag: 'tag:yaml.org,2002:binary', + /** + * Returns a Buffer in node and an Uint8Array in browsers + * + * To use the resulting buffer as an image, you'll want to do something like: + * + * const blob = new Blob([buffer], { type: 'image/jpeg' }) + * document.querySelector('#photo').src = URL.createObjectURL(blob) + */ + resolve(src, onError) { + if (typeof Buffer === 'function') { + return Buffer.from(src, 'base64'); + } + else if (typeof atob === 'function') { + // On IE 11, atob() can't handle newlines + const str = atob(src.replace(/[\n\r]/g, '')); + const buffer = new Uint8Array(str.length); + for (let i = 0; i < str.length; ++i) + buffer[i] = str.charCodeAt(i); + return buffer; + } + else { + onError('This environment does not support reading binary tags; either Buffer or atob is required'); + return src; + } + }, + stringify({ comment, type, value }, ctx, onComment, onChompKeep) { + const buf = value; // checked earlier by binary.identify() + let str; + if (typeof Buffer === 'function') { + str = + buf instanceof Buffer + ? buf.toString('base64') + : Buffer.from(buf.buffer).toString('base64'); + } + else if (typeof btoa === 'function') { + let s = ''; + for (let i = 0; i < buf.length; ++i) + s += String.fromCharCode(buf[i]); + str = btoa(s); + } + else { + throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required'); + } + if (!type) + type = Scalar.Scalar.BLOCK_LITERAL; + if (type !== Scalar.Scalar.QUOTE_DOUBLE) { + const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth); + const n = Math.ceil(str.length / lineWidth); + const lines = new Array(n); + for (let i = 0, o = 0; i < n; ++i, o += lineWidth) { + lines[i] = str.substr(o, lineWidth); + } + str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\n' : ' '); + } + return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep); + } +}; + +exports.binary = binary; + + +/***/ }), + +/***/ 2631: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); + +function boolStringify({ value, source }, ctx) { + const boolObj = value ? trueTag : falseTag; + if (source && boolObj.test.test(source)) + return source; + return value ? 
ctx.options.trueStr : ctx.options.falseStr; +} +const trueTag = { + identify: value => value === true, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/, + resolve: () => new Scalar.Scalar(true), + stringify: boolStringify +}; +const falseTag = { + identify: value => value === false, + default: true, + tag: 'tag:yaml.org,2002:bool', + test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/, + resolve: () => new Scalar.Scalar(false), + stringify: boolStringify +}; + +exports.falseTag = falseTag; +exports.trueTag = trueTag; + + +/***/ }), + +/***/ 8035: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var stringifyNumber = __nccwpck_require__(4174); + +const floatNaN = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^(?:[-+]?\.(?:inf|Inf|INF)|\.nan|\.NaN|\.NAN)$/, + resolve: (str) => str.slice(-3).toLowerCase() === 'nan' + ? NaN + : str[0] === '-' + ? Number.NEGATIVE_INFINITY + : Number.POSITIVE_INFINITY, + stringify: stringifyNumber.stringifyNumber +}; +const floatExp = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'EXP', + test: /^[-+]?(?:[0-9][0-9_]*)?(?:\.[0-9_]*)?[eE][-+]?[0-9]+$/, + resolve: (str) => parseFloat(str.replace(/_/g, '')), + stringify(node) { + const num = Number(node.value); + return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node); + } +}; +const float = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + test: /^[-+]?(?:[0-9][0-9_]*)?\.[0-9_]*$/, + resolve(str) { + const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, ''))); + const dot = str.indexOf('.'); + if (dot !== -1) { + const f = str.substring(dot + 1).replace(/_/g, ''); + if (f[f.length - 1] === '0') + node.minFractionDigits = f.length; + } + return node; + }, + stringify: stringifyNumber.stringifyNumber +}; + +exports.float = float; +exports.floatExp = floatExp; +exports.floatNaN = floatNaN; + + +/***/ }), + +/***/ 9503: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyNumber = __nccwpck_require__(4174); + +const intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value); +function intResolve(str, offset, radix, { intAsBigInt }) { + const sign = str[0]; + if (sign === '-' || sign === '+') + offset += 1; + str = str.substring(offset).replace(/_/g, ''); + if (intAsBigInt) { + switch (radix) { + case 2: + str = `0b${str}`; + break; + case 8: + str = `0o${str}`; + break; + case 16: + str = `0x${str}`; + break; + } + const n = BigInt(str); + return sign === '-' ? BigInt(-1) * n : n; + } + const n = parseInt(str, radix); + return sign === '-' ? -1 * n : n; +} +function intStringify(node, radix, prefix) { + const { value } = node; + if (intIdentify(value)) { + const str = value.toString(radix); + return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str; + } + return stringifyNumber.stringifyNumber(node); +} +const intBin = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'BIN', + test: /^[-+]?0b[0-1_]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt), + stringify: node => intStringify(node, 2, '0b') +}; +const intOct = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'OCT', + test: /^[-+]?0[0-7_]+$/, + resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt), + stringify: node => intStringify(node, 8, '0') +}; +const int = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + test: /^[-+]?[0-9][0-9_]*$/, + resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt), + stringify: stringifyNumber.stringifyNumber +}; +const intHex = { + identify: intIdentify, + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'HEX', + test: /^[-+]?0x[0-9a-fA-F_]+$/, + resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt), + stringify: node => intStringify(node, 16, '0x') +}; + +exports.int = int; +exports.intBin = intBin; +exports.intHex = intHex; +exports.intOct = intOct; + + +/***/ }), + +/***/ 9614: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); + +// If the value associated with a merge key is a single mapping node, each of +// its key/value pairs is inserted into the current mapping, unless the key +// already exists in it. If the value associated with the merge key is a +// sequence, then this sequence is expected to contain mapping nodes and each +// of these nodes is merged in turn according to its order in the sequence. +// Keys in mapping nodes earlier in the sequence override keys specified in +// later mapping nodes. -- http://yaml.org/type/merge.html +const MERGE_KEY = '<<'; +const merge = { + identify: value => value === MERGE_KEY || + (typeof value === 'symbol' && value.description === MERGE_KEY), + default: 'key', + tag: 'tag:yaml.org,2002:merge', + test: /^<<$/, + resolve: () => Object.assign(new Scalar.Scalar(Symbol(MERGE_KEY)), { + addToJSMap: addMergeToJSMap + }), + stringify: () => MERGE_KEY +}; +const isMergeKey = (ctx, key) => (merge.identify(key) || + (identity.isScalar(key) && + (!key.type || key.type === Scalar.Scalar.PLAIN) && + merge.identify(key.value))) && + ctx?.doc.schema.tags.some(tag => tag.tag === merge.tag && tag.default); +function addMergeToJSMap(ctx, map, value) { + value = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value; + if (identity.isSeq(value)) + for (const it of value.items) + mergeValue(ctx, map, it); + else if (Array.isArray(value)) + for (const it of value) + mergeValue(ctx, map, it); + else + mergeValue(ctx, map, value); +} +function mergeValue(ctx, map, value) { + const source = ctx && identity.isAlias(value) ? 
value.resolve(ctx.doc) : value; + if (!identity.isMap(source)) + throw new Error('Merge sources must be maps or map aliases'); + const srcMap = source.toJSON(null, ctx, Map); + for (const [key, value] of srcMap) { + if (map instanceof Map) { + if (!map.has(key)) + map.set(key, value); + } + else if (map instanceof Set) { + map.add(key); + } + else if (!Object.prototype.hasOwnProperty.call(map, key)) { + Object.defineProperty(map, key, { + value, + writable: true, + enumerable: true, + configurable: true + }); + } + } + return map; +} + +exports.addMergeToJSMap = addMergeToJSMap; +exports.isMergeKey = isMergeKey; +exports.merge = merge; + + +/***/ }), + +/***/ 8974: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var toJS = __nccwpck_require__(2463); +var YAMLMap = __nccwpck_require__(6011); +var YAMLSeq = __nccwpck_require__(5161); +var pairs = __nccwpck_require__(9841); + +class YAMLOMap extends YAMLSeq.YAMLSeq { + constructor() { + super(); + this.add = YAMLMap.YAMLMap.prototype.add.bind(this); + this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this); + this.get = YAMLMap.YAMLMap.prototype.get.bind(this); + this.has = YAMLMap.YAMLMap.prototype.has.bind(this); + this.set = YAMLMap.YAMLMap.prototype.set.bind(this); + this.tag = YAMLOMap.tag; + } + /** + * If `ctx` is given, the return type is actually `Map`, + * but TypeScript won't allow widening the signature of a child method. + */ + toJSON(_, ctx) { + if (!ctx) + return super.toJSON(_); + const map = new Map(); + if (ctx?.onCreate) + ctx.onCreate(map); + for (const pair of this.items) { + let key, value; + if (identity.isPair(pair)) { + key = toJS.toJS(pair.key, '', ctx); + value = toJS.toJS(pair.value, key, ctx); + } + else { + key = toJS.toJS(pair, '', ctx); + } + if (map.has(key)) + throw new Error('Ordered maps must not include duplicate keys'); + map.set(key, value); + } + return map; + } + static from(schema, iterable, ctx) { + const pairs$1 = pairs.createPairs(schema, iterable, ctx); + const omap = new this(); + omap.items = pairs$1.items; + return omap; + } +} +YAMLOMap.tag = 'tag:yaml.org,2002:omap'; +const omap = { + collection: 'seq', + identify: value => value instanceof Map, + nodeClass: YAMLOMap, + default: false, + tag: 'tag:yaml.org,2002:omap', + resolve(seq, onError) { + const pairs$1 = pairs.resolvePairs(seq, onError); + const seenKeys = []; + for (const { key } of pairs$1.items) { + if (identity.isScalar(key)) { + if (seenKeys.includes(key.value)) { + onError(`Ordered maps must not include duplicate keys: ${key.value}`); + } + else { + seenKeys.push(key.value); + } + } + } + return Object.assign(new YAMLOMap(), pairs$1); + }, + createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx) +}; + +exports.YAMLOMap = YAMLOMap; +exports.omap = omap; + + +/***/ }), + +/***/ 9841: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var Scalar = __nccwpck_require__(9338); +var YAMLSeq = __nccwpck_require__(5161); + +function resolvePairs(seq, onError) { + if (identity.isSeq(seq)) { + for (let i = 0; i < seq.items.length; ++i) { + let item = seq.items[i]; + if (identity.isPair(item)) + continue; + else if (identity.isMap(item)) { + if (item.items.length > 1) + onError('Each pair must have its own sequence indicator'); + const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null)); + if 
(item.commentBefore) + pair.key.commentBefore = pair.key.commentBefore + ? `${item.commentBefore}\n${pair.key.commentBefore}` + : item.commentBefore; + if (item.comment) { + const cn = pair.value ?? pair.key; + cn.comment = cn.comment + ? `${item.comment}\n${cn.comment}` + : item.comment; + } + item = pair; + } + seq.items[i] = identity.isPair(item) ? item : new Pair.Pair(item); + } + } + else + onError('Expected a sequence for this tag'); + return seq; +} +function createPairs(schema, iterable, ctx) { + const { replacer } = ctx; + const pairs = new YAMLSeq.YAMLSeq(schema); + pairs.tag = 'tag:yaml.org,2002:pairs'; + let i = 0; + if (iterable && Symbol.iterator in Object(iterable)) + for (let it of iterable) { + if (typeof replacer === 'function') + it = replacer.call(iterable, String(i++), it); + let key, value; + if (Array.isArray(it)) { + if (it.length === 2) { + key = it[0]; + value = it[1]; + } + else + throw new TypeError(`Expected [key, value] tuple: ${it}`); + } + else if (it && it instanceof Object) { + const keys = Object.keys(it); + if (keys.length === 1) { + key = keys[0]; + value = it[key]; + } + else { + throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`); + } + } + else { + key = it; + } + pairs.items.push(Pair.createPair(key, value, ctx)); + } + return pairs; +} +const pairs = { + collection: 'seq', + default: false, + tag: 'tag:yaml.org,2002:pairs', + resolve: resolvePairs, + createNode: createPairs +}; + +exports.createPairs = createPairs; +exports.pairs = pairs; +exports.resolvePairs = resolvePairs; + + +/***/ }), + +/***/ 5389: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var map = __nccwpck_require__(83); +var _null = __nccwpck_require__(6703); +var seq = __nccwpck_require__(1693); +var string = __nccwpck_require__(2201); +var binary = __nccwpck_require__(5724); +var bool = __nccwpck_require__(2631); +var float = __nccwpck_require__(8035); +var int = __nccwpck_require__(9503); +var merge = __nccwpck_require__(9614); +var omap = __nccwpck_require__(8974); +var pairs = __nccwpck_require__(9841); +var set = __nccwpck_require__(7847); +var timestamp = __nccwpck_require__(1156); + +const schema = [ + map.map, + seq.seq, + string.string, + _null.nullTag, + bool.trueTag, + bool.falseTag, + int.intBin, + int.intOct, + int.int, + int.intHex, + float.floatNaN, + float.floatExp, + float.float, + binary.binary, + merge.merge, + omap.omap, + pairs.pairs, + set.set, + timestamp.intTime, + timestamp.floatTime, + timestamp.timestamp +]; + +exports.schema = schema; + + +/***/ }), + +/***/ 7847: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Pair = __nccwpck_require__(246); +var YAMLMap = __nccwpck_require__(6011); + +class YAMLSet extends YAMLMap.YAMLMap { + constructor(schema) { + super(schema); + this.tag = YAMLSet.tag; + } + add(key) { + let pair; + if (identity.isPair(key)) + pair = key; + else if (key && + typeof key === 'object' && + 'key' in key && + 'value' in key && + key.value === null) + pair = new Pair.Pair(key.key, null); + else + pair = new Pair.Pair(key, null); + const prev = YAMLMap.findPair(this.items, pair.key); + if (!prev) + this.items.push(pair); + } + /** + * If `keepPair` is `true`, returns the Pair matching `key`. + * Otherwise, returns the value of that Pair's key. + */ + get(key, keepPair) { + const pair = YAMLMap.findPair(this.items, key); + return !keepPair && identity.isPair(pair) + ? 
identity.isScalar(pair.key) + ? pair.key.value + : pair.key + : pair; + } + set(key, value) { + if (typeof value !== 'boolean') + throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`); + const prev = YAMLMap.findPair(this.items, key); + if (prev && !value) { + this.items.splice(this.items.indexOf(prev), 1); + } + else if (!prev && value) { + this.items.push(new Pair.Pair(key)); + } + } + toJSON(_, ctx) { + return super.toJSON(_, ctx, Set); + } + toString(ctx, onComment, onChompKeep) { + if (!ctx) + return JSON.stringify(this); + if (this.hasAllNullValues(true)) + return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep); + else + throw new Error('Set items must all have null values'); + } + static from(schema, iterable, ctx) { + const { replacer } = ctx; + const set = new this(schema); + if (iterable && Symbol.iterator in Object(iterable)) + for (let value of iterable) { + if (typeof replacer === 'function') + value = replacer.call(iterable, value, value); + set.items.push(Pair.createPair(value, null, ctx)); + } + return set; + } +} +YAMLSet.tag = 'tag:yaml.org,2002:set'; +const set = { + collection: 'map', + identify: value => value instanceof Set, + nodeClass: YAMLSet, + default: false, + tag: 'tag:yaml.org,2002:set', + createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx), + resolve(map, onError) { + if (identity.isMap(map)) { + if (map.hasAllNullValues(true)) + return Object.assign(new YAMLSet(), map); + else + onError('Set items must all have null values'); + } + else + onError('Expected a mapping for this tag'); + return map; + } +}; + +exports.YAMLSet = YAMLSet; +exports.set = set; + + +/***/ }), + +/***/ 1156: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var stringifyNumber = __nccwpck_require__(4174); + +/** Internal types handle bigint as number, because TS can't figure it out. */ +function parseSexagesimal(str, asBigInt) { + const sign = str[0]; + const parts = sign === '-' || sign === '+' ? str.substring(1) : str; + const num = (n) => asBigInt ? BigInt(n) : Number(n); + const res = parts + .replace(/_/g, '') + .split(':') + .reduce((res, p) => res * num(60) + num(p), num(0)); + return (sign === '-' ? num(-1) * res : res); +} +/** + * hhhh:mm:ss.sss + * + * Internal types handle bigint as number, because TS can't figure it out. 
+ */ +function stringifySexagesimal(node) { + let { value } = node; + let num = (n) => n; + if (typeof value === 'bigint') + num = n => BigInt(n); + else if (isNaN(value) || !isFinite(value)) + return stringifyNumber.stringifyNumber(node); + let sign = ''; + if (value < 0) { + sign = '-'; + value *= num(-1); + } + const _60 = num(60); + const parts = [value % _60]; // seconds, including ms + if (value < 60) { + parts.unshift(0); // at least one : is required + } + else { + value = (value - parts[0]) / _60; + parts.unshift(value % _60); // minutes + if (value >= 60) { + value = (value - parts[0]) / _60; + parts.unshift(value); // hours + } + } + return (sign + + parts + .map(n => String(n).padStart(2, '0')) + .join(':') + .replace(/000000\d*$/, '') // % 60 may introduce error + ); +} +const intTime = { + identify: value => typeof value === 'bigint' || Number.isInteger(value), + default: true, + tag: 'tag:yaml.org,2002:int', + format: 'TIME', + test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/, + resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt), + stringify: stringifySexagesimal +}; +const floatTime = { + identify: value => typeof value === 'number', + default: true, + tag: 'tag:yaml.org,2002:float', + format: 'TIME', + test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\.[0-9_]*$/, + resolve: str => parseSexagesimal(str, false), + stringify: stringifySexagesimal +}; +const timestamp = { + identify: value => value instanceof Date, + default: true, + tag: 'tag:yaml.org,2002:timestamp', + // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part + // may be omitted altogether, resulting in a date format. In such a case, the time part is + // assumed to be 00:00:00Z (start of day, UTC). + test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd + '(?:' + // time is optional + '(?:t|T|[ \\t]+)' + // t | T | whitespace + '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)? + '(?:[ \\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30 + ')?$'), + resolve(str) { + const match = str.match(timestamp.test); + if (!match) + throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd'); + const [, year, month, day, hour, minute, second] = match.map(Number); + const millisec = match[7] ? Number((match[7] + '00').substr(1, 3)) : 0; + let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec); + const tz = match[8]; + if (tz && tz !== 'Z') { + let d = parseSexagesimal(tz, false); + if (Math.abs(d) < 30) + d *= 60; + date -= 60000 * d; + } + return new Date(date); + }, + stringify: ({ value }) => value.toISOString().replace(/(T00:00:00)?\.000Z$/, '') +}; + +exports.floatTime = floatTime; +exports.intTime = intTime; +exports.timestamp = timestamp; + + +/***/ }), + +/***/ 2889: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +const FOLD_FLOW = 'flow'; +const FOLD_BLOCK = 'block'; +const FOLD_QUOTED = 'quoted'; +/** + * Tries to keep input at up to `lineWidth` characters, splitting only on spaces + * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are + * terminated with `\n` and started with `indent`. 
+ */ +function foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) { + if (!lineWidth || lineWidth < 0) + return text; + if (lineWidth < minContentWidth) + minContentWidth = 0; + const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length); + if (text.length <= endStep) + return text; + const folds = []; + const escapedFolds = {}; + let end = lineWidth - indent.length; + if (typeof indentAtStart === 'number') { + if (indentAtStart > lineWidth - Math.max(2, minContentWidth)) + folds.push(0); + else + end = lineWidth - indentAtStart; + } + let split = undefined; + let prev = undefined; + let overflow = false; + let i = -1; + let escStart = -1; + let escEnd = -1; + if (mode === FOLD_BLOCK) { + i = consumeMoreIndentedLines(text, i, indent.length); + if (i !== -1) + end = i + endStep; + } + for (let ch; (ch = text[(i += 1)]);) { + if (mode === FOLD_QUOTED && ch === '\\') { + escStart = i; + switch (text[i + 1]) { + case 'x': + i += 3; + break; + case 'u': + i += 5; + break; + case 'U': + i += 9; + break; + default: + i += 1; + } + escEnd = i; + } + if (ch === '\n') { + if (mode === FOLD_BLOCK) + i = consumeMoreIndentedLines(text, i, indent.length); + end = i + indent.length + endStep; + split = undefined; + } + else { + if (ch === ' ' && + prev && + prev !== ' ' && + prev !== '\n' && + prev !== '\t') { + // space surrounded by non-space can be replaced with newline + indent + const next = text[i + 1]; + if (next && next !== ' ' && next !== '\n' && next !== '\t') + split = i; + } + if (i >= end) { + if (split) { + folds.push(split); + end = split + endStep; + split = undefined; + } + else if (mode === FOLD_QUOTED) { + // white-space collected at end may stretch past lineWidth + while (prev === ' ' || prev === '\t') { + prev = ch; + ch = text[(i += 1)]; + overflow = true; + } + // Account for newline escape, but don't break preceding escape + const j = i > escEnd + 1 ? 
i - 2 : escStart - 1; + // Bail out if lineWidth & minContentWidth are shorter than an escape string + if (escapedFolds[j]) + return text; + folds.push(j); + escapedFolds[j] = true; + end = j + endStep; + split = undefined; + } + else { + overflow = true; + } + } + } + prev = ch; + } + if (overflow && onOverflow) + onOverflow(); + if (folds.length === 0) + return text; + if (onFold) + onFold(); + let res = text.slice(0, folds[0]); + for (let i = 0; i < folds.length; ++i) { + const fold = folds[i]; + const end = folds[i + 1] || text.length; + if (fold === 0) + res = `\n${indent}${text.slice(0, end)}`; + else { + if (mode === FOLD_QUOTED && escapedFolds[fold]) + res += `${text[fold]}\\`; + res += `\n${indent}${text.slice(fold + 1, end)}`; + } + } + return res; +} +/** + * Presumes `i + 1` is at the start of a line + * @returns index of last newline in more-indented block + */ +function consumeMoreIndentedLines(text, i, indent) { + let end = i; + let start = i + 1; + let ch = text[start]; + while (ch === ' ' || ch === '\t') { + if (i < start + indent) { + ch = text[++i]; + } + else { + do { + ch = text[++i]; + } while (ch && ch !== '\n'); + end = i; + start = i + 1; + ch = text[start]; + } + } + return end; +} + +exports.FOLD_BLOCK = FOLD_BLOCK; +exports.FOLD_FLOW = FOLD_FLOW; +exports.FOLD_QUOTED = FOLD_QUOTED; +exports.foldFlowLines = foldFlowLines; + + +/***/ }), + +/***/ 8409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var anchors = __nccwpck_require__(8459); +var identity = __nccwpck_require__(5589); +var stringifyComment = __nccwpck_require__(5182); +var stringifyString = __nccwpck_require__(6226); + +function createStringifyContext(doc, options) { + const opt = Object.assign({ + blockQuote: true, + commentString: stringifyComment.stringifyComment, + defaultKeyType: null, + defaultStringType: 'PLAIN', + directives: null, + doubleQuotedAsJSON: false, + doubleQuotedMinMultiLineLength: 40, + falseStr: 'false', + flowCollectionPadding: true, + indentSeq: true, + lineWidth: 80, + minContentWidth: 20, + nullStr: 'null', + simpleKeys: false, + singleQuote: null, + trueStr: 'true', + verifyAliasOrder: true + }, doc.schema.toStringOptions, options); + let inFlow; + switch (opt.collectionStyle) { + case 'block': + inFlow = false; + break; + case 'flow': + inFlow = true; + break; + default: + inFlow = null; + } + return { + anchors: new Set(), + doc, + flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '', + indent: '', + indentStep: typeof opt.indent === 'number' ? ' '.repeat(opt.indent) : ' ', + inFlow, + options: opt + }; +} +function getTagObject(tags, item) { + if (item.tag) { + const match = tags.filter(t => t.tag === item.tag); + if (match.length > 0) + return match.find(t => t.format === item.format) ?? match[0]; + } + let tagObj = undefined; + let obj; + if (identity.isScalar(item)) { + obj = item.value; + let match = tags.filter(t => t.identify?.(obj)); + if (match.length > 1) { + const testMatch = match.filter(t => t.test); + if (testMatch.length > 0) + match = testMatch; + } + tagObj = + match.find(t => t.format === item.format) ?? match.find(t => !t.format); + } + else { + obj = item; + tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass); + } + if (!tagObj) { + const name = obj?.constructor?.name ?? 
typeof obj; + throw new Error(`Tag not resolved for ${name} value`); + } + return tagObj; +} +// needs to be called before value stringifier to allow for circular anchor refs +function stringifyProps(node, tagObj, { anchors: anchors$1, doc }) { + if (!doc.directives) + return ''; + const props = []; + const anchor = (identity.isScalar(node) || identity.isCollection(node)) && node.anchor; + if (anchor && anchors.anchorIsValid(anchor)) { + anchors$1.add(anchor); + props.push(`&${anchor}`); + } + const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag; + if (tag) + props.push(doc.directives.tagString(tag)); + return props.join(' '); +} +function stringify(item, ctx, onComment, onChompKeep) { + if (identity.isPair(item)) + return item.toString(ctx, onComment, onChompKeep); + if (identity.isAlias(item)) { + if (ctx.doc.directives) + return item.toString(ctx); + if (ctx.resolvedAliases?.has(item)) { + throw new TypeError(`Cannot stringify circular structure without alias nodes`); + } + else { + if (ctx.resolvedAliases) + ctx.resolvedAliases.add(item); + else + ctx.resolvedAliases = new Set([item]); + item = item.resolve(ctx.doc); + } + } + let tagObj = undefined; + const node = identity.isNode(item) + ? item + : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) }); + if (!tagObj) + tagObj = getTagObject(ctx.doc.schema.tags, node); + const props = stringifyProps(node, tagObj, ctx); + if (props.length > 0) + ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1; + const str = typeof tagObj.stringify === 'function' + ? tagObj.stringify(node, ctx, onComment, onChompKeep) + : identity.isScalar(node) + ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep) + : node.toString(ctx, onComment, onChompKeep); + if (!props) + return str; + return identity.isScalar(node) || str[0] === '{' || str[0] === '[' + ? `${props} ${str}` + : `${props}\n${ctx.indent}${str}`; +} + +exports.createStringifyContext = createStringifyContext; +exports.stringify = stringify; + + +/***/ }), + +/***/ 2466: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var stringify = __nccwpck_require__(8409); +var stringifyComment = __nccwpck_require__(5182); + +function stringifyCollection(collection, ctx, options) { + const flow = ctx.inFlow ?? collection.flow; + const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection; + return stringify(collection, ctx, options); +} +function stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) { + const { indent, options: { commentString } } = ctx; + const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null }); + let chompKeep = false; // flag for the preceding node's status + const lines = []; + for (let i = 0; i < items.length; ++i) { + const item = items[i]; + let comment = null; + if (identity.isNode(item)) { + if (!chompKeep && item.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, item.commentBefore, chompKeep); + if (item.comment) + comment = item.comment; + } + else if (identity.isPair(item)) { + const ik = identity.isNode(item.key) ? 
item.key : null; + if (ik) { + if (!chompKeep && ik.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, ik.commentBefore, chompKeep); + } + } + chompKeep = false; + let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true)); + if (comment) + str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); + if (chompKeep && comment) + chompKeep = false; + lines.push(blockItemPrefix + str); + } + let str; + if (lines.length === 0) { + str = flowChars.start + flowChars.end; + } + else { + str = lines[0]; + for (let i = 1; i < lines.length; ++i) { + const line = lines[i]; + str += line ? `\n${indent}${line}` : '\n'; + } + } + if (comment) { + str += '\n' + stringifyComment.indentComment(commentString(comment), indent); + if (onComment) + onComment(); + } + else if (chompKeep && onChompKeep) + onChompKeep(); + return str; +} +function stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) { + const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx; + itemIndent += indentStep; + const itemCtx = Object.assign({}, ctx, { + indent: itemIndent, + inFlow: true, + type: null + }); + let reqNewline = false; + let linesAtValue = 0; + const lines = []; + for (let i = 0; i < items.length; ++i) { + const item = items[i]; + let comment = null; + if (identity.isNode(item)) { + if (item.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, item.commentBefore, false); + if (item.comment) + comment = item.comment; + } + else if (identity.isPair(item)) { + const ik = identity.isNode(item.key) ? item.key : null; + if (ik) { + if (ik.spaceBefore) + lines.push(''); + addCommentBefore(ctx, lines, ik.commentBefore, false); + if (ik.comment) + reqNewline = true; + } + const iv = identity.isNode(item.value) ? item.value : null; + if (iv) { + if (iv.comment) + comment = iv.comment; + if (iv.commentBefore) + reqNewline = true; + } + else if (item.value == null && ik?.comment) { + comment = ik.comment; + } + } + if (comment) + reqNewline = true; + let str = stringify.stringify(item, itemCtx, () => (comment = null)); + if (i < items.length - 1) + str += ','; + if (comment) + str += stringifyComment.lineComment(str, itemIndent, commentString(comment)); + if (!reqNewline && (lines.length > linesAtValue || str.includes('\n'))) + reqNewline = true; + lines.push(str); + linesAtValue = lines.length; + } + const { start, end } = flowChars; + if (lines.length === 0) { + return start + end; + } + else { + if (!reqNewline) { + const len = lines.reduce((sum, line) => sum + line.length + 2, 2); + reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth; + } + if (reqNewline) { + let str = start; + for (const line of lines) + str += line ? `\n${indentStep}${indent}${line}` : '\n'; + return `${str}\n${indent}${end}`; + } + else { + return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`; + } + } +} +function addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) { + if (comment && chompKeep) + comment = comment.replace(/^\n+/, ''); + if (comment) { + const ic = stringifyComment.indentComment(commentString(comment), indent); + lines.push(ic.trimStart()); // Avoid double indent on first line + } +} + +exports.stringifyCollection = stringifyCollection; + + +/***/ }), + +/***/ 5182: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +/** + * Stringifies a comment. 
+ * + * Empty comment lines are left empty, + * lines consisting of a single space are replaced by `#`, + * and all other lines are prefixed with a `#`. + */ +const stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#'); +function indentComment(comment, indent) { + if (/^\n+$/.test(comment)) + return comment.substring(1); + return indent ? comment.replace(/^(?! *$)/gm, indent) : comment; +} +const lineComment = (str, indent, comment) => str.endsWith('\n') + ? indentComment(comment, indent) + : comment.includes('\n') + ? '\n' + indentComment(comment, indent) + : (str.endsWith(' ') ? '' : ' ') + comment; + +exports.indentComment = indentComment; +exports.lineComment = lineComment; +exports.stringifyComment = stringifyComment; + + +/***/ }), + +/***/ 5225: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var stringify = __nccwpck_require__(8409); +var stringifyComment = __nccwpck_require__(5182); + +function stringifyDocument(doc, options) { + const lines = []; + let hasDirectives = options.directives === true; + if (options.directives !== false && doc.directives) { + const dir = doc.directives.toString(doc); + if (dir) { + lines.push(dir); + hasDirectives = true; + } + else if (doc.directives.docStart) + hasDirectives = true; + } + if (hasDirectives) + lines.push('---'); + const ctx = stringify.createStringifyContext(doc, options); + const { commentString } = ctx.options; + if (doc.commentBefore) { + if (lines.length !== 1) + lines.unshift(''); + const cs = commentString(doc.commentBefore); + lines.unshift(stringifyComment.indentComment(cs, '')); + } + let chompKeep = false; + let contentComment = null; + if (doc.contents) { + if (identity.isNode(doc.contents)) { + if (doc.contents.spaceBefore && hasDirectives) + lines.push(''); + if (doc.contents.commentBefore) { + const cs = commentString(doc.contents.commentBefore); + lines.push(stringifyComment.indentComment(cs, '')); + } + // top-level block scalars need to be indented if followed by a comment + ctx.forceBlockIndent = !!doc.comment; + contentComment = doc.contents.comment; + } + const onChompKeep = contentComment ? undefined : () => (chompKeep = true); + let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep); + if (contentComment) + body += stringifyComment.lineComment(body, '', commentString(contentComment)); + if ((body[0] === '|' || body[0] === '>') && + lines[lines.length - 1] === '---') { + // Top-level block scalars with a preceding doc marker ought to use the + // same line for their header. + lines[lines.length - 1] = `--- ${body}`; + } + else + lines.push(body); + } + else { + lines.push(stringify.stringify(doc.contents, ctx)); + } + if (doc.directives?.docEnd) { + if (doc.comment) { + const cs = commentString(doc.comment); + if (cs.includes('\n')) { + lines.push('...'); + lines.push(stringifyComment.indentComment(cs, '')); + } + else { + lines.push(`... 
${cs}`); + } + } + else { + lines.push('...'); + } + } + else { + let dc = doc.comment; + if (dc && chompKeep) + dc = dc.replace(/^\n+/, ''); + if (dc) { + if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '') + lines.push(''); + lines.push(stringifyComment.indentComment(commentString(dc), '')); + } + } + return lines.join('\n') + '\n'; +} + +exports.stringifyDocument = stringifyDocument; + + +/***/ }), + +/***/ 4174: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +function stringifyNumber({ format, minFractionDigits, tag, value }) { + if (typeof value === 'bigint') + return String(value); + const num = typeof value === 'number' ? value : Number(value); + if (!isFinite(num)) + return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf'; + let n = JSON.stringify(value); + if (!format && + minFractionDigits && + (!tag || tag === 'tag:yaml.org,2002:float') && + /^\d/.test(n)) { + let i = n.indexOf('.'); + if (i < 0) { + i = n.length; + n += '.'; + } + let d = minFractionDigits - (n.length - i - 1); + while (d-- > 0) + n += '0'; + } + return n; +} + +exports.stringifyNumber = stringifyNumber; + + +/***/ }), + +/***/ 4875: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); +var Scalar = __nccwpck_require__(9338); +var stringify = __nccwpck_require__(8409); +var stringifyComment = __nccwpck_require__(5182); + +function stringifyPair({ key, value }, ctx, onComment, onChompKeep) { + const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx; + let keyComment = (identity.isNode(key) && key.comment) || null; + if (simpleKeys) { + if (keyComment) { + throw new Error('With simple keys, key nodes cannot have comments'); + } + if (identity.isCollection(key) || (!identity.isNode(key) && typeof key === 'object')) { + const msg = 'With simple keys, collection cannot be used as a key value'; + throw new Error(msg); + } + } + let explicitKey = !simpleKeys && + (!key || + (keyComment && value == null && !ctx.inFlow) || + identity.isCollection(key) || + (identity.isScalar(key) + ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL + : typeof key === 'object')); + ctx = Object.assign({}, ctx, { + allNullValues: false, + implicitKey: !explicitKey && (simpleKeys || !allNullValues), + indent: indent + indentStep + }); + let keyCommentDone = false; + let chompKeep = false; + let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true)); + if (!explicitKey && !ctx.inFlow && str.length > 1024) { + if (simpleKeys) + throw new Error('With simple keys, single line scalar must not span more than 1024 characters'); + explicitKey = true; + } + if (ctx.inFlow) { + if (allNullValues || value == null) { + if (keyCommentDone && onComment) + onComment(); + return str === '' ? '?' : explicitKey ? `? ${str}` : str; + } + } + else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) { + str = `? ${str}`; + if (keyComment && !keyCommentDone) { + str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); + } + else if (chompKeep && onChompKeep) + onChompKeep(); + return str; + } + if (keyCommentDone) + keyComment = null; + if (explicitKey) { + if (keyComment) + str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); + str = `? 
${str}\n${indent}:`; + } + else { + str = `${str}:`; + if (keyComment) + str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment)); + } + let vsb, vcb, valueComment; + if (identity.isNode(value)) { + vsb = !!value.spaceBefore; + vcb = value.commentBefore; + valueComment = value.comment; + } + else { + vsb = false; + vcb = null; + valueComment = null; + if (value && typeof value === 'object') + value = doc.createNode(value); + } + ctx.implicitKey = false; + if (!explicitKey && !keyComment && identity.isScalar(value)) + ctx.indentAtStart = str.length + 1; + chompKeep = false; + if (!indentSeq && + indentStep.length >= 2 && + !ctx.inFlow && + !explicitKey && + identity.isSeq(value) && + !value.flow && + !value.tag && + !value.anchor) { + // If indentSeq === false, consider '- ' as part of indentation where possible + ctx.indent = ctx.indent.substring(2); + } + let valueCommentDone = false; + const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true)); + let ws = ' '; + if (keyComment || vsb || vcb) { + ws = vsb ? '\n' : ''; + if (vcb) { + const cs = commentString(vcb); + ws += `\n${stringifyComment.indentComment(cs, ctx.indent)}`; + } + if (valueStr === '' && !ctx.inFlow) { + if (ws === '\n') + ws = '\n\n'; + } + else { + ws += `\n${ctx.indent}`; + } + } + else if (!explicitKey && identity.isCollection(value)) { + const vs0 = valueStr[0]; + const nl0 = valueStr.indexOf('\n'); + const hasNewline = nl0 !== -1; + const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0; + if (hasNewline || !flow) { + let hasPropsLine = false; + if (hasNewline && (vs0 === '&' || vs0 === '!')) { + let sp0 = valueStr.indexOf(' '); + if (vs0 === '&' && + sp0 !== -1 && + sp0 < nl0 && + valueStr[sp0 + 1] === '!') { + sp0 = valueStr.indexOf(' ', sp0 + 1); + } + if (sp0 === -1 || nl0 < sp0) + hasPropsLine = true; + } + if (!hasPropsLine) + ws = `\n${ctx.indent}`; + } + } + else if (valueStr === '' || valueStr[0] === '\n') { + ws = ''; + } + str += ws + valueStr; + if (ctx.inFlow) { + if (valueCommentDone && onComment) + onComment(); + } + else if (valueComment && !valueCommentDone) { + str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment)); + } + else if (chompKeep && onChompKeep) { + onChompKeep(); + } + return str; +} + +exports.stringifyPair = stringifyPair; + + +/***/ }), + +/***/ 6226: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var Scalar = __nccwpck_require__(9338); +var foldFlowLines = __nccwpck_require__(2889); + +const getFoldOptions = (ctx, isBlock) => ({ + indentAtStart: isBlock ? ctx.indent.length : ctx.indentAtStart, + lineWidth: ctx.options.lineWidth, + minContentWidth: ctx.options.minContentWidth +}); +// Also checks for lines starting with %, as parsing the output as YAML 1.1 will +// presume that's starting a new document. 
+const containsDocumentMarker = (str) => /^(%|---|\.\.\.)/m.test(str); +function lineLengthOverLimit(str, lineWidth, indentLength) { + if (!lineWidth || lineWidth < 0) + return false; + const limit = lineWidth - indentLength; + const strLen = str.length; + if (strLen <= limit) + return false; + for (let i = 0, start = 0; i < strLen; ++i) { + if (str[i] === '\n') { + if (i - start > limit) + return true; + start = i + 1; + if (strLen - start <= limit) + return false; + } + } + return true; +} +function doubleQuotedString(value, ctx) { + const json = JSON.stringify(value); + if (ctx.options.doubleQuotedAsJSON) + return json; + const { implicitKey } = ctx; + const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength; + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + let str = ''; + let start = 0; + for (let i = 0, ch = json[i]; ch; ch = json[++i]) { + if (ch === ' ' && json[i + 1] === '\\' && json[i + 2] === 'n') { + // space before newline needs to be escaped to not be folded + str += json.slice(start, i) + '\\ '; + i += 1; + start = i; + ch = '\\'; + } + if (ch === '\\') + switch (json[i + 1]) { + case 'u': + { + str += json.slice(start, i); + const code = json.substr(i + 2, 4); + switch (code) { + case '0000': + str += '\\0'; + break; + case '0007': + str += '\\a'; + break; + case '000b': + str += '\\v'; + break; + case '001b': + str += '\\e'; + break; + case '0085': + str += '\\N'; + break; + case '00a0': + str += '\\_'; + break; + case '2028': + str += '\\L'; + break; + case '2029': + str += '\\P'; + break; + default: + if (code.substr(0, 2) === '00') + str += '\\x' + code.substr(2); + else + str += json.substr(i, 6); + } + i += 5; + start = i + 1; + } + break; + case 'n': + if (implicitKey || + json[i + 2] === '"' || + json.length < minMultiLineLength) { + i += 1; + } + else { + // folding will eat first newline + str += json.slice(start, i) + '\n\n'; + while (json[i + 2] === '\\' && + json[i + 3] === 'n' && + json[i + 4] !== '"') { + str += '\n'; + i += 2; + } + str += indent; + // space after newline needs to be escaped to not be folded + if (json[i + 2] === ' ') + str += '\\'; + i += 1; + start = i + 1; + } + break; + default: + i += 1; + } + } + str = start ? str + json.slice(start) : json; + return implicitKey + ? str + : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false)); +} +function singleQuotedString(value, ctx) { + if (ctx.options.singleQuote === false || + (ctx.implicitKey && value.includes('\n')) || + /[ \t]\n|\n[ \t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline + ) + return doubleQuotedString(value, ctx); + const indent = ctx.indent || (containsDocumentMarker(value) ? ' ' : ''); + const res = "'" + value.replace(/'/g, "''").replace(/\n+/g, `$&\n${indent}`) + "'"; + return ctx.implicitKey + ? res + : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); +} +function quotedString(value, ctx) { + const { singleQuote } = ctx.options; + let qs; + if (singleQuote === false) + qs = doubleQuotedString; + else { + const hasDouble = value.includes('"'); + const hasSingle = value.includes("'"); + if (hasDouble && !hasSingle) + qs = singleQuotedString; + else if (hasSingle && !hasDouble) + qs = doubleQuotedString; + else + qs = singleQuote ? 
singleQuotedString : doubleQuotedString; + } + return qs(value, ctx); +} +// The negative lookbehind avoids a polynomial search, +// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind +let blockEndNewlines; +try { + blockEndNewlines = new RegExp('(^|(?\n'; + // determine chomping from whitespace at value end + let chomp; + let endStart; + for (endStart = value.length; endStart > 0; --endStart) { + const ch = value[endStart - 1]; + if (ch !== '\n' && ch !== '\t' && ch !== ' ') + break; + } + let end = value.substring(endStart); + const endNlPos = end.indexOf('\n'); + if (endNlPos === -1) { + chomp = '-'; // strip + } + else if (value === end || endNlPos !== end.length - 1) { + chomp = '+'; // keep + if (onChompKeep) + onChompKeep(); + } + else { + chomp = ''; // clip + } + if (end) { + value = value.slice(0, -end.length); + if (end[end.length - 1] === '\n') + end = end.slice(0, -1); + end = end.replace(blockEndNewlines, `$&${indent}`); + } + // determine indent indicator from whitespace at value start + let startWithSpace = false; + let startEnd; + let startNlPos = -1; + for (startEnd = 0; startEnd < value.length; ++startEnd) { + const ch = value[startEnd]; + if (ch === ' ') + startWithSpace = true; + else if (ch === '\n') + startNlPos = startEnd; + else + break; + } + let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd); + if (start) { + value = value.substring(start.length); + start = start.replace(/\n+/g, `$&${indent}`); + } + const indentSize = indent ? '2' : '1'; // root is at -1 + // Leading | or > is added later + let header = (startWithSpace ? indentSize : '') + chomp; + if (comment) { + header += ' ' + commentString(comment.replace(/ ?[\r\n]+/g, ' ')); + if (onComment) + onComment(); + } + if (!literal) { + const foldedValue = value + .replace(/\n+/g, '\n$&') + .replace(/(?:^|\n)([\t ].*)(?:([\n\t ]*)\n(?![\n\t ]))?/g, '$1$2') // more-indented lines aren't folded + // ^ more-ind. ^ empty ^ capture next empty lines only at end of indent + .replace(/\n+/g, `$&${indent}`); + let literalFallback = false; + const foldOptions = getFoldOptions(ctx, true); + if (blockQuote !== 'folded' && type !== Scalar.Scalar.BLOCK_FOLDED) { + foldOptions.onOverflow = () => { + literalFallback = true; + }; + } + const body = foldFlowLines.foldFlowLines(`${start}${foldedValue}${end}`, indent, foldFlowLines.FOLD_BLOCK, foldOptions); + if (!literalFallback) + return `>${header}\n${indent}${body}`; + } + value = value.replace(/\n+/g, `$&${indent}`); + return `|${header}\n${indent}${start}${value}${end}`; +} +function plainString(item, ctx, onComment, onChompKeep) { + const { type, value } = item; + const { actualString, implicitKey, indent, indentStep, inFlow } = ctx; + if ((implicitKey && value.includes('\n')) || + (inFlow && /[[\]{},]/.test(value))) { + return quotedString(value, ctx); + } + if (!value || + /^[\n\t ,[\]{}#&*!|>'"%@`]|^[?-]$|^[?-][ \t]|[\n:][ \t]|[ \t]\n|[\n\t ]#|[\n\t :]$/.test(value)) { + // not allowed: + // - empty string, '-' or '?' + // - start with an indicator character (except [?:-]) or /[?-] / + // - '\n ', ': ' or ' \n' anywhere + // - '#' not preceded by a non-space char + // - end with ' ' or ':' + return implicitKey || inFlow || !value.includes('\n') + ? 
quotedString(value, ctx) + : blockString(item, ctx, onComment, onChompKeep); + } + if (!implicitKey && + !inFlow && + type !== Scalar.Scalar.PLAIN && + value.includes('\n')) { + // Where allowed & type not set explicitly, prefer block style for multiline strings + return blockString(item, ctx, onComment, onChompKeep); + } + if (containsDocumentMarker(value)) { + if (indent === '') { + ctx.forceBlockIndent = true; + return blockString(item, ctx, onComment, onChompKeep); + } + else if (implicitKey && indent === indentStep) { + return quotedString(value, ctx); + } + } + const str = value.replace(/\n+/g, `$&\n${indent}`); + // Verify that output will be parsed as a string, as e.g. plain numbers and + // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'), + // and others in v1.1. + if (actualString) { + const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str); + const { compat, tags } = ctx.doc.schema; + if (tags.some(test) || compat?.some(test)) + return quotedString(value, ctx); + } + return implicitKey + ? str + : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false)); +} +function stringifyString(item, ctx, onComment, onChompKeep) { + const { implicitKey, inFlow } = ctx; + const ss = typeof item.value === 'string' + ? item + : Object.assign({}, item, { value: String(item.value) }); + let { type } = item; + if (type !== Scalar.Scalar.QUOTE_DOUBLE) { + // force double quotes on control characters & unpaired surrogates + if (/[\x00-\x08\x0b-\x1f\x7f-\x9f\u{D800}-\u{DFFF}]/u.test(ss.value)) + type = Scalar.Scalar.QUOTE_DOUBLE; + } + const _stringify = (_type) => { + switch (_type) { + case Scalar.Scalar.BLOCK_FOLDED: + case Scalar.Scalar.BLOCK_LITERAL: + return implicitKey || inFlow + ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers + : blockString(ss, ctx, onComment, onChompKeep); + case Scalar.Scalar.QUOTE_DOUBLE: + return doubleQuotedString(ss.value, ctx); + case Scalar.Scalar.QUOTE_SINGLE: + return singleQuotedString(ss.value, ctx); + case Scalar.Scalar.PLAIN: + return plainString(ss, ctx, onComment, onChompKeep); + default: + return null; + } + }; + let res = _stringify(type); + if (res === null) { + const { defaultKeyType, defaultStringType } = ctx.options; + const t = (implicitKey && defaultKeyType) || defaultStringType; + res = _stringify(t); + if (res === null) + throw new Error(`Unsupported default string type ${t}`); + } + return res; +} + +exports.stringifyString = stringifyString; + + +/***/ }), + +/***/ 6796: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +var identity = __nccwpck_require__(5589); + +const BREAK = Symbol('break visit'); +const SKIP = Symbol('skip children'); +const REMOVE = Symbol('remove node'); +/** + * Apply a visitor to an AST node or document. + * + * Walks through the tree (depth-first) starting from `node`, calling a + * `visitor` function with three arguments: + * - `key`: For sequence values and map `Pair`, the node's index in the + * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. + * `null` for the root node. + * - `node`: The current node. + * - `path`: The ancestry of the current node. 
+ * + * The return value of the visitor may be used to control the traversal: + * - `undefined` (default): Do nothing and continue + * - `visit.SKIP`: Do not visit the children of this node, continue with next + * sibling + * - `visit.BREAK`: Terminate traversal completely + * - `visit.REMOVE`: Remove the current node, then continue with the next one + * - `Node`: Replace the current node, then continue by visiting it + * - `number`: While iterating the items of a sequence or map, set the index + * of the next step. This is useful especially if the index of the current + * node has changed. + * + * If `visitor` is a single function, it will be called with all values + * encountered in the tree, including e.g. `null` values. Alternatively, + * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, + * `Alias` and `Scalar` node. To define the same visitor function for more than + * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) + * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most + * specific defined one will be used for each node. + */ +function visit(node, visitor) { + const visitor_ = initVisitor(visitor); + if (identity.isDocument(node)) { + const cd = visit_(null, node.contents, visitor_, Object.freeze([node])); + if (cd === REMOVE) + node.contents = null; + } + else + visit_(null, node, visitor_, Object.freeze([])); +} +// Without the `as symbol` casts, TS declares these in the `visit` +// namespace using `var`, but then complains about that because +// `unique symbol` must be `const`. +/** Terminate visit traversal completely */ +visit.BREAK = BREAK; +/** Do not visit the children of the current node */ +visit.SKIP = SKIP; +/** Remove the current node */ +visit.REMOVE = REMOVE; +function visit_(key, node, visitor, path) { + const ctrl = callVisitor(key, node, visitor, path); + if (identity.isNode(ctrl) || identity.isPair(ctrl)) { + replaceNode(key, path, ctrl); + return visit_(key, ctrl, visitor, path); + } + if (typeof ctrl !== 'symbol') { + if (identity.isCollection(node)) { + path = Object.freeze(path.concat(node)); + for (let i = 0; i < node.items.length; ++i) { + const ci = visit_(i, node.items[i], visitor, path); + if (typeof ci === 'number') + i = ci - 1; + else if (ci === BREAK) + return BREAK; + else if (ci === REMOVE) { + node.items.splice(i, 1); + i -= 1; + } + } + } + else if (identity.isPair(node)) { + path = Object.freeze(path.concat(node)); + const ck = visit_('key', node.key, visitor, path); + if (ck === BREAK) + return BREAK; + else if (ck === REMOVE) + node.key = null; + const cv = visit_('value', node.value, visitor, path); + if (cv === BREAK) + return BREAK; + else if (cv === REMOVE) + node.value = null; + } + } + return ctrl; +} +/** + * Apply an async visitor to an AST node or document. + * + * Walks through the tree (depth-first) starting from `node`, calling a + * `visitor` function with three arguments: + * - `key`: For sequence values and map `Pair`, the node's index in the + * collection. Within a `Pair`, `'key'` or `'value'`, correspondingly. + * `null` for the root node. + * - `node`: The current node. + * - `path`: The ancestry of the current node. 
+ * + * The return value of the visitor may be used to control the traversal: + * - `Promise`: Must resolve to one of the following values + * - `undefined` (default): Do nothing and continue + * - `visit.SKIP`: Do not visit the children of this node, continue with next + * sibling + * - `visit.BREAK`: Terminate traversal completely + * - `visit.REMOVE`: Remove the current node, then continue with the next one + * - `Node`: Replace the current node, then continue by visiting it + * - `number`: While iterating the items of a sequence or map, set the index + * of the next step. This is useful especially if the index of the current + * node has changed. + * + * If `visitor` is a single function, it will be called with all values + * encountered in the tree, including e.g. `null` values. Alternatively, + * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`, + * `Alias` and `Scalar` node. To define the same visitor function for more than + * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar) + * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most + * specific defined one will be used for each node. + */ +async function visitAsync(node, visitor) { + const visitor_ = initVisitor(visitor); + if (identity.isDocument(node)) { + const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node])); + if (cd === REMOVE) + node.contents = null; + } + else + await visitAsync_(null, node, visitor_, Object.freeze([])); +} +// Without the `as symbol` casts, TS declares these in the `visit` +// namespace using `var`, but then complains about that because +// `unique symbol` must be `const`. +/** Terminate visit traversal completely */ +visitAsync.BREAK = BREAK; +/** Do not visit the children of the current node */ +visitAsync.SKIP = SKIP; +/** Remove the current node */ +visitAsync.REMOVE = REMOVE; +async function visitAsync_(key, node, visitor, path) { + const ctrl = await callVisitor(key, node, visitor, path); + if (identity.isNode(ctrl) || identity.isPair(ctrl)) { + replaceNode(key, path, ctrl); + return visitAsync_(key, ctrl, visitor, path); + } + if (typeof ctrl !== 'symbol') { + if (identity.isCollection(node)) { + path = Object.freeze(path.concat(node)); + for (let i = 0; i < node.items.length; ++i) { + const ci = await visitAsync_(i, node.items[i], visitor, path); + if (typeof ci === 'number') + i = ci - 1; + else if (ci === BREAK) + return BREAK; + else if (ci === REMOVE) { + node.items.splice(i, 1); + i -= 1; + } + } + } + else if (identity.isPair(node)) { + path = Object.freeze(path.concat(node)); + const ck = await visitAsync_('key', node.key, visitor, path); + if (ck === BREAK) + return BREAK; + else if (ck === REMOVE) + node.key = null; + const cv = await visitAsync_('value', node.value, visitor, path); + if (cv === BREAK) + return BREAK; + else if (cv === REMOVE) + node.value = null; + } + } + return ctrl; +} +function initVisitor(visitor) { + if (typeof visitor === 'object' && + (visitor.Collection || visitor.Node || visitor.Value)) { + return Object.assign({ + Alias: visitor.Node, + Map: visitor.Node, + Scalar: visitor.Node, + Seq: visitor.Node + }, visitor.Value && { + Map: visitor.Value, + Scalar: visitor.Value, + Seq: visitor.Value + }, visitor.Collection && { + Map: visitor.Collection, + Seq: visitor.Collection + }, visitor); + } + return visitor; +} +function callVisitor(key, node, visitor, path) { + if (typeof visitor === 'function') + return visitor(key, node, path); + if (identity.isMap(node)) + 
return visitor.Map?.(key, node, path); + if (identity.isSeq(node)) + return visitor.Seq?.(key, node, path); + if (identity.isPair(node)) + return visitor.Pair?.(key, node, path); + if (identity.isScalar(node)) + return visitor.Scalar?.(key, node, path); + if (identity.isAlias(node)) + return visitor.Alias?.(key, node, path); + return undefined; +} +function replaceNode(key, path, node) { + const parent = path[path.length - 1]; + if (identity.isCollection(parent)) { + parent.items[key] = node; + } + else if (identity.isPair(parent)) { + if (key === 'key') + parent.key = node; + else + parent.value = node; + } + else if (identity.isDocument(parent)) { + parent.contents = node; + } + else { + const pt = identity.isAlias(parent) ? 'alias' : 'scalar'; + throw new Error(`Cannot replace node with ${pt} parent`); + } +} + +exports.visit = visit; +exports.visitAsync = visitAsync; + + /***/ }), /***/ 2020: diff --git a/dist/index.js.map b/dist/index.js.map index 9331344..0f25d7a 100644 --- a/dist/index.js.map +++ b/dist/index.js.map @@ -1 +1 @@ -{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpnIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7HA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;;;;;;;;;ACHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5zBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;;;;;;;;ACHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1VA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://open-ai-reviewer/./lib/main.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/command.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/core.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/file-command.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/path-utils.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/summary.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/utils.js","../webpack://open-ai-reviewer/./node_modules/@actions/http-client/lib/auth.js","../webpack://open-ai-reviewer/./node_modules/@actions/http-client/lib/index.js","../webpack://open-ai-reviewer/./node_modules/@actions/http-client/lib/proxy.js","../webpack://open-ai-reviewer/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/core/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/plugin-request-log/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/request/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/rest/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/abort-controller/dist/abort-controller.js","../webpack://open-ai-reviewer/./node_modules/agentkeepalive/index.js","../webpack://open-ai-reviewer/./node_modules/agentkeepalive/lib/agent.js","../webpack://open-ai-reviewer/./node_modules/agentkeepalive/lib/constants.js","../webpack://open-ai-reviewer/./node_modules/agentkeepalive/lib/https_agent.js","../webpack://open-ai-reviewer/./node_modules/balanced-match/index.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/index.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/lib/add.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/lib/register.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/lib/remove.js","../webpack://open-ai-reviewer/./node_modules/brace-expansion/inde
x.js","../webpack://open-ai-reviewer/./node_modules/deprecation/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/event-target-shim/dist/event-target-shim.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/node_modules/web-streams-polyfill/dist/ponyfill.js","../webpack://open-ai-reviewer/./node_modules/humanize-ms/index.js","../webpack://open-ai-reviewer/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://open-ai-reviewer/./node_modules/ms/index.js","../webpack://open-ai-reviewer/./node_modules/node-domexception/index.js","../webpack://open-ai-reviewer/./node_modules/node-fetch/lib/index.js","../webpack://open-ai-reviewer/./node_modules/once/once.js","../webpack://open-ai-reviewer/./node_modules/parse-diff/index.js","../webpack://open-ai-reviewer/./node_modules/tr46/index.js","../webpack://open-ai-reviewer/./node_modules/tunnel/index.js","../webpack://open-ai-reviewer/./node_modules/tunnel/lib/tunnel.js","../webpack://open-ai-reviewer/./node_modules/universal-user-agent/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/index.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/md5.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/nil.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/parse.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/regex.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/rng.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/sha1.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/stringify.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v1.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v3.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v35.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v4.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v5.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/validate.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/version.js","../webpack://open-ai-reviewer/./node_modules/web-streams-polyfill/dist/ponyfill.es2018.js","../webpack://open-ai-reviewer/./node_modules/webidl-conversions/lib/index.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/URL-impl.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/URL.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/public-api.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/url-state-machine.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/utils.js","../webpack://open-ai-reviewer/./node_modules/wrappy/wrappy.js","../webpack://open-ai-reviewer/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://open-ai-reviewer/external node-commonjs \"assert\"","../webpack://open-ai-reviewer/external node-commonjs \"crypto\"","../webpack://open-ai-reviewer/external node-commonjs \"events\"","../webpack://open-ai-reviewer/external node-commonjs \"fs\"","../webpack://open-ai-reviewer/external node-commonjs \"http\"","../webpack://open-ai-reviewer/external node-commonjs \"https\"","../webpack://open-ai-reviewer/external node-commonjs \"net\"","../webpack://open-ai-reviewer/external node-commonjs \"node:fs\"","../webpack://open-ai-reviewer/external node-commonjs \"node:stream\"","../webpack://open-ai-reviewer/external node-commonjs \"os\"","../webpack://open-ai-reviewer/external node-commonjs \"path\"","../webpack://open-ai-reviewer/external node-commonjs \"punycode\"","../webpack://open-ai-reviewer/external 
node-commonjs \"stream\"","../webpack://open-ai-reviewer/external node-commonjs \"tls\"","../webpack://open-ai-reviewer/external node-commonjs \"url\"","../webpack://open-ai-reviewer/external node-commonjs \"util\"","../webpack://open-ai-reviewer/external node-commonjs \"worker_threads\"","../webpack://open-ai-reviewer/external node-commonjs \"zlib\"","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/FileLike.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/FormDataEncoder.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/FormDataLike.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/index.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/createBoundary.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/escapeName.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isFileLike.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isFormData.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isFunction.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isPlainObject.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/normalizeValue.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/Blob.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/File.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/FormData.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/blobHelpers.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/deprecateConstructorEntries.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/fileFromPath.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/index.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isBlob.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isFile.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isFunction.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isPlainObject.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/brace-expressions.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/escape.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/index-cjs.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/index.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/unescape.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/MultipartBody.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/auto/runtime-node.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/index.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/node-runtime.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/registry.js","../webpack://open-ai-reviewer/./node_modules/openai/core.js","../webpack://open-ai-reviewer/./node_modules/openai/error.js","../webpack://open-ai-reviewer/./node_modules/openai/index.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/AbstractChatCompletionRunner.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/ChatCompletionRunner.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/ChatCompletionStream.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/ChatCompletionS
treamingRunner.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/RunnableFunction.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/chatCompletionUtils.js","../webpack://open-ai-reviewer/./node_modules/openai/pagination.js","../webpack://open-ai-reviewer/./node_modules/openai/resource.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/audio.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/speech.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/transcriptions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/translations.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/assistants/assistants.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/assistants/files.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/beta.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/chat/chat.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/chat/completions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/messages/files.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/messages/messages.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/runs/runs.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/runs/steps.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/threads.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/chat/chat.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/chat/completions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/chat/index.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/completions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/edits.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/embeddings.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/files.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/fine-tunes.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/fine-tuning/fine-tuning.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/fine-tuning/jobs.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/images.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/index.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/models.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/moderations.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/shared.js","../webpack://open-ai-reviewer/./node_modules/openai/streaming.js","../webpack://open-ai-reviewer/./node_modules/openai/uploads.js","../webpack://open-ai-reviewer/./node_modules/openai/version.js","../webpack://open-ai-reviewer/webpack/bootstrap","../webpack://open-ai-reviewer/webpack/runtime/compat","../webpack://open-ai-reviewer/webpack/before-startup","../webpack://open-ai-reviewer/webpack/startup","../webpack://open-ai-reviewer/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n var desc = Object.getOwnPropertyDescriptor(m, k);\n if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n desc = { enumerable: true, get: function() { return m[k]; } };\n }\n Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst fs_1 = require(\"fs\");\nconst core = __importStar(require(\"@actions/core\"));\nconst openai_1 = __importDefault(require(\"openai\"));\nconst rest_1 = require(\"@octokit/rest\");\nconst parse_diff_1 = __importDefault(require(\"parse-diff\"));\nconst minimatch_1 = __importDefault(require(\"minimatch\"));\nconst GITHUB_TOKEN = core.getInput(\"GITHUB_TOKEN\");\nconst OPENAI_API_KEY = core.getInput(\"OPENAI_API_KEY\");\nconst OPENAI_API_MODEL = core.getInput(\"OPENAI_API_MODEL\");\nconst octokit = new rest_1.Octokit({ auth: GITHUB_TOKEN });\nconst openai = new openai_1.default({\n apiKey: OPENAI_API_KEY,\n});\nfunction getPRDetails() {\n var _a, _b;\n return __awaiter(this, void 0, void 0, function* () {\n const { repository, number } = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH || \"\", \"utf8\"));\n const prResponse = yield octokit.pulls.get({\n owner: repository.owner.login,\n repo: repository.name,\n pull_number: number,\n });\n return {\n owner: repository.owner.login,\n repo: repository.name,\n pull_number: number,\n title: (_a = prResponse.data.title) !== null && _a !== void 0 ? _a : \"\",\n description: (_b = prResponse.data.body) !== null && _b !== void 0 ? 
_b : \"\",\n };\n });\n}\nfunction getDiff(owner, repo, pull_number) {\n return __awaiter(this, void 0, void 0, function* () {\n const response = yield octokit.pulls.get({\n owner,\n repo,\n pull_number,\n mediaType: { format: \"diff\" },\n });\n // @ts-expect-error - response.data is a string\n return response.data;\n });\n}\nfunction analyzeCode(parsedDiff, prDetails) {\n return __awaiter(this, void 0, void 0, function* () {\n const comments = [];\n for (const file of parsedDiff) {\n if (file.to === \"/dev/null\")\n continue; // Ignore deleted files\n for (const chunk of file.chunks) {\n const prompt = createPrompt(file, chunk, prDetails);\n const aiResponse = yield getAIResponse(prompt);\n if (aiResponse) {\n const newComments = createComment(file, chunk, aiResponse);\n if (newComments) {\n comments.push(...newComments);\n }\n }\n }\n }\n return comments;\n });\n}\nfunction createPrompt(file, chunk, prDetails) {\n return `Your task is to review pull requests. Instructions:\n- Provide the response in following JSON format: {\"reviews\": [{\"lineNumber\": , \"reviewComment\": \"\"}]}\n- Do not give positive comments or compliments.\n- Provide comments and suggestions ONLY if there is something to improve, otherwise \"reviews\" should be an empty array.\n- Write the comment in GitHub Markdown format.\n- Use the given description only for the overall context and only comment the code.\n- IMPORTANT: NEVER suggest adding comments to the code.\n\nReview the following code diff in the file \"${file.to}\" and take the pull request title and description into account when writing the response.\n \nPull request title: ${prDetails.title}\nPull request description:\n\n---\n${prDetails.description}\n---\n\nGit diff to review:\n\n\\`\\`\\`diff\n${chunk.content}\n${chunk.changes\n // @ts-expect-error - ln and ln2 exists where needed\n .map((c) => `${c.ln ? c.ln : c.ln2} ${c.content}`)\n .join(\"\\n\")}\n\\`\\`\\`\n`;\n}\nfunction getAIResponse(prompt) {\n var _a, _b;\n return __awaiter(this, void 0, void 0, function* () {\n const queryConfig = {\n model: OPENAI_API_MODEL,\n temperature: 0.2,\n max_tokens: 700,\n top_p: 1,\n frequency_penalty: 0,\n presence_penalty: 0,\n };\n try {\n const response = yield openai.chat.completions.create(Object.assign(Object.assign(Object.assign({}, queryConfig), (OPENAI_API_MODEL === \"gpt-4-1106-preview\"\n ? { response_format: { type: \"json_object\" } }\n : {})), { messages: [\n {\n role: \"system\",\n content: prompt,\n },\n ] }));\n const res = ((_b = (_a = response.choices[0].message) === null || _a === void 0 ? void 0 : _a.content) === null || _b === void 0 ? void 0 : _b.trim()) || \"{}\";\n return JSON.parse(res).reviews;\n }\n catch (error) {\n console.error(\"Error:\", error);\n return null;\n }\n });\n}\nfunction createComment(file, chunk, aiResponses) {\n return aiResponses.flatMap((aiResponse) => {\n if (!file.to) {\n return [];\n }\n return {\n body: aiResponse.reviewComment,\n path: file.to,\n line: Number(aiResponse.lineNumber),\n };\n });\n}\nfunction createReviewComment(owner, repo, pull_number, comments) {\n return __awaiter(this, void 0, void 0, function* () {\n yield octokit.pulls.createReview({\n owner,\n repo,\n pull_number,\n comments,\n event: \"COMMENT\",\n });\n });\n}\nfunction main() {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const prDetails = yield getPRDetails();\n let diff;\n const eventData = JSON.parse((0, fs_1.readFileSync)((_a = process.env.GITHUB_EVENT_PATH) !== null && _a !== void 0 ? 
_a : \"\", \"utf8\"));\n if (eventData.action === \"opened\") {\n diff = yield getDiff(prDetails.owner, prDetails.repo, prDetails.pull_number);\n }\n else if (eventData.action === \"synchronize\") {\n const newBaseSha = eventData.before;\n const newHeadSha = eventData.after;\n const response = yield octokit.repos.compareCommits({\n headers: {\n accept: \"application/vnd.github.v3.diff\",\n },\n owner: prDetails.owner,\n repo: prDetails.repo,\n base: newBaseSha,\n head: newHeadSha,\n });\n diff = String(response.data);\n }\n else {\n console.log(\"Unsupported event:\", process.env.GITHUB_EVENT_NAME);\n return;\n }\n if (!diff) {\n console.log(\"No diff found\");\n return;\n }\n const parsedDiff = (0, parse_diff_1.default)(diff);\n const excludePatterns = core\n .getInput(\"exclude\")\n .split(\",\")\n .map((s) => s.trim());\n const filteredDiff = parsedDiff.filter((file) => {\n return !excludePatterns.some((pattern) => { var _a; return (0, minimatch_1.default)((_a = file.to) !== null && _a !== void 0 ? _a : \"\", pattern); });\n });\n const comments = yield analyzeCode(filteredDiff, prDetails);\n if (comments.length > 0) {\n yield createReviewComment(prDetails.owner, prDetails.repo, prDetails.pull_number, comments);\n }\n });\n}\nmain().catch((error) => {\n console.error(\"Error:\", error);\n process.exit(1);\n});\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n * ::name key=value,key=value::message\n *\n * Examples:\n * ::warning::This is the message\n * ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n const cmd = new Command(command, properties, message);\n process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n constructor(command, properties, message) {\n if (!command) {\n command = 'missing.command';\n }\n this.command = command;\n this.properties = properties;\n this.message = message;\n }\n toString() {\n let cmdStr = CMD_STRING + this.command;\n if (this.properties && Object.keys(this.properties).length > 0) {\n cmdStr += ' ';\n let first = true;\n for (const key in this.properties) {\n if (this.properties.hasOwnProperty(key)) {\n const val = this.properties[key];\n if (val) {\n if (first) {\n first = false;\n }\n else {\n cmdStr += ',';\n }\n cmdStr += `${key}=${escapeProperty(val)}`;\n }\n }\n }\n }\n cmdStr += 
`${CMD_STRING}${escapeData(this.message)}`;\n return cmdStr;\n }\n}\nfunction escapeData(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n return utils_1.toCommandValue(s)\n .replace(/%/g, '%25')\n .replace(/\\r/g, '%0D')\n .replace(/\\n/g, '%0A')\n .replace(/:/g, '%3A')\n .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n /**\n * A code indicating that the action was successful\n */\n ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n /**\n * A code indicating that the action was a failure\n */\n ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n const convertedVal = utils_1.toCommandValue(val);\n process.env[name] = convertedVal;\n const filePath = process.env['GITHUB_ENV'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n }\n command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n const filePath = process.env['GITHUB_PATH'] || '';\n if (filePath) {\n file_command_1.issueFileCommand('PATH', inputPath);\n }\n else {\n command_1.issueCommand('add-path', {}, inputPath);\n }\n process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string\n */\nfunction getInput(name, options) {\n const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n if (options && options.required && !val) {\n throw new Error(`Input required and not supplied: ${name}`);\n }\n if (options && options.trimWhitespace === false) {\n return val;\n }\n return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input. Each value is also trimmed.\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns string[]\n *\n */\nfunction getMultilineInput(name, options) {\n const inputs = getInput(name, options)\n .split('\\n')\n .filter(x => x !== '');\n if (options && options.trimWhitespace === false) {\n return inputs;\n }\n return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param name name of the input to get\n * @param options optional. See InputOptions.\n * @returns boolean\n */\nfunction getBooleanInput(name, options) {\n const trueValue = ['true', 'True', 'TRUE'];\n const falseValue = ['false', 'False', 'FALSE'];\n const val = getInput(name, options);\n if (trueValue.includes(val))\n return true;\n if (falseValue.includes(val))\n return false;\n throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param name name of the output to set\n * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n const filePath = process.env['GITHUB_OUTPUT'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n }\n process.stdout.write(os.EOL);\n command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n process.exitCode = ExitCode.Failure;\n error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? 
message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n return __awaiter(this, void 0, void 0, function* () {\n startGroup(name);\n let result;\n try {\n result = yield fn();\n }\n finally {\n endGroup();\n }\n return result;\n });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param name name of the state to store\n * @param value value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n const filePath = process.env['GITHUB_STATE'] || '';\n if (filePath) {\n return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n }\n command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param name name of the state to get\n * @returns string\n */\nfunction getState(name) {\n return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n return __awaiter(this, void 0, void 0, function* () {\n return yield oidc_utils_1.OidcClient.getIDToken(aud);\n });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n const filePath = process.env[`GITHUB_${command}`];\n if (!filePath) {\n throw new Error(`Unable to find environment variable for file command ${command}`);\n }\n if (!fs.existsSync(filePath)) {\n throw new Error(`Missing file at path: ${filePath}`);\n }\n fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n encoding: 'utf8'\n });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n const convertedValue = utils_1.toCommandValue(value);\n // These should realistically never happen, but just in case someone finds a\n // way to exploit uuid generation let's not allow keys or values that contain\n // the delimiter.\n if (key.includes(delimiter)) {\n throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n }\n if (convertedValue.includes(delimiter)) {\n throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n }\n return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n static createHttpClient(allowRetry = true, maxRetry = 10) {\n const requestOptions = {\n allowRetries: allowRetry,\n maxRetries: maxRetry\n };\n return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n }\n static getRequestToken() {\n const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n if (!token) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n }\n return token;\n }\n static getIDTokenUrl() {\n const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n if (!runtimeUrl) {\n throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n }\n return runtimeUrl;\n }\n static getCall(id_token_url) {\n var _a;\n return __awaiter(this, void 0, void 0, function* () {\n const httpclient = OidcClient.createHttpClient();\n const res = yield httpclient\n .getJson(id_token_url)\n .catch(error => {\n throw new Error(`Failed to get ID Token. \\n \n Error Code : ${error.statusCode}\\n \n Error Message: ${error.result.message}`);\n });\n const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n if (!id_token) {\n throw new Error('Response json body do not have ID Token field');\n }\n return id_token;\n });\n }\n static getIDToken(audience) {\n return __awaiter(this, void 0, void 0, function* () {\n try {\n // New ID Token is requested from action service\n let id_token_url = OidcClient.getIDTokenUrl();\n if (audience) {\n const encodedAudience = encodeURIComponent(audience);\n id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n }\n core_1.debug(`ID token url is ${id_token_url}`);\n const id_token = yield OidcClient.getCall(id_token_url);\n core_1.setSecret(id_token);\n return id_token;\n }\n catch (error) {\n throw new Error(`Error message: ${error.message}`);\n }\n });\n }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. 
On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n constructor() {\n this._buffer = '';\n }\n /**\n * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n * Also checks r/w permissions.\n *\n * @returns step summary file path\n */\n filePath() {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._filePath) {\n return this._filePath;\n }\n const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n if (!pathFromEnv) {\n throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);\n }\n try {\n yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n }\n catch (_a) {\n throw new Error(`Unable to access summary file: '${pathFromEnv}'. 
Check if the file has correct read/write permissions.`);\n }\n this._filePath = pathFromEnv;\n return this._filePath;\n });\n }\n /**\n * Wraps content in an HTML tag, adding any HTML attributes\n *\n * @param {string} tag HTML tag to wrap\n * @param {string | null} content content within the tag\n * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n *\n * @returns {string} content wrapped in HTML element\n */\n wrap(tag, content, attrs = {}) {\n const htmlAttrs = Object.entries(attrs)\n .map(([key, value]) => ` ${key}=\"${value}\"`)\n .join('');\n if (!content) {\n return `<${tag}${htmlAttrs}>`;\n }\n return `<${tag}${htmlAttrs}>${content}`;\n }\n /**\n * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n *\n * @param {SummaryWriteOptions} [options] (optional) options for write operation\n *\n * @returns {Promise} summary instance\n */\n write(options) {\n return __awaiter(this, void 0, void 0, function* () {\n const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n const filePath = yield this.filePath();\n const writeFunc = overwrite ? writeFile : appendFile;\n yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n return this.emptyBuffer();\n });\n }\n /**\n * Clears the summary buffer and wipes the summary file\n *\n * @returns {Summary} summary instance\n */\n clear() {\n return __awaiter(this, void 0, void 0, function* () {\n return this.emptyBuffer().write({ overwrite: true });\n });\n }\n /**\n * Returns the current summary buffer as a string\n *\n * @returns {string} string of summary buffer\n */\n stringify() {\n return this._buffer;\n }\n /**\n * If the summary buffer is empty\n *\n * @returns {boolen} true if the buffer is empty\n */\n isEmptyBuffer() {\n return this._buffer.length === 0;\n }\n /**\n * Resets the summary buffer without writing to summary file\n *\n * @returns {Summary} summary instance\n */\n emptyBuffer() {\n this._buffer = '';\n return this;\n }\n /**\n * Adds raw text to the summary buffer\n *\n * @param {string} text content to add\n * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n *\n * @returns {Summary} summary instance\n */\n addRaw(text, addEOL = false) {\n this._buffer += text;\n return addEOL ? this.addEOL() : this;\n }\n /**\n * Adds the operating system-specific end-of-line marker to the buffer\n *\n * @returns {Summary} summary instance\n */\n addEOL() {\n return this.addRaw(os_1.EOL);\n }\n /**\n * Adds an HTML codeblock to the summary buffer\n *\n * @param {string} code content to render within fenced code block\n * @param {string} lang (optional) language to syntax highlight code\n *\n * @returns {Summary} summary instance\n */\n addCodeBlock(code, lang) {\n const attrs = Object.assign({}, (lang && { lang }));\n const element = this.wrap('pre', this.wrap('code', code), attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML list to the summary buffer\n *\n * @param {string[]} items list of items to render\n * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n *\n * @returns {Summary} summary instance\n */\n addList(items, ordered = false) {\n const tag = ordered ? 
'ol' : 'ul';\n const listItems = items.map(item => this.wrap('li', item)).join('');\n const element = this.wrap(tag, listItems);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML table to the summary buffer\n *\n * @param {SummaryTableCell[]} rows table rows\n *\n * @returns {Summary} summary instance\n */\n addTable(rows) {\n const tableBody = rows\n .map(row => {\n const cells = row\n .map(cell => {\n if (typeof cell === 'string') {\n return this.wrap('td', cell);\n }\n const { header, data, colspan, rowspan } = cell;\n const tag = header ? 'th' : 'td';\n const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n return this.wrap(tag, data, attrs);\n })\n .join('');\n return this.wrap('tr', cells);\n })\n .join('');\n const element = this.wrap('table', tableBody);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds a collapsable HTML details element to the summary buffer\n *\n * @param {string} label text for the closed state\n * @param {string} content collapsable content\n *\n * @returns {Summary} summary instance\n */\n addDetails(label, content) {\n const element = this.wrap('details', this.wrap('summary', label) + content);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML image tag to the summary buffer\n *\n * @param {string} src path to the image you to embed\n * @param {string} alt text description of the image\n * @param {SummaryImageOptions} options (optional) addition image attributes\n *\n * @returns {Summary} summary instance\n */\n addImage(src, alt, options) {\n const { width, height } = options || {};\n const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML section heading element\n *\n * @param {string} text heading text\n * @param {number | string} [level=1] (optional) the heading level, default: 1\n *\n * @returns {Summary} summary instance\n */\n addHeading(text, level) {\n const tag = `h${level}`;\n const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n ? tag\n : 'h1';\n const element = this.wrap(allowedTag, text);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\n//# sourceMappingURL=proxy.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nconst VERSION = \"4.2.0\";\n\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. 
Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy,\n ...otherOptions\n } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? 
{\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. 
application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? {\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"7.0.5\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"5.0.5\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n // Expose the errors and response data in 
their shorthand properties.\n this.errors = response.errors;\n this.data = response.data;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n return response.data.data;\n });\n}\n\nfunction withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"6.0.0\";\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/dependabot/alerts\", \"GET /enterprises/{enterprise}/secret-scanning/alerts\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /licenses\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/cache/usage-by-repository\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/required_workflows\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/actions/variables\", \"GET /orgs/{org}/actions/variables/{name}/repositories\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/code-scanning/alerts\", \"GET /orgs/{org}/codespaces\", \"GET /orgs/{org}/codespaces/secrets\", \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/dependabot/alerts\", \"GET /orgs/{org}/dependabot/secrets\", \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/events\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/members/{username}/codespaces\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{org}/{repo}/actions/required_workflows\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET 
/repos/{owner}/{repo}/actions/caches\", \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/variables\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/codespaces\", \"GET /repos/{owner}/{repo}/codespaces/devcontainers\", \"GET /repos/{owner}/{repo}/codespaces/secrets\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/status\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/dependabot/alerts\", \"GET /repos/{owner}/{repo}/dependabot/secrets\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/environments\", \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/hooks\", \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repos/{owner}/{repo}/topics\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /repositories/{repository_id}/environments/{environment_name}/variables\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/codespaces\", \"GET /user/codespaces/secrets\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/packages/{package_type}/{package_name}/versions\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/ssh_signing_keys\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET /users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/ssh_signing_keys\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use 
strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"1.0.4\";\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction requestLog(octokit) {\n octokit.hook.wrap(\"request\", (request, options) => {\n octokit.log.debug(\"request\", options);\n const start = Date.now();\n const requestOptions = octokit.request.endpoint.parse(options);\n const path = requestOptions.url.replace(options.baseUrl, \"\");\n return request(options).then(response => {\n octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`);\n return response;\n }).catch(error => {\n octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`);\n throw error;\n });\n });\n}\nrequestLog.VERSION = VERSION;\n\nexports.requestLog = requestLog;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\"POST /orgs/{org}/actions/runners/{runner_id}/labels\"],\n addCustomLabelsToSelfHostedRunnerForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n addSelectedRepoToOrgVariable: [\"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"],\n addSelectedRepoToRequiredWorkflow: [\"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createEnvironmentVariable: [\"POST /repositories/{repository_id}/environments/{environment_name}/variables\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createRequiredWorkflow: [\"POST /orgs/{org}/actions/required_workflows\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteActionsCacheById: [\"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"],\n deleteActionsCacheByKey: [\"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteEnvironmentVariable: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n 
deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteRepoVariable: [\"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"],\n deleteRequiredWorkflow: [\"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\"GET /orgs/{org}/actions/cache/usage-by-repository\"],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getEnvironmentVariable: [\"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/workflow\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoRequiredWorkflow: [\"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}\"],\n 
getRepoRequiredWorkflowUsage: [\"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getRequiredWorkflow: [\"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/access\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listEnvironmentVariables: [\"GET /repositories/{repository_id}/environments/{environment_name}/variables\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listLabelsForSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}/labels\"],\n listLabelsForSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoRequiredWorkflows: [\"GET /repos/{org}/{repo}/actions/required_workflows\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRequiredWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\"],\n listRequiredWorkflows: [\"GET /orgs/{org}/actions/required_workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedReposForOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelectedRepositoriesRequiredWorkflow: [\"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\"POST 
/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n removeSelectedRepoFromOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"],\n removeSelectedRepoFromRequiredWorkflow: [\"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForOrg: [\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"],\n setCustomLabelsForSelfHostedRunnerForRepo: [\"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedReposForOrgVariable: [\"PUT /orgs/{org}/actions/variables/{name}/repositories\"],\n setSelectedReposToRequiredWorkflow: [\"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"],\n setWorkflowAccessToRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/access\"],\n updateEnvironmentVariable: [\"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"],\n updateRequiredWorkflow: [\"PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n 
listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: 
[\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getCodeqlDatabase: [\"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listCodeqlDatabases: [\"GET 
/repos/{owner}/{repo}/code-scanning/codeql/databases\"],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n codespaceMachinesForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/machines\"],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n createOrUpdateSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}\"],\n createWithPrForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"],\n createWithRepoForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/codespaces\"],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n deleteSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}\"],\n exportForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/exports\"],\n getCodespacesForUserInOrg: [\"GET /orgs/{org}/members/{username}/codespaces\"],\n getExportDetailsForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/exports/{export_id}\"],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\"GET /user/codespaces/secrets/public-key\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n getSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}\"],\n listDevcontainersInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/devcontainers\"],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\"GET /orgs/{org}/codespaces\", {}, {\n renamedParameters: {\n org_id: \"org\"\n }\n }],\n listInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces\"],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}/repositories\"],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"],\n preFlightWithRepoForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/new\"],\n publishForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/publish\"],\n removeRepositoryForSecretForAuthenticatedUser: [\"DELETE 
/user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n repoMachinesForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/machines\"],\n setCodespacesBilling: [\"PUT /orgs/{org}/codespaces/billing\"],\n setRepositoriesForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/dependabot/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"],\n diffRange: [\"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n listLabelsForSelfHostedRunnerForEnterprise: [\"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET 
/gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE 
/repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET 
/orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n addSecurityManagerTeam: [\"PUT /orgs/{org}/security-managers/teams/{team_slug}\"],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\"POST /orgs/{org}/{security_product}/{enablement}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n removeSecurityManagerTeam: [\"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT 
/orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST 
/projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET 
/repos/{owner}/{repo}/vulnerability-alerts\"],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentBranchPolicy: [\"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteDeploymentBranchPolicy: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n 
deleteTagProtection: [\"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET 
/repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST 
/repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET 
/search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n getSecurityAnalysisSettingsForEnterprise: [\"GET /enterprises/{enterprise}/code_security_and_analysis\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/secret-scanning/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"],\n patchSecurityAnalysisSettingsForEnterprise: [\"PATCH /enterprises/{enterprise}/code_security_and_analysis\"],\n postSecurityProductEnablementForEnterprise: [\"POST /enterprises/{enterprise}/{security_product}/{enablement}\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: 
[\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSshSigningKeyForAuthenticatedUser: [\"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n 
listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"7.0.1\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". 
Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n });\n }\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"6.2.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || globalThis.fetch || /* istanbul ignore next */nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n },\n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;else if (error.name === \"AbortError\") throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBufferResponse(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data;\n // istanbul ignore else - just in case\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n // 
istanbul ignore next - just in case\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar core = require('@octokit/core');\nvar pluginRequestLog = require('@octokit/plugin-request-log');\nvar pluginPaginateRest = require('@octokit/plugin-paginate-rest');\nvar pluginRestEndpointMethods = require('@octokit/plugin-rest-endpoint-methods');\n\nconst VERSION = \"19.0.7\";\n\nconst Octokit = core.Octokit.plugin(pluginRequestLog.requestLog, pluginRestEndpointMethods.legacyRestEndpointMethods, pluginPaginateRest.paginateRest).defaults({\n userAgent: `octokit-rest.js/${VERSION}`\n});\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","/**\n * @author Toru Nagashima \n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar eventTargetShim = require('event-target-shim');\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nclass AbortSignal extends eventTargetShim.EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n constructor() {\n super();\n throw new TypeError(\"AbortSignal cannot be constructed directly\");\n }\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n get aborted() {\n const aborted = abortedFlags.get(this);\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? 
\"null\" : typeof this}`);\n }\n return aborted;\n }\n}\neventTargetShim.defineEventAttribute(AbortSignal.prototype, \"abort\");\n/**\n * Create an AbortSignal object.\n */\nfunction createAbortSignal() {\n const signal = Object.create(AbortSignal.prototype);\n eventTargetShim.EventTarget.call(signal);\n abortedFlags.set(signal, false);\n return signal;\n}\n/**\n * Abort a given signal.\n */\nfunction abortSignal(signal) {\n if (abortedFlags.get(signal) !== false) {\n return;\n }\n abortedFlags.set(signal, true);\n signal.dispatchEvent({ type: \"abort\" });\n}\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap();\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n});\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n });\n}\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nclass AbortController {\n /**\n * Initialize this controller.\n */\n constructor() {\n signals.set(this, createAbortSignal());\n }\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n get signal() {\n return getSignal(this);\n }\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n abort() {\n abortSignal(getSignal(this));\n }\n}\n/**\n * Associated signals.\n */\nconst signals = new WeakMap();\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller) {\n const signal = signals.get(controller);\n if (signal == null) {\n throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? 
\"null\" : typeof controller}`);\n }\n return signal;\n}\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n});\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n });\n}\n\nexports.AbortController = AbortController;\nexports.AbortSignal = AbortSignal;\nexports.default = AbortController;\n\nmodule.exports = AbortController\nmodule.exports.AbortController = module.exports[\"default\"] = AbortController\nmodule.exports.AbortSignal = AbortSignal\n//# sourceMappingURL=abort-controller.js.map\n","'use strict';\n\nmodule.exports = require('./lib/agent');\nmodule.exports.HttpsAgent = require('./lib/https_agent');\nmodule.exports.constants = require('./lib/constants');\n","'use strict';\n\nconst OriginalAgent = require('http').Agent;\nconst ms = require('humanize-ms');\nconst debug = require('util').debuglog('agentkeepalive');\nconst {\n INIT_SOCKET,\n CURRENT_ID,\n CREATE_ID,\n SOCKET_CREATED_TIME,\n SOCKET_NAME,\n SOCKET_REQUEST_COUNT,\n SOCKET_REQUEST_FINISHED_COUNT,\n} = require('./constants');\n\n// OriginalAgent come from\n// - https://github.com/nodejs/node/blob/v8.12.0/lib/_http_agent.js\n// - https://github.com/nodejs/node/blob/v10.12.0/lib/_http_agent.js\n\n// node <= 10\nlet defaultTimeoutListenerCount = 1;\nconst majorVersion = parseInt(process.version.split('.', 1)[0].substring(1));\nif (majorVersion >= 11 && majorVersion <= 12) {\n defaultTimeoutListenerCount = 2;\n} else if (majorVersion >= 13) {\n defaultTimeoutListenerCount = 3;\n}\n\nfunction deprecate(message) {\n console.log('[agentkeepalive:deprecated] %s', message);\n}\n\nclass Agent extends OriginalAgent {\n constructor(options) {\n options = options || {};\n options.keepAlive = options.keepAlive !== false;\n // default is keep-alive and 4s free socket timeout\n // see https://medium.com/ssense-tech/reduce-networking-errors-in-nodejs-23b4eb9f2d83\n if (options.freeSocketTimeout === undefined) {\n options.freeSocketTimeout = 4000;\n }\n // Legacy API: keepAliveTimeout should be rename to `freeSocketTimeout`\n if (options.keepAliveTimeout) {\n deprecate('options.keepAliveTimeout is deprecated, please use options.freeSocketTimeout instead');\n options.freeSocketTimeout = options.keepAliveTimeout;\n delete options.keepAliveTimeout;\n }\n // Legacy API: freeSocketKeepAliveTimeout should be rename to `freeSocketTimeout`\n if (options.freeSocketKeepAliveTimeout) {\n deprecate('options.freeSocketKeepAliveTimeout is deprecated, please use options.freeSocketTimeout instead');\n options.freeSocketTimeout = options.freeSocketKeepAliveTimeout;\n delete options.freeSocketKeepAliveTimeout;\n }\n\n // Sets the socket to timeout after timeout milliseconds of inactivity on the socket.\n // By default is double free socket timeout.\n if (options.timeout === undefined) {\n // make sure socket default inactivity timeout >= 8s\n options.timeout = Math.max(options.freeSocketTimeout * 2, 8000);\n }\n\n // support humanize format\n options.timeout = ms(options.timeout);\n options.freeSocketTimeout = ms(options.freeSocketTimeout);\n options.socketActiveTTL = options.socketActiveTTL ? 
ms(options.socketActiveTTL) : 0;\n\n super(options);\n\n this[CURRENT_ID] = 0;\n\n // create socket success counter\n this.createSocketCount = 0;\n this.createSocketCountLastCheck = 0;\n\n this.createSocketErrorCount = 0;\n this.createSocketErrorCountLastCheck = 0;\n\n this.closeSocketCount = 0;\n this.closeSocketCountLastCheck = 0;\n\n // socket error event count\n this.errorSocketCount = 0;\n this.errorSocketCountLastCheck = 0;\n\n // request finished counter\n this.requestCount = 0;\n this.requestCountLastCheck = 0;\n\n // including free socket timeout counter\n this.timeoutSocketCount = 0;\n this.timeoutSocketCountLastCheck = 0;\n\n this.on('free', socket => {\n // https://github.com/nodejs/node/pull/32000\n // Node.js native agent will check socket timeout eqs agent.options.timeout.\n // Use the ttl or freeSocketTimeout to overwrite.\n const timeout = this.calcSocketTimeout(socket);\n if (timeout > 0 && socket.timeout !== timeout) {\n socket.setTimeout(timeout);\n }\n });\n }\n\n get freeSocketKeepAliveTimeout() {\n deprecate('agent.freeSocketKeepAliveTimeout is deprecated, please use agent.options.freeSocketTimeout instead');\n return this.options.freeSocketTimeout;\n }\n\n get timeout() {\n deprecate('agent.timeout is deprecated, please use agent.options.timeout instead');\n return this.options.timeout;\n }\n\n get socketActiveTTL() {\n deprecate('agent.socketActiveTTL is deprecated, please use agent.options.socketActiveTTL instead');\n return this.options.socketActiveTTL;\n }\n\n calcSocketTimeout(socket) {\n /**\n * return <= 0: should free socket\n * return > 0: should update socket timeout\n * return undefined: not find custom timeout\n */\n let freeSocketTimeout = this.options.freeSocketTimeout;\n const socketActiveTTL = this.options.socketActiveTTL;\n if (socketActiveTTL) {\n // check socketActiveTTL\n const aliveTime = Date.now() - socket[SOCKET_CREATED_TIME];\n const diff = socketActiveTTL - aliveTime;\n if (diff <= 0) {\n return diff;\n }\n if (freeSocketTimeout && diff < freeSocketTimeout) {\n freeSocketTimeout = diff;\n }\n }\n // set freeSocketTimeout\n if (freeSocketTimeout) {\n // set free keepalive timer\n // try to use socket custom freeSocketTimeout first, support headers['keep-alive']\n // https://github.com/node-modules/urllib/blob/b76053020923f4d99a1c93cf2e16e0c5ba10bacf/lib/urllib.js#L498\n const customFreeSocketTimeout = socket.freeSocketTimeout || socket.freeSocketKeepAliveTimeout;\n return customFreeSocketTimeout || freeSocketTimeout;\n }\n }\n\n keepSocketAlive(socket) {\n const result = super.keepSocketAlive(socket);\n // should not keepAlive, do nothing\n if (!result) return result;\n\n const customTimeout = this.calcSocketTimeout(socket);\n if (typeof customTimeout === 'undefined') {\n return true;\n }\n if (customTimeout <= 0) {\n debug('%s(requests: %s, finished: %s) free but need to destroy by TTL, request count %s, diff is %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], customTimeout);\n return false;\n }\n if (socket.timeout !== customTimeout) {\n socket.setTimeout(customTimeout);\n }\n return true;\n }\n\n // only call on addRequest\n reuseSocket(...args) {\n // reuseSocket(socket, req)\n super.reuseSocket(...args);\n const socket = args[0];\n const req = args[1];\n req.reusedSocket = true;\n const agentTimeout = this.options.timeout;\n if (getSocketTimeout(socket) !== agentTimeout) {\n // reset timeout before use\n socket.setTimeout(agentTimeout);\n debug('%s reset timeout to %sms', 
socket[SOCKET_NAME], agentTimeout);\n }\n socket[SOCKET_REQUEST_COUNT]++;\n debug('%s(requests: %s, finished: %s) reuse on addRequest, timeout %sms',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT],\n getSocketTimeout(socket));\n }\n\n [CREATE_ID]() {\n const id = this[CURRENT_ID]++;\n if (this[CURRENT_ID] === Number.MAX_SAFE_INTEGER) this[CURRENT_ID] = 0;\n return id;\n }\n\n [INIT_SOCKET](socket, options) {\n // bugfix here.\n // https on node 8, 10 won't set agent.options.timeout by default\n // TODO: need to fix on node itself\n if (options.timeout) {\n const timeout = getSocketTimeout(socket);\n if (!timeout) {\n socket.setTimeout(options.timeout);\n }\n }\n\n if (this.options.keepAlive) {\n // Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/\n // https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html\n socket.setNoDelay(true);\n }\n this.createSocketCount++;\n if (this.options.socketActiveTTL) {\n socket[SOCKET_CREATED_TIME] = Date.now();\n }\n // don't show the hole '-----BEGIN CERTIFICATE----' key string\n socket[SOCKET_NAME] = `sock[${this[CREATE_ID]()}#${options._agentKey}]`.split('-----BEGIN', 1)[0];\n socket[SOCKET_REQUEST_COUNT] = 1;\n socket[SOCKET_REQUEST_FINISHED_COUNT] = 0;\n installListeners(this, socket, options);\n }\n\n createConnection(options, oncreate) {\n let called = false;\n const onNewCreate = (err, socket) => {\n if (called) return;\n called = true;\n\n if (err) {\n this.createSocketErrorCount++;\n return oncreate(err);\n }\n this[INIT_SOCKET](socket, options);\n oncreate(err, socket);\n };\n\n const newSocket = super.createConnection(options, onNewCreate);\n if (newSocket) onNewCreate(null, newSocket);\n return newSocket;\n }\n\n get statusChanged() {\n const changed = this.createSocketCount !== this.createSocketCountLastCheck ||\n this.createSocketErrorCount !== this.createSocketErrorCountLastCheck ||\n this.closeSocketCount !== this.closeSocketCountLastCheck ||\n this.errorSocketCount !== this.errorSocketCountLastCheck ||\n this.timeoutSocketCount !== this.timeoutSocketCountLastCheck ||\n this.requestCount !== this.requestCountLastCheck;\n if (changed) {\n this.createSocketCountLastCheck = this.createSocketCount;\n this.createSocketErrorCountLastCheck = this.createSocketErrorCount;\n this.closeSocketCountLastCheck = this.closeSocketCount;\n this.errorSocketCountLastCheck = this.errorSocketCount;\n this.timeoutSocketCountLastCheck = this.timeoutSocketCount;\n this.requestCountLastCheck = this.requestCount;\n }\n return changed;\n }\n\n getCurrentStatus() {\n return {\n createSocketCount: this.createSocketCount,\n createSocketErrorCount: this.createSocketErrorCount,\n closeSocketCount: this.closeSocketCount,\n errorSocketCount: this.errorSocketCount,\n timeoutSocketCount: this.timeoutSocketCount,\n requestCount: this.requestCount,\n freeSockets: inspect(this.freeSockets),\n sockets: inspect(this.sockets),\n requests: inspect(this.requests),\n };\n }\n}\n\n// node 8 don't has timeout attribute on socket\n// https://github.com/nodejs/node/pull/21204/files#diff-e6ef024c3775d787c38487a6309e491dR408\nfunction getSocketTimeout(socket) {\n return socket.timeout || socket._idleTimeout;\n}\n\nfunction installListeners(agent, socket, options) {\n debug('%s create, timeout %sms', socket[SOCKET_NAME], getSocketTimeout(socket));\n\n // listener socket events: close, timeout, error, free\n function onFree() {\n // create and socket.emit('free') logic\n // 
https://github.com/nodejs/node/blob/master/lib/_http_agent.js#L311\n // no req on the socket, it should be the new socket\n if (!socket._httpMessage && socket[SOCKET_REQUEST_COUNT] === 1) return;\n\n socket[SOCKET_REQUEST_FINISHED_COUNT]++;\n agent.requestCount++;\n debug('%s(requests: %s, finished: %s) free',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]);\n\n // should reuse on pedding requests?\n const name = agent.getName(options);\n if (socket.writable && agent.requests[name] && agent.requests[name].length) {\n // will be reuse on agent free listener\n socket[SOCKET_REQUEST_COUNT]++;\n debug('%s(requests: %s, finished: %s) will be reuse on agent free event',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]);\n }\n }\n socket.on('free', onFree);\n\n function onClose(isError) {\n debug('%s(requests: %s, finished: %s) close, isError: %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], isError);\n agent.closeSocketCount++;\n }\n socket.on('close', onClose);\n\n // start socket timeout handler\n function onTimeout() {\n // onTimeout and emitRequestTimeout(_http_client.js)\n // https://github.com/nodejs/node/blob/v12.x/lib/_http_client.js#L711\n const listenerCount = socket.listeners('timeout').length;\n // node <= 10, default listenerCount is 1, onTimeout\n // 11 < node <= 12, default listenerCount is 2, onTimeout and emitRequestTimeout\n // node >= 13, default listenerCount is 3, onTimeout,\n // onTimeout(https://github.com/nodejs/node/pull/32000/files#diff-5f7fb0850412c6be189faeddea6c5359R333)\n // and emitRequestTimeout\n const timeout = getSocketTimeout(socket);\n const req = socket._httpMessage;\n const reqTimeoutListenerCount = req && req.listeners('timeout').length || 0;\n debug('%s(requests: %s, finished: %s) timeout after %sms, listeners %s, defaultTimeoutListenerCount %s, hasHttpRequest %s, HttpRequest timeoutListenerCount %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT],\n timeout, listenerCount, defaultTimeoutListenerCount, !!req, reqTimeoutListenerCount);\n if (debug.enabled) {\n debug('timeout listeners: %s', socket.listeners('timeout').map(f => f.name).join(', '));\n }\n agent.timeoutSocketCount++;\n const name = agent.getName(options);\n if (agent.freeSockets[name] && agent.freeSockets[name].indexOf(socket) !== -1) {\n // free socket timeout, destroy quietly\n socket.destroy();\n // Remove it from freeSockets list immediately to prevent new requests\n // from being sent through this socket.\n agent.removeSocket(socket, options);\n debug('%s is free, destroy quietly', socket[SOCKET_NAME]);\n } else {\n // if there is no any request socket timeout handler,\n // agent need to handle socket timeout itself.\n //\n // custom request socket timeout handle logic must follow these rules:\n // 1. Destroy socket first\n // 2. 
Must emit socket 'agentRemove' event tell agent remove socket\n // from freeSockets list immediately.\n // Otherise you may be get 'socket hang up' error when reuse\n // free socket and timeout happen in the same time.\n if (reqTimeoutListenerCount === 0) {\n const error = new Error('Socket timeout');\n error.code = 'ERR_SOCKET_TIMEOUT';\n error.timeout = timeout;\n // must manually call socket.end() or socket.destroy() to end the connection.\n // https://nodejs.org/dist/latest-v10.x/docs/api/net.html#net_socket_settimeout_timeout_callback\n socket.destroy(error);\n agent.removeSocket(socket, options);\n debug('%s destroy with timeout error', socket[SOCKET_NAME]);\n }\n }\n }\n socket.on('timeout', onTimeout);\n\n function onError(err) {\n const listenerCount = socket.listeners('error').length;\n debug('%s(requests: %s, finished: %s) error: %s, listenerCount: %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT],\n err, listenerCount);\n agent.errorSocketCount++;\n if (listenerCount === 1) {\n // if socket don't contain error event handler, don't catch it, emit it again\n debug('%s emit uncaught error event', socket[SOCKET_NAME]);\n socket.removeListener('error', onError);\n socket.emit('error', err);\n }\n }\n socket.on('error', onError);\n\n function onRemove() {\n debug('%s(requests: %s, finished: %s) agentRemove',\n socket[SOCKET_NAME],\n socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]);\n // We need this function for cases like HTTP 'upgrade'\n // (defined by WebSockets) where we need to remove a socket from the\n // pool because it'll be locked up indefinitely\n socket.removeListener('close', onClose);\n socket.removeListener('error', onError);\n socket.removeListener('free', onFree);\n socket.removeListener('timeout', onTimeout);\n socket.removeListener('agentRemove', onRemove);\n }\n socket.on('agentRemove', onRemove);\n}\n\nmodule.exports = Agent;\n\nfunction inspect(obj) {\n const res = {};\n for (const key in obj) {\n res[key] = obj[key].length;\n }\n return res;\n}\n","'use strict';\n\nmodule.exports = {\n // agent\n CURRENT_ID: Symbol('agentkeepalive#currentId'),\n CREATE_ID: Symbol('agentkeepalive#createId'),\n INIT_SOCKET: Symbol('agentkeepalive#initSocket'),\n CREATE_HTTPS_CONNECTION: Symbol('agentkeepalive#createHttpsConnection'),\n // socket\n SOCKET_CREATED_TIME: Symbol('agentkeepalive#socketCreatedTime'),\n SOCKET_NAME: Symbol('agentkeepalive#socketName'),\n SOCKET_REQUEST_COUNT: Symbol('agentkeepalive#socketRequestCount'),\n SOCKET_REQUEST_FINISHED_COUNT: Symbol('agentkeepalive#socketRequestFinishedCount'),\n};\n","'use strict';\n\nconst OriginalHttpsAgent = require('https').Agent;\nconst HttpAgent = require('./agent');\nconst {\n INIT_SOCKET,\n CREATE_HTTPS_CONNECTION,\n} = require('./constants');\n\nclass HttpsAgent extends HttpAgent {\n constructor(options) {\n super(options);\n\n this.defaultPort = 443;\n this.protocol = 'https:';\n this.maxCachedSessions = this.options.maxCachedSessions;\n /* istanbul ignore next */\n if (this.maxCachedSessions === undefined) {\n this.maxCachedSessions = 100;\n }\n\n this._sessionCache = {\n map: {},\n list: [],\n };\n }\n\n createConnection(options, oncreate) {\n const socket = this[CREATE_HTTPS_CONNECTION](options, oncreate);\n this[INIT_SOCKET](socket, options);\n return socket;\n }\n}\n\n// https://github.com/nodejs/node/blob/master/lib/https.js#L89\nHttpsAgent.prototype[CREATE_HTTPS_CONNECTION] = OriginalHttpsAgent.prototype.createConnection;\n\n[\n 'getName',\n 
'_getSession',\n '_cacheSession',\n // https://github.com/nodejs/node/pull/4982\n '_evictSession',\n].forEach(function(method) {\n /* istanbul ignore next */\n if (typeof OriginalHttpsAgent.prototype[method] === 'function') {\n HttpsAgent.prototype[method] = OriginalHttpsAgent.prototype[method];\n }\n});\n\nmodule.exports = HttpsAgent;\n","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n if(a===b) {\n return [ai, bi];\n }\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","var balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m) return [str];\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n if (/\\$$/.test(m.pre)) { \n for (var k = 0; k < post.length; k++) {\n var expansion = pre+ '{' + m.body + '}' + post[k];\n expansions.push(expansion);\n }\n } else {\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = [];\n\n for (var j = 0; j < n.length; j++) {\n N.push.apply(N, expand(n[j], false));\n }\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n }\n\n return expansions;\n}\n\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","/**\n * @author Toru Nagashima \n * @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * 
Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexports.defineEventAttribute = defineEventAttribute;\nexports.EventTarget = EventTarget;\nexports.default = EventTarget;\n\nmodule.exports = EventTarget\nmodule.exports.EventTarget = module.exports[\"default\"] = EventTarget\nmodule.exports.defineEventAttribute = defineEventAttribute\n//# sourceMappingURL=event-target-shim.js.map\n","/**\n * @license\n * web-streams-polyfill v4.0.0-beta.3\n * Copyright 2021 Mattias Buelens, Diwank Singh Tomer and other contributors.\n * This code is released under the MIT license.\n * SPDX-License-Identifier: MIT\n */\n!function(e,t){\"object\"==typeof exports&&\"undefined\"!=typeof module?t(exports):\"function\"==typeof define&&define.amd?define([\"exports\"],t):t((e=\"undefined\"!=typeof globalThis?globalThis:e||self).WebStreamsPolyfill={})}(this,(function(e){\"use strict\";const t=\"function\"==typeof Symbol&&\"symbol\"==typeof Symbol.iterator?Symbol:e=>`Symbol(${e})`;function r(){}function o(e){return\"object\"==typeof e&&null!==e||\"function\"==typeof e}const n=r;function a(e,t){try{Object.defineProperty(e,\"name\",{value:t,configurable:!0})}catch(e){}}const i=Promise,l=Promise.prototype.then,s=Promise.resolve.bind(i),u=Promise.reject.bind(i);function c(e){return new i(e)}function d(e){return s(e)}function f(e){return u(e)}function b(e,t,r){return l.call(e,t,r)}function h(e,t,r){b(b(e,t,r),void 0,n)}function _(e,t){h(e,t)}function p(e,t){h(e,void 0,t)}function m(e,t,r){return b(e,t,r)}function y(e){b(e,void 0,n)}let g=e=>{if(\"function\"==typeof queueMicrotask)g=queueMicrotask;else{const e=d(void 0);g=t=>b(e,t)}return g(e)};function S(e,t,r){if(\"function\"!=typeof e)throw new TypeError(\"Argument is not a function\");return Function.prototype.apply.call(e,t,r)}function w(e,t,r){try{return d(S(e,t,r))}catch(e){return f(e)}}class v{constructor(){this._cursor=0,this._size=0,this._front={_elements:[],_next:void 0},this._back=this._front,this._cursor=0,this._size=0}get length(){return this._size}push(e){const t=this._back;let r=t;16383===t._elements.length&&(r={_elements:[],_next:void 0}),t._elements.push(e),r!==t&&(this._back=r,t._next=r),++this._size}shift(){const e=this._front;let t=e;const r=this._cursor;let o=r+1;const n=e._elements,a=n[r];return 16384===o&&(t=e._next,o=0),--this._size,this._cursor=o,e!==t&&(this._front=t),n[r]=void 0,a}forEach(e){let t=this._cursor,r=this._front,o=r._elements;for(;!(t===o.length&&void 
0===r._next||t===o.length&&(r=r._next,o=r._elements,t=0,0===o.length));)e(o[t]),++t}peek(){const e=this._front,t=this._cursor;return e._elements[t]}}const R=t(\"[[AbortSteps]]\"),T=t(\"[[ErrorSteps]]\"),q=t(\"[[CancelSteps]]\"),C=t(\"[[PullSteps]]\"),P=t(\"[[ReleaseSteps]]\");function E(e,t){e._ownerReadableStream=t,t._reader=e,\"readable\"===t._state?B(e):\"closed\"===t._state?function(e){B(e),z(e)}(e):A(e,t._storedError)}function W(e,t){return Xt(e._ownerReadableStream,t)}function O(e){const t=e._ownerReadableStream;\"readable\"===t._state?j(e,new TypeError(\"Reader was released and can no longer be used to monitor the stream's closedness\")):function(e,t){A(e,t)}(e,new TypeError(\"Reader was released and can no longer be used to monitor the stream's closedness\")),t._readableStreamController[P](),t._reader=void 0,e._ownerReadableStream=void 0}function k(e){return new TypeError(\"Cannot \"+e+\" a stream using a released reader\")}function B(e){e._closedPromise=c(((t,r)=>{e._closedPromise_resolve=t,e._closedPromise_reject=r}))}function A(e,t){B(e),j(e,t)}function j(e,t){void 0!==e._closedPromise_reject&&(y(e._closedPromise),e._closedPromise_reject(t),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0)}function z(e){void 0!==e._closedPromise_resolve&&(e._closedPromise_resolve(void 0),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0)}const L=Number.isFinite||function(e){return\"number\"==typeof e&&isFinite(e)},F=Math.trunc||function(e){return e<0?Math.ceil(e):Math.floor(e)};function D(e,t){if(void 0!==e&&(\"object\"!=typeof(r=e)&&\"function\"!=typeof r))throw new TypeError(`${t} is not an object.`);var r}function I(e,t){if(\"function\"!=typeof e)throw new TypeError(`${t} is not a function.`)}function $(e,t){if(!function(e){return\"object\"==typeof e&&null!==e||\"function\"==typeof e}(e))throw new TypeError(`${t} is not an object.`)}function M(e,t,r){if(void 0===e)throw new TypeError(`Parameter ${t} is required in '${r}'.`)}function Y(e,t,r){if(void 0===e)throw new TypeError(`${t} is required in '${r}'.`)}function Q(e){return Number(e)}function N(e){return 0===e?0:e}function x(e,t){const r=Number.MAX_SAFE_INTEGER;let o=Number(e);if(o=N(o),!L(o))throw new TypeError(`${t} is not a finite number`);if(o=function(e){return N(F(e))}(o),o<0||o>r)throw new TypeError(`${t} is outside the accepted range of 0 to ${r}, inclusive`);return L(o)&&0!==o?o:0}function H(e){if(!o(e))return!1;if(\"function\"!=typeof e.getReader)return!1;try{return\"boolean\"==typeof e.locked}catch(e){return!1}}function V(e){if(!o(e))return!1;if(\"function\"!=typeof e.getWriter)return!1;try{return\"boolean\"==typeof e.locked}catch(e){return!1}}function U(e,t){if(!Ut(e))throw new TypeError(`${t} is not a ReadableStream.`)}function G(e,t){e._reader._readRequests.push(t)}function X(e,t,r){const o=e._reader._readRequests.shift();r?o._closeSteps():o._chunkSteps(t)}function J(e){return e._reader._readRequests.length}function K(e){const t=e._reader;return void 0!==t&&!!Z(t)}class ReadableStreamDefaultReader{constructor(e){if(M(e,1,\"ReadableStreamDefaultReader\"),U(e,\"First parameter\"),Gt(e))throw new TypeError(\"This stream has already been locked for exclusive reading by another reader\");E(this,e),this._readRequests=new v}get closed(){return Z(this)?this._closedPromise:f(te(\"closed\"))}cancel(e){return Z(this)?void 0===this._ownerReadableStream?f(k(\"cancel\")):W(this,e):f(te(\"cancel\"))}read(){if(!Z(this))return f(te(\"read\"));if(void 0===this._ownerReadableStream)return f(k(\"read from\"));let 
e,t;const r=c(((r,o)=>{e=r,t=o}));return function(e,t){const r=e._ownerReadableStream;r._disturbed=!0,\"closed\"===r._state?t._closeSteps():\"errored\"===r._state?t._errorSteps(r._storedError):r._readableStreamController[C](t)}(this,{_chunkSteps:t=>e({value:t,done:!1}),_closeSteps:()=>e({value:void 0,done:!0}),_errorSteps:e=>t(e)}),r}releaseLock(){if(!Z(this))throw te(\"releaseLock\");void 0!==this._ownerReadableStream&&function(e){O(e);const t=new TypeError(\"Reader was released\");ee(e,t)}(this)}}function Z(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_readRequests\")&&e instanceof ReadableStreamDefaultReader)}function ee(e,t){const r=e._readRequests;e._readRequests=new v,r.forEach((e=>{e._errorSteps(t)}))}function te(e){return new TypeError(`ReadableStreamDefaultReader.prototype.${e} can only be used on a ReadableStreamDefaultReader`)}Object.defineProperties(ReadableStreamDefaultReader.prototype,{cancel:{enumerable:!0},read:{enumerable:!0},releaseLock:{enumerable:!0},closed:{enumerable:!0}}),a(ReadableStreamDefaultReader.prototype.cancel,\"cancel\"),a(ReadableStreamDefaultReader.prototype.read,\"read\"),a(ReadableStreamDefaultReader.prototype.releaseLock,\"releaseLock\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamDefaultReader.prototype,t.toStringTag,{value:\"ReadableStreamDefaultReader\",configurable:!0});class re{constructor(e,t){this._ongoingPromise=void 0,this._isFinished=!1,this._reader=e,this._preventCancel=t}next(){const e=()=>this._nextSteps();return this._ongoingPromise=this._ongoingPromise?m(this._ongoingPromise,e,e):e(),this._ongoingPromise}return(e){const t=()=>this._returnSteps(e);return this._ongoingPromise?m(this._ongoingPromise,t,t):t()}_nextSteps(){if(this._isFinished)return Promise.resolve({value:void 0,done:!0});const e=this._reader;return void 0===e?f(k(\"iterate\")):b(e.read(),(e=>{var t;return this._ongoingPromise=void 0,e.done&&(this._isFinished=!0,null===(t=this._reader)||void 0===t||t.releaseLock(),this._reader=void 0),e}),(e=>{var t;throw this._ongoingPromise=void 0,this._isFinished=!0,null===(t=this._reader)||void 0===t||t.releaseLock(),this._reader=void 0,e}))}_returnSteps(e){if(this._isFinished)return Promise.resolve({value:e,done:!0});this._isFinished=!0;const t=this._reader;if(void 0===t)return f(k(\"finish iterating\"));if(this._reader=void 0,!this._preventCancel){const r=t.cancel(e);return t.releaseLock(),m(r,(()=>({value:e,done:!0})))}return t.releaseLock(),d({value:e,done:!0})}}const oe={next(){return ne(this)?this._asyncIteratorImpl.next():f(ae(\"next\"))},return(e){return ne(this)?this._asyncIteratorImpl.return(e):f(ae(\"return\"))}};function ne(e){if(!o(e))return!1;if(!Object.prototype.hasOwnProperty.call(e,\"_asyncIteratorImpl\"))return!1;try{return e._asyncIteratorImpl instanceof re}catch(e){return!1}}function ae(e){return new TypeError(`ReadableStreamAsyncIterator.${e} can only be used on a ReadableSteamAsyncIterator`)}\"symbol\"==typeof t.asyncIterator&&Object.defineProperty(oe,t.asyncIterator,{value(){return this},writable:!0,configurable:!0});const ie=Number.isNaN||function(e){return e!=e};function le(e,t,r,o,n){new Uint8Array(e).set(new Uint8Array(r,o,n),t)}function se(e){const t=function(e,t,r){if(e.slice)return e.slice(t,r);const o=r-t,n=new ArrayBuffer(o);return le(n,0,e,t,o),n}(e.buffer,e.byteOffset,e.byteOffset+e.byteLength);return new Uint8Array(t)}function ue(e){const t=e._queue.shift();return e._queueTotalSize-=t.size,e._queueTotalSize<0&&(e._queueTotalSize=0),t.value}function 
ce(e,t,r){if(\"number\"!=typeof(o=r)||ie(o)||o<0||r===1/0)throw new RangeError(\"Size must be a finite, non-NaN, non-negative number.\");var o;e._queue.push({value:t,size:r}),e._queueTotalSize+=r}function de(e){e._queue=new v,e._queueTotalSize=0}class ReadableStreamBYOBRequest{constructor(){throw new TypeError(\"Illegal constructor\")}get view(){if(!be(this))throw Ae(\"view\");return this._view}respond(e){if(!be(this))throw Ae(\"respond\");if(M(e,1,\"respond\"),e=x(e,\"First parameter\"),void 0===this._associatedReadableByteStreamController)throw new TypeError(\"This BYOB request has been invalidated\");this._view.buffer,function(e,t){const r=e._pendingPullIntos.peek();if(\"closed\"===e._controlledReadableByteStream._state){if(0!==t)throw new TypeError(\"bytesWritten must be 0 when calling respond() on a closed stream\")}else{if(0===t)throw new TypeError(\"bytesWritten must be greater than 0 when calling respond() on a readable stream\");if(r.bytesFilled+t>r.byteLength)throw new RangeError(\"bytesWritten out of range\")}r.buffer=r.buffer,Ce(e,t)}(this._associatedReadableByteStreamController,e)}respondWithNewView(e){if(!be(this))throw Ae(\"respondWithNewView\");if(M(e,1,\"respondWithNewView\"),!ArrayBuffer.isView(e))throw new TypeError(\"You can only respond with array buffer views\");if(void 0===this._associatedReadableByteStreamController)throw new TypeError(\"This BYOB request has been invalidated\");e.buffer,function(e,t){const r=e._pendingPullIntos.peek();if(\"closed\"===e._controlledReadableByteStream._state){if(0!==t.byteLength)throw new TypeError(\"The view's length must be 0 when calling respondWithNewView() on a closed stream\")}else if(0===t.byteLength)throw new TypeError(\"The view's length must be greater than 0 when calling respondWithNewView() on a readable stream\");if(r.byteOffset+r.bytesFilled!==t.byteOffset)throw new RangeError(\"The region specified by view does not match byobRequest\");if(r.bufferByteLength!==t.buffer.byteLength)throw new RangeError(\"The buffer of view has different capacity than byobRequest\");if(r.bytesFilled+t.byteLength>r.byteLength)throw new RangeError(\"The region specified by view is larger than byobRequest\");const o=t.byteLength;r.buffer=t.buffer,Ce(e,o)}(this._associatedReadableByteStreamController,e)}}Object.defineProperties(ReadableStreamBYOBRequest.prototype,{respond:{enumerable:!0},respondWithNewView:{enumerable:!0},view:{enumerable:!0}}),a(ReadableStreamBYOBRequest.prototype.respond,\"respond\"),a(ReadableStreamBYOBRequest.prototype.respondWithNewView,\"respondWithNewView\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamBYOBRequest.prototype,t.toStringTag,{value:\"ReadableStreamBYOBRequest\",configurable:!0});class ReadableByteStreamController{constructor(){throw new TypeError(\"Illegal constructor\")}get byobRequest(){if(!fe(this))throw je(\"byobRequest\");return function(e){if(null===e._byobRequest&&e._pendingPullIntos.length>0){const t=e._pendingPullIntos.peek(),r=new Uint8Array(t.buffer,t.byteOffset+t.bytesFilled,t.byteLength-t.bytesFilled),o=Object.create(ReadableStreamBYOBRequest.prototype);!function(e,t,r){e._associatedReadableByteStreamController=t,e._view=r}(o,e,r),e._byobRequest=o}return e._byobRequest}(this)}get desiredSize(){if(!fe(this))throw je(\"desiredSize\");return ke(this)}close(){if(!fe(this))throw je(\"close\");if(this._closeRequested)throw new TypeError(\"The stream has already been closed; do not close it again!\");const e=this._controlledReadableByteStream._state;if(\"readable\"!==e)throw 
new TypeError(`The stream (in ${e} state) is not in the readable state and cannot be closed`);!function(e){const t=e._controlledReadableByteStream;if(e._closeRequested||\"readable\"!==t._state)return;if(e._queueTotalSize>0)return void(e._closeRequested=!0);if(e._pendingPullIntos.length>0){if(e._pendingPullIntos.peek().bytesFilled>0){const t=new TypeError(\"Insufficient bytes to fill elements in the given buffer\");throw We(e,t),t}}Ee(e),Jt(t)}(this)}enqueue(e){if(!fe(this))throw je(\"enqueue\");if(M(e,1,\"enqueue\"),!ArrayBuffer.isView(e))throw new TypeError(\"chunk must be an array buffer view\");if(0===e.byteLength)throw new TypeError(\"chunk must have non-zero byteLength\");if(0===e.buffer.byteLength)throw new TypeError(\"chunk's buffer must have non-zero byteLength\");if(this._closeRequested)throw new TypeError(\"stream is closed or draining\");const t=this._controlledReadableByteStream._state;if(\"readable\"!==t)throw new TypeError(`The stream (in ${t} state) is not in the readable state and cannot be enqueued to`);!function(e,t){const r=e._controlledReadableByteStream;if(e._closeRequested||\"readable\"!==r._state)return;const o=t.buffer,n=t.byteOffset,a=t.byteLength,i=o;if(e._pendingPullIntos.length>0){const t=e._pendingPullIntos.peek();t.buffer,0,Te(e),t.buffer=t.buffer,\"none\"===t.readerType&&Se(e,t)}if(K(r))if(function(e){const t=e._controlledReadableByteStream._reader;for(;t._readRequests.length>0;){if(0===e._queueTotalSize)return;Oe(e,t._readRequests.shift())}}(e),0===J(r))ye(e,i,n,a);else{e._pendingPullIntos.length>0&&Pe(e);X(r,new Uint8Array(i,n,a),!1)}else Fe(r)?(ye(e,i,n,a),qe(e)):ye(e,i,n,a);he(e)}(this,e)}error(e){if(!fe(this))throw je(\"error\");We(this,e)}[q](e){_e(this),de(this);const t=this._cancelAlgorithm(e);return Ee(this),t}[C](e){const t=this._controlledReadableByteStream;if(this._queueTotalSize>0)return void Oe(this,e);const r=this._autoAllocateChunkSize;if(void 0!==r){let t;try{t=new ArrayBuffer(r)}catch(t){return void e._errorSteps(t)}const o={buffer:t,bufferByteLength:r,byteOffset:0,byteLength:r,bytesFilled:0,elementSize:1,viewConstructor:Uint8Array,readerType:\"default\"};this._pendingPullIntos.push(o)}G(t,e),he(this)}[P](){if(this._pendingPullIntos.length>0){const e=this._pendingPullIntos.peek();e.readerType=\"none\",this._pendingPullIntos=new v,this._pendingPullIntos.push(e)}}}function fe(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledReadableByteStream\")&&e instanceof ReadableByteStreamController)}function be(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_associatedReadableByteStreamController\")&&e instanceof ReadableStreamBYOBRequest)}function he(e){const t=function(e){const t=e._controlledReadableByteStream;if(\"readable\"!==t._state)return!1;if(e._closeRequested)return!1;if(!e._started)return!1;if(K(t)&&J(t)>0)return!0;if(Fe(t)&&Le(t)>0)return!0;if(ke(e)>0)return!0;return!1}(e);if(!t)return;if(e._pulling)return void(e._pullAgain=!0);e._pulling=!0;h(e._pullAlgorithm(),(()=>(e._pulling=!1,e._pullAgain&&(e._pullAgain=!1,he(e)),null)),(t=>(We(e,t),null)))}function _e(e){Te(e),e._pendingPullIntos=new v}function pe(e,t){let r=!1;\"closed\"===e._state&&(r=!0);const o=me(t);\"default\"===t.readerType?X(e,o,r):function(e,t,r){const o=e._reader._readIntoRequests.shift();r?o._closeSteps(t):o._chunkSteps(t)}(e,o,r)}function me(e){const t=e.bytesFilled,r=e.elementSize;return new e.viewConstructor(e.buffer,e.byteOffset,t/r)}function ye(e,t,r,o){e._queue.push({buffer:t,byteOffset:r,byteLength:o}),e._queueTotalSize+=o}function 
ge(e,t,r,o){let n;try{n=t.slice(r,r+o)}catch(t){throw We(e,t),t}ye(e,n,0,o)}function Se(e,t){t.bytesFilled>0&&ge(e,t.buffer,t.byteOffset,t.bytesFilled),Pe(e)}function we(e,t){const r=t.elementSize,o=t.bytesFilled-t.bytesFilled%r,n=Math.min(e._queueTotalSize,t.byteLength-t.bytesFilled),a=t.bytesFilled+n,i=a-a%r;let l=n,s=!1;i>o&&(l=i-t.bytesFilled,s=!0);const u=e._queue;for(;l>0;){const r=u.peek(),o=Math.min(l,r.byteLength),n=t.byteOffset+t.bytesFilled;le(t.buffer,n,r.buffer,r.byteOffset,o),r.byteLength===o?u.shift():(r.byteOffset+=o,r.byteLength-=o),e._queueTotalSize-=o,ve(e,o,t),l-=o}return s}function ve(e,t,r){r.bytesFilled+=t}function Re(e){0===e._queueTotalSize&&e._closeRequested?(Ee(e),Jt(e._controlledReadableByteStream)):he(e)}function Te(e){null!==e._byobRequest&&(e._byobRequest._associatedReadableByteStreamController=void 0,e._byobRequest._view=null,e._byobRequest=null)}function qe(e){for(;e._pendingPullIntos.length>0;){if(0===e._queueTotalSize)return;const t=e._pendingPullIntos.peek();we(e,t)&&(Pe(e),pe(e._controlledReadableByteStream,t))}}function Ce(e,t){const r=e._pendingPullIntos.peek();Te(e);\"closed\"===e._controlledReadableByteStream._state?function(e,t){\"none\"===t.readerType&&Pe(e);const r=e._controlledReadableByteStream;if(Fe(r))for(;Le(r)>0;)pe(r,Pe(e))}(e,r):function(e,t,r){if(ve(0,t,r),\"none\"===r.readerType)return Se(e,r),void qe(e);if(r.bytesFilled0){const t=r.byteOffset+r.bytesFilled;ge(e,r.buffer,t-o,o)}r.bytesFilled-=o,pe(e._controlledReadableByteStream,r),qe(e)}(e,t,r),he(e)}function Pe(e){return e._pendingPullIntos.shift()}function Ee(e){e._pullAlgorithm=void 0,e._cancelAlgorithm=void 0}function We(e,t){const r=e._controlledReadableByteStream;\"readable\"===r._state&&(_e(e),de(e),Ee(e),Kt(r,t))}function Oe(e,t){const r=e._queue.shift();e._queueTotalSize-=r.byteLength,Re(e);const o=new Uint8Array(r.buffer,r.byteOffset,r.byteLength);t._chunkSteps(o)}function ke(e){const t=e._controlledReadableByteStream._state;return\"errored\"===t?null:\"closed\"===t?0:e._strategyHWM-e._queueTotalSize}function Be(e,t,r){const o=Object.create(ReadableByteStreamController.prototype);let n,a,i;n=void 0!==t.start?()=>t.start(o):()=>{},a=void 0!==t.pull?()=>t.pull(o):()=>d(void 0),i=void 0!==t.cancel?e=>t.cancel(e):()=>d(void 0);const l=t.autoAllocateChunkSize;if(0===l)throw new TypeError(\"autoAllocateChunkSize must be greater than 0\");!function(e,t,r,o,n,a,i){t._controlledReadableByteStream=e,t._pullAgain=!1,t._pulling=!1,t._byobRequest=null,t._queue=t._queueTotalSize=void 0,de(t),t._closeRequested=!1,t._started=!1,t._strategyHWM=a,t._pullAlgorithm=o,t._cancelAlgorithm=n,t._autoAllocateChunkSize=i,t._pendingPullIntos=new v,e._readableStreamController=t,h(d(r()),(()=>(t._started=!0,he(t),null)),(e=>(We(t,e),null)))}(e,o,n,a,i,r,l)}function Ae(e){return new TypeError(`ReadableStreamBYOBRequest.prototype.${e} can only be used on a ReadableStreamBYOBRequest`)}function je(e){return new TypeError(`ReadableByteStreamController.prototype.${e} can only be used on a ReadableByteStreamController`)}function ze(e,t){e._reader._readIntoRequests.push(t)}function Le(e){return e._reader._readIntoRequests.length}function Fe(e){const t=e._reader;return void 
0!==t&&!!De(t)}Object.defineProperties(ReadableByteStreamController.prototype,{close:{enumerable:!0},enqueue:{enumerable:!0},error:{enumerable:!0},byobRequest:{enumerable:!0},desiredSize:{enumerable:!0}}),a(ReadableByteStreamController.prototype.close,\"close\"),a(ReadableByteStreamController.prototype.enqueue,\"enqueue\"),a(ReadableByteStreamController.prototype.error,\"error\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableByteStreamController.prototype,t.toStringTag,{value:\"ReadableByteStreamController\",configurable:!0});class ReadableStreamBYOBReader{constructor(e){if(M(e,1,\"ReadableStreamBYOBReader\"),U(e,\"First parameter\"),Gt(e))throw new TypeError(\"This stream has already been locked for exclusive reading by another reader\");if(!fe(e._readableStreamController))throw new TypeError(\"Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte source\");E(this,e),this._readIntoRequests=new v}get closed(){return De(this)?this._closedPromise:f($e(\"closed\"))}cancel(e){return De(this)?void 0===this._ownerReadableStream?f(k(\"cancel\")):W(this,e):f($e(\"cancel\"))}read(e){if(!De(this))return f($e(\"read\"));if(!ArrayBuffer.isView(e))return f(new TypeError(\"view must be an array buffer view\"));if(0===e.byteLength)return f(new TypeError(\"view must have non-zero byteLength\"));if(0===e.buffer.byteLength)return f(new TypeError(\"view's buffer must have non-zero byteLength\"));if(e.buffer,void 0===this._ownerReadableStream)return f(k(\"read from\"));let t,r;const o=c(((e,o)=>{t=e,r=o}));return function(e,t,r){const o=e._ownerReadableStream;o._disturbed=!0,\"errored\"===o._state?r._errorSteps(o._storedError):function(e,t,r){const o=e._controlledReadableByteStream;let n=1;t.constructor!==DataView&&(n=t.constructor.BYTES_PER_ELEMENT);const a=t.constructor,i=t.buffer,l={buffer:i,bufferByteLength:i.byteLength,byteOffset:t.byteOffset,byteLength:t.byteLength,bytesFilled:0,elementSize:n,viewConstructor:a,readerType:\"byob\"};if(e._pendingPullIntos.length>0)return e._pendingPullIntos.push(l),void ze(o,r);if(\"closed\"!==o._state){if(e._queueTotalSize>0){if(we(e,l)){const t=me(l);return Re(e),void r._chunkSteps(t)}if(e._closeRequested){const t=new TypeError(\"Insufficient bytes to fill elements in the given buffer\");return We(e,t),void r._errorSteps(t)}}e._pendingPullIntos.push(l),ze(o,r),he(e)}else{const e=new a(l.buffer,l.byteOffset,0);r._closeSteps(e)}}(o._readableStreamController,t,r)}(this,e,{_chunkSteps:e=>t({value:e,done:!1}),_closeSteps:e=>t({value:e,done:!0}),_errorSteps:e=>r(e)}),o}releaseLock(){if(!De(this))throw $e(\"releaseLock\");void 0!==this._ownerReadableStream&&function(e){O(e);const t=new TypeError(\"Reader was released\");Ie(e,t)}(this)}}function De(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_readIntoRequests\")&&e instanceof ReadableStreamBYOBReader)}function Ie(e,t){const r=e._readIntoRequests;e._readIntoRequests=new v,r.forEach((e=>{e._errorSteps(t)}))}function $e(e){return new TypeError(`ReadableStreamBYOBReader.prototype.${e} can only be used on a ReadableStreamBYOBReader`)}function Me(e,t){const{highWaterMark:r}=e;if(void 0===r)return t;if(ie(r)||r<0)throw new RangeError(\"Invalid highWaterMark\");return r}function Ye(e){const{size:t}=e;return t||(()=>1)}function Qe(e,t){D(e,t);const r=null==e?void 0:e.highWaterMark,o=null==e?void 0:e.size;return{highWaterMark:void 0===r?void 0:Q(r),size:void 0===o?void 0:Ne(o,`${t} has member 'size' that`)}}function Ne(e,t){return I(e,t),t=>Q(e(t))}function 
xe(e,t,r){return I(e,r),r=>w(e,t,[r])}function He(e,t,r){return I(e,r),()=>w(e,t,[])}function Ve(e,t,r){return I(e,r),r=>S(e,t,[r])}function Ue(e,t,r){return I(e,r),(r,o)=>w(e,t,[r,o])}Object.defineProperties(ReadableStreamBYOBReader.prototype,{cancel:{enumerable:!0},read:{enumerable:!0},releaseLock:{enumerable:!0},closed:{enumerable:!0}}),a(ReadableStreamBYOBReader.prototype.cancel,\"cancel\"),a(ReadableStreamBYOBReader.prototype.read,\"read\"),a(ReadableStreamBYOBReader.prototype.releaseLock,\"releaseLock\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamBYOBReader.prototype,t.toStringTag,{value:\"ReadableStreamBYOBReader\",configurable:!0});const Ge=\"function\"==typeof AbortController;class WritableStream{constructor(e={},t={}){void 0===e?e=null:$(e,\"First parameter\");const r=Qe(t,\"Second parameter\"),o=function(e,t){D(e,t);const r=null==e?void 0:e.abort,o=null==e?void 0:e.close,n=null==e?void 0:e.start,a=null==e?void 0:e.type,i=null==e?void 0:e.write;return{abort:void 0===r?void 0:xe(r,e,`${t} has member 'abort' that`),close:void 0===o?void 0:He(o,e,`${t} has member 'close' that`),start:void 0===n?void 0:Ve(n,e,`${t} has member 'start' that`),write:void 0===i?void 0:Ue(i,e,`${t} has member 'write' that`),type:a}}(e,\"First parameter\");var n;(n=this)._state=\"writable\",n._storedError=void 0,n._writer=void 0,n._writableStreamController=void 0,n._writeRequests=new v,n._inFlightWriteRequest=void 0,n._closeRequest=void 0,n._inFlightCloseRequest=void 0,n._pendingAbortRequest=void 0,n._backpressure=!1;if(void 0!==o.type)throw new RangeError(\"Invalid type is specified\");const a=Ye(r);!function(e,t,r,o){const n=Object.create(WritableStreamDefaultController.prototype);let a,i,l,s;a=void 0!==t.start?()=>t.start(n):()=>{};i=void 0!==t.write?e=>t.write(e,n):()=>d(void 0);l=void 0!==t.close?()=>t.close():()=>d(void 0);s=void 0!==t.abort?e=>t.abort(e):()=>d(void 0);!function(e,t,r,o,n,a,i,l){t._controlledWritableStream=e,e._writableStreamController=t,t._queue=void 0,t._queueTotalSize=void 0,de(t),t._abortReason=void 0,t._abortController=function(){if(Ge)return new AbortController}(),t._started=!1,t._strategySizeAlgorithm=l,t._strategyHWM=i,t._writeAlgorithm=o,t._closeAlgorithm=n,t._abortAlgorithm=a;const s=ht(t);at(e,s);const u=r();h(d(u),(()=>(t._started=!0,ft(t),null)),(r=>(t._started=!0,et(e,r),null)))}(e,n,a,i,l,s,r,o)}(this,o,Me(r,1),a)}get locked(){if(!Xe(this))throw pt(\"locked\");return Je(this)}abort(e){return Xe(this)?Je(this)?f(new TypeError(\"Cannot abort a stream that already has a writer\")):Ke(this,e):f(pt(\"abort\"))}close(){return Xe(this)?Je(this)?f(new TypeError(\"Cannot close a stream that already has a writer\")):ot(this)?f(new TypeError(\"Cannot close an already-closing stream\")):Ze(this):f(pt(\"close\"))}getWriter(){if(!Xe(this))throw pt(\"getWriter\");return new WritableStreamDefaultWriter(this)}}function Xe(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_writableStreamController\")&&e instanceof WritableStream)}function Je(e){return void 0!==e._writer}function Ke(e,t){var r;if(\"closed\"===e._state||\"errored\"===e._state)return d(void 0);e._writableStreamController._abortReason=t,null===(r=e._writableStreamController._abortController)||void 0===r||r.abort(t);const o=e._state;if(\"closed\"===o||\"errored\"===o)return d(void 0);if(void 0!==e._pendingAbortRequest)return e._pendingAbortRequest._promise;let n=!1;\"erroring\"===o&&(n=!0,t=void 0);const a=c(((r,o)=>{e._pendingAbortRequest={_promise:void 
0,_resolve:r,_reject:o,_reason:t,_wasAlreadyErroring:n}}));return e._pendingAbortRequest._promise=a,n||tt(e,t),a}function Ze(e){const t=e._state;if(\"closed\"===t||\"errored\"===t)return f(new TypeError(`The stream (in ${t} state) is not in the writable state and cannot be closed`));const r=c(((t,r)=>{const o={_resolve:t,_reject:r};e._closeRequest=o})),o=e._writer;var n;return void 0!==o&&e._backpressure&&\"writable\"===t&&Et(o),ce(n=e._writableStreamController,st,0),ft(n),r}function et(e,t){\"writable\"!==e._state?rt(e):tt(e,t)}function tt(e,t){const r=e._writableStreamController;e._state=\"erroring\",e._storedError=t;const o=e._writer;void 0!==o&<(o,t),!function(e){if(void 0===e._inFlightWriteRequest&&void 0===e._inFlightCloseRequest)return!1;return!0}(e)&&r._started&&rt(e)}function rt(e){e._state=\"errored\",e._writableStreamController[T]();const t=e._storedError;if(e._writeRequests.forEach((e=>{e._reject(t)})),e._writeRequests=new v,void 0===e._pendingAbortRequest)return void nt(e);const r=e._pendingAbortRequest;if(e._pendingAbortRequest=void 0,r._wasAlreadyErroring)return r._reject(t),void nt(e);h(e._writableStreamController[R](r._reason),(()=>(r._resolve(),nt(e),null)),(t=>(r._reject(t),nt(e),null)))}function ot(e){return void 0!==e._closeRequest||void 0!==e._inFlightCloseRequest}function nt(e){void 0!==e._closeRequest&&(e._closeRequest._reject(e._storedError),e._closeRequest=void 0);const t=e._writer;void 0!==t&&vt(t,e._storedError)}function at(e,t){const r=e._writer;void 0!==r&&t!==e._backpressure&&(t?function(e){Tt(e)}(r):Et(r)),e._backpressure=t}Object.defineProperties(WritableStream.prototype,{abort:{enumerable:!0},close:{enumerable:!0},getWriter:{enumerable:!0},locked:{enumerable:!0}}),a(WritableStream.prototype.abort,\"abort\"),a(WritableStream.prototype.close,\"close\"),a(WritableStream.prototype.getWriter,\"getWriter\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(WritableStream.prototype,t.toStringTag,{value:\"WritableStream\",configurable:!0});class WritableStreamDefaultWriter{constructor(e){if(M(e,1,\"WritableStreamDefaultWriter\"),function(e,t){if(!Xe(e))throw new TypeError(`${t} is not a WritableStream.`)}(e,\"First parameter\"),Je(e))throw new TypeError(\"This stream has already been locked for exclusive writing by another writer\");this._ownerWritableStream=e,e._writer=this;const t=e._state;if(\"writable\"===t)!ot(e)&&e._backpressure?Tt(this):Ct(this),St(this);else if(\"erroring\"===t)qt(this,e._storedError),St(this);else if(\"closed\"===t)Ct(this),St(r=this),Rt(r);else{const t=e._storedError;qt(this,t),wt(this,t)}var r}get closed(){return it(this)?this._closedPromise:f(yt(\"closed\"))}get desiredSize(){if(!it(this))throw yt(\"desiredSize\");if(void 0===this._ownerWritableStream)throw gt(\"desiredSize\");return function(e){const t=e._ownerWritableStream,r=t._state;if(\"errored\"===r||\"erroring\"===r)return null;if(\"closed\"===r)return 0;return dt(t._writableStreamController)}(this)}get ready(){return it(this)?this._readyPromise:f(yt(\"ready\"))}abort(e){return it(this)?void 0===this._ownerWritableStream?f(gt(\"abort\")):function(e,t){return Ke(e._ownerWritableStream,t)}(this,e):f(yt(\"abort\"))}close(){if(!it(this))return f(yt(\"close\"));const e=this._ownerWritableStream;return void 0===e?f(gt(\"close\")):ot(e)?f(new TypeError(\"Cannot close an already-closing stream\")):Ze(this._ownerWritableStream)}releaseLock(){if(!it(this))throw yt(\"releaseLock\");void 0!==this._ownerWritableStream&&function(e){const t=e._ownerWritableStream,r=new TypeError(\"Writer 
was released and can no longer be used to monitor the stream's closedness\");lt(e,r),function(e,t){\"pending\"===e._closedPromiseState?vt(e,t):function(e,t){wt(e,t)}(e,t)}(e,r),t._writer=void 0,e._ownerWritableStream=void 0}(this)}write(e){return it(this)?void 0===this._ownerWritableStream?f(gt(\"write to\")):function(e,t){const r=e._ownerWritableStream,o=r._writableStreamController,n=function(e,t){try{return e._strategySizeAlgorithm(t)}catch(t){return bt(e,t),1}}(o,t);if(r!==e._ownerWritableStream)return f(gt(\"write to\"));const a=r._state;if(\"errored\"===a)return f(r._storedError);if(ot(r)||\"closed\"===a)return f(new TypeError(\"The stream is closing or closed and cannot be written to\"));if(\"erroring\"===a)return f(r._storedError);const i=function(e){return c(((t,r)=>{const o={_resolve:t,_reject:r};e._writeRequests.push(o)}))}(r);return function(e,t,r){try{ce(e,t,r)}catch(t){return void bt(e,t)}const o=e._controlledWritableStream;if(!ot(o)&&\"writable\"===o._state){at(o,ht(e))}ft(e)}(o,t,n),i}(this,e):f(yt(\"write\"))}}function it(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_ownerWritableStream\")&&e instanceof WritableStreamDefaultWriter)}function lt(e,t){\"pending\"===e._readyPromiseState?Pt(e,t):function(e,t){qt(e,t)}(e,t)}Object.defineProperties(WritableStreamDefaultWriter.prototype,{abort:{enumerable:!0},close:{enumerable:!0},releaseLock:{enumerable:!0},write:{enumerable:!0},closed:{enumerable:!0},desiredSize:{enumerable:!0},ready:{enumerable:!0}}),a(WritableStreamDefaultWriter.prototype.abort,\"abort\"),a(WritableStreamDefaultWriter.prototype.close,\"close\"),a(WritableStreamDefaultWriter.prototype.releaseLock,\"releaseLock\"),a(WritableStreamDefaultWriter.prototype.write,\"write\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(WritableStreamDefaultWriter.prototype,t.toStringTag,{value:\"WritableStreamDefaultWriter\",configurable:!0});const st={};class WritableStreamDefaultController{constructor(){throw new TypeError(\"Illegal constructor\")}get abortReason(){if(!ut(this))throw mt(\"abortReason\");return this._abortReason}get signal(){if(!ut(this))throw mt(\"signal\");if(void 0===this._abortController)throw new TypeError(\"WritableStreamDefaultController.prototype.signal is not supported\");return this._abortController.signal}error(e){if(!ut(this))throw mt(\"error\");\"writable\"===this._controlledWritableStream._state&&_t(this,e)}[R](e){const t=this._abortAlgorithm(e);return ct(this),t}[T](){de(this)}}function ut(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledWritableStream\")&&e instanceof WritableStreamDefaultController)}function ct(e){e._writeAlgorithm=void 0,e._closeAlgorithm=void 0,e._abortAlgorithm=void 0,e._strategySizeAlgorithm=void 0}function dt(e){return e._strategyHWM-e._queueTotalSize}function ft(e){const t=e._controlledWritableStream;if(!e._started)return;if(void 0!==t._inFlightWriteRequest)return;if(\"erroring\"===t._state)return void rt(t);if(0===e._queue.length)return;const r=e._queue.peek().value;r===st?function(e){const t=e._controlledWritableStream;(function(e){e._inFlightCloseRequest=e._closeRequest,e._closeRequest=void 0})(t),ue(e);const r=e._closeAlgorithm();ct(e),h(r,(()=>(function(e){e._inFlightCloseRequest._resolve(void 0),e._inFlightCloseRequest=void 0,\"erroring\"===e._state&&(e._storedError=void 0,void 0!==e._pendingAbortRequest&&(e._pendingAbortRequest._resolve(),e._pendingAbortRequest=void 0)),e._state=\"closed\";const t=e._writer;void 
0!==t&&Rt(t)}(t),null)),(e=>(function(e,t){e._inFlightCloseRequest._reject(t),e._inFlightCloseRequest=void 0,void 0!==e._pendingAbortRequest&&(e._pendingAbortRequest._reject(t),e._pendingAbortRequest=void 0),et(e,t)}(t,e),null)))}(e):function(e,t){const r=e._controlledWritableStream;!function(e){e._inFlightWriteRequest=e._writeRequests.shift()}(r);h(e._writeAlgorithm(t),(()=>{!function(e){e._inFlightWriteRequest._resolve(void 0),e._inFlightWriteRequest=void 0}(r);const t=r._state;if(ue(e),!ot(r)&&\"writable\"===t){const t=ht(e);at(r,t)}return ft(e),null}),(t=>(\"writable\"===r._state&&ct(e),function(e,t){e._inFlightWriteRequest._reject(t),e._inFlightWriteRequest=void 0,et(e,t)}(r,t),null)))}(e,r)}function bt(e,t){\"writable\"===e._controlledWritableStream._state&&_t(e,t)}function ht(e){return dt(e)<=0}function _t(e,t){const r=e._controlledWritableStream;ct(e),tt(r,t)}function pt(e){return new TypeError(`WritableStream.prototype.${e} can only be used on a WritableStream`)}function mt(e){return new TypeError(`WritableStreamDefaultController.prototype.${e} can only be used on a WritableStreamDefaultController`)}function yt(e){return new TypeError(`WritableStreamDefaultWriter.prototype.${e} can only be used on a WritableStreamDefaultWriter`)}function gt(e){return new TypeError(\"Cannot \"+e+\" a stream using a released writer\")}function St(e){e._closedPromise=c(((t,r)=>{e._closedPromise_resolve=t,e._closedPromise_reject=r,e._closedPromiseState=\"pending\"}))}function wt(e,t){St(e),vt(e,t)}function vt(e,t){void 0!==e._closedPromise_reject&&(y(e._closedPromise),e._closedPromise_reject(t),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0,e._closedPromiseState=\"rejected\")}function Rt(e){void 0!==e._closedPromise_resolve&&(e._closedPromise_resolve(void 0),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0,e._closedPromiseState=\"resolved\")}function Tt(e){e._readyPromise=c(((t,r)=>{e._readyPromise_resolve=t,e._readyPromise_reject=r})),e._readyPromiseState=\"pending\"}function qt(e,t){Tt(e),Pt(e,t)}function Ct(e){Tt(e),Et(e)}function Pt(e,t){void 0!==e._readyPromise_reject&&(y(e._readyPromise),e._readyPromise_reject(t),e._readyPromise_resolve=void 0,e._readyPromise_reject=void 0,e._readyPromiseState=\"rejected\")}function Et(e){void 0!==e._readyPromise_resolve&&(e._readyPromise_resolve(void 0),e._readyPromise_resolve=void 0,e._readyPromise_reject=void 0,e._readyPromiseState=\"fulfilled\")}Object.defineProperties(WritableStreamDefaultController.prototype,{abortReason:{enumerable:!0},signal:{enumerable:!0},error:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(WritableStreamDefaultController.prototype,t.toStringTag,{value:\"WritableStreamDefaultController\",configurable:!0});const Wt=\"undefined\"!=typeof DOMException?DOMException:void 0;const Ot=function(e){if(\"function\"!=typeof e&&\"object\"!=typeof e)return!1;try{return new e,!0}catch(e){return!1}}(Wt)?Wt:function(){const e=function(e,t){this.message=e||\"\",this.name=t||\"Error\",Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor)};return e.prototype=Object.create(Error.prototype),Object.defineProperty(e.prototype,\"constructor\",{value:e,writable:!0,configurable:!0}),e}();function kt(e,t,r,o,n,a){const i=e.getReader(),l=t.getWriter();Ut(e)&&(e._disturbed=!0);let s,u,p,S=!1,w=!1,v=\"readable\",R=\"writable\",T=!1,q=!1;const C=c((e=>{p=e}));let P=Promise.resolve(void 0);return c(((E,W)=>{let O;function k(){if(S)return;const e=c(((e,t)=>{!function 
r(o){o?e():b(function(){if(S)return d(!0);return b(l.ready,(()=>b(i.read(),(e=>!!e.done||(P=l.write(e.value),y(P),!1)))))}(),r,t)}(!1)}));y(e)}function B(){return v=\"closed\",r?L():z((()=>(Xe(t)&&(T=ot(t),R=t._state),T||\"closed\"===R?d(void 0):\"erroring\"===R||\"errored\"===R?f(u):(T=!0,l.close()))),!1,void 0),null}function A(e){return S||(v=\"errored\",s=e,o?L(!0,e):z((()=>l.abort(e)),!0,e)),null}function j(e){return w||(R=\"errored\",u=e,n?L(!0,e):z((()=>i.cancel(e)),!0,e)),null}if(void 0!==a&&(O=()=>{const e=void 0!==a.reason?a.reason:new Ot(\"Aborted\",\"AbortError\"),t=[];o||t.push((()=>\"writable\"===R?l.abort(e):d(void 0))),n||t.push((()=>\"readable\"===v?i.cancel(e):d(void 0))),z((()=>Promise.all(t.map((e=>e())))),!0,e)},a.aborted?O():a.addEventListener(\"abort\",O)),Ut(e)&&(v=e._state,s=e._storedError),Xe(t)&&(R=t._state,u=t._storedError,T=ot(t)),Ut(e)&&Xe(t)&&(q=!0,p()),\"errored\"===v)A(s);else if(\"erroring\"===R||\"errored\"===R)j(u);else if(\"closed\"===v)B();else if(T||\"closed\"===R){const e=new TypeError(\"the destination writable stream closed before all data could be piped to it\");n?L(!0,e):z((()=>i.cancel(e)),!0,e)}function z(e,t,r){function o(){return\"writable\"!==R||T?n():_(function(){let e;return d(function t(){if(e!==P)return e=P,m(P,t,t)}())}(),n),null}function n(){return e?h(e(),(()=>F(t,r)),(e=>F(!0,e))):F(t,r),null}S||(S=!0,q?o():_(C,o))}function L(e,t){z(void 0,e,t)}function F(e,t){return w=!0,l.releaseLock(),i.releaseLock(),void 0!==a&&a.removeEventListener(\"abort\",O),e?W(t):E(void 0),null}S||(h(i.closed,B,A),h(l.closed,(function(){return w||(R=\"closed\"),null}),j)),q?k():g((()=>{q=!0,p(),k()}))}))}function Bt(e,t){return function(e){try{return e.getReader({mode:\"byob\"}).releaseLock(),!0}catch(e){return!1}}(e)?function(e){let t,r,o,n,a,i=e.getReader(),l=!1,s=!1,u=!1,f=!1,b=!1,_=!1;const m=c((e=>{a=e}));function y(e){p(e.closed,(t=>(e!==i||(o.error(t),n.error(t),b&&_||a(void 0)),null)))}function g(){l&&(i.releaseLock(),i=e.getReader(),y(i),l=!1),h(i.read(),(e=>{var t,r;if(u=!1,f=!1,e.done)return b||o.close(),_||n.close(),null===(t=o.byobRequest)||void 0===t||t.respond(0),null===(r=n.byobRequest)||void 0===r||r.respond(0),b&&_||a(void 0),null;const l=e.value,c=l;let d=l;if(!b&&!_)try{d=se(l)}catch(e){return o.error(e),n.error(e),a(i.cancel(e)),null}return b||o.enqueue(c),_||n.enqueue(d),s=!1,u?w():f&&v(),null}),(()=>(s=!1,null)))}function S(t,r){l||(i.releaseLock(),i=e.getReader({mode:\"byob\"}),y(i),l=!0);const c=r?n:o,d=r?o:n;h(i.read(t),(e=>{var t;u=!1,f=!1;const o=r?_:b,n=r?b:_;if(e.done){o||c.close(),n||d.close();const r=e.value;return void 0!==r&&(o||c.byobRequest.respondWithNewView(r),n||null===(t=d.byobRequest)||void 0===t||t.respond(0)),o&&n||a(void 0),null}const l=e.value;if(n)o||c.byobRequest.respondWithNewView(l);else{let e;try{e=se(l)}catch(e){return c.error(e),d.error(e),a(i.cancel(e)),null}o||c.byobRequest.respondWithNewView(l),d.enqueue(e)}return s=!1,u?w():f&&v(),null}),(()=>(s=!1,null)))}function w(){if(s)return u=!0,d(void 0);s=!0;const e=o.byobRequest;return null===e?g():S(e.view,!1),d(void 0)}function v(){if(s)return f=!0,d(void 0);s=!0;const e=n.byobRequest;return null===e?g():S(e.view,!0),d(void 0)}function R(e){if(b=!0,t=e,_){const e=[t,r],o=i.cancel(e);a(o)}return m}function T(e){if(_=!0,r=e,b){const e=[t,r],o=i.cancel(e);a(o)}return m}const q=new ReadableStream({type:\"bytes\",start(e){o=e},pull:w,cancel:R}),C=new ReadableStream({type:\"bytes\",start(e){n=e},pull:v,cancel:T});return y(i),[q,C]}(e):function(e,t){const 
r=e.getReader();let o,n,a,i,l,s=!1,u=!1,f=!1,b=!1;const _=c((e=>{l=e}));function m(){return s?(u=!0,d(void 0)):(s=!0,h(r.read(),(e=>{if(u=!1,e.done)return f||a.close(),b||i.close(),f&&b||l(void 0),null;const t=e.value,r=t,o=t;return f||a.enqueue(r),b||i.enqueue(o),s=!1,u&&m(),null}),(()=>(s=!1,null))),d(void 0))}function y(e){if(f=!0,o=e,b){const e=[o,n],t=r.cancel(e);l(t)}return _}function g(e){if(b=!0,n=e,f){const e=[o,n],t=r.cancel(e);l(t)}return _}const S=new ReadableStream({start(e){a=e},pull:m,cancel:y}),w=new ReadableStream({start(e){i=e},pull:m,cancel:g});return p(r.closed,(e=>(a.error(e),i.error(e),f&&b||l(void 0),null))),[S,w]}(e)}class ReadableStreamDefaultController{constructor(){throw new TypeError(\"Illegal constructor\")}get desiredSize(){if(!At(this))throw $t(\"desiredSize\");return Ft(this)}close(){if(!At(this))throw $t(\"close\");if(!Dt(this))throw new TypeError(\"The stream is not in a state that permits close\");!function(e){if(!Dt(e))return;const t=e._controlledReadableStream;e._closeRequested=!0,0===e._queue.length&&(zt(e),Jt(t))}(this)}enqueue(e){if(!At(this))throw $t(\"enqueue\");if(!Dt(this))throw new TypeError(\"The stream is not in a state that permits enqueue\");return function(e,t){if(!Dt(e))return;const r=e._controlledReadableStream;if(Gt(r)&&J(r)>0)X(r,t,!1);else{let r;try{r=e._strategySizeAlgorithm(t)}catch(t){throw Lt(e,t),t}try{ce(e,t,r)}catch(t){throw Lt(e,t),t}}jt(e)}(this,e)}error(e){if(!At(this))throw $t(\"error\");Lt(this,e)}[q](e){de(this);const t=this._cancelAlgorithm(e);return zt(this),t}[C](e){const t=this._controlledReadableStream;if(this._queue.length>0){const r=ue(this);this._closeRequested&&0===this._queue.length?(zt(this),Jt(t)):jt(this),e._chunkSteps(r)}else G(t,e),jt(this)}[P](){}}function At(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledReadableStream\")&&e instanceof ReadableStreamDefaultController)}function jt(e){const t=function(e){const t=e._controlledReadableStream;if(!Dt(e))return!1;if(!e._started)return!1;if(Gt(t)&&J(t)>0)return!0;if(Ft(e)>0)return!0;return!1}(e);if(!t)return;if(e._pulling)return void(e._pullAgain=!0);e._pulling=!0;h(e._pullAlgorithm(),(()=>(e._pulling=!1,e._pullAgain&&(e._pullAgain=!1,jt(e)),null)),(t=>(Lt(e,t),null)))}function zt(e){e._pullAlgorithm=void 0,e._cancelAlgorithm=void 0,e._strategySizeAlgorithm=void 0}function Lt(e,t){const r=e._controlledReadableStream;\"readable\"===r._state&&(de(e),zt(e),Kt(r,t))}function Ft(e){const t=e._controlledReadableStream._state;return\"errored\"===t?null:\"closed\"===t?0:e._strategyHWM-e._queueTotalSize}function Dt(e){return!e._closeRequested&&\"readable\"===e._controlledReadableStream._state}function It(e,t,r,o){const n=Object.create(ReadableStreamDefaultController.prototype);let a,i,l;a=void 0!==t.start?()=>t.start(n):()=>{},i=void 0!==t.pull?()=>t.pull(n):()=>d(void 0),l=void 0!==t.cancel?e=>t.cancel(e):()=>d(void 0),function(e,t,r,o,n,a,i){t._controlledReadableStream=e,t._queue=void 0,t._queueTotalSize=void 0,de(t),t._started=!1,t._closeRequested=!1,t._pullAgain=!1,t._pulling=!1,t._strategySizeAlgorithm=i,t._strategyHWM=a,t._pullAlgorithm=o,t._cancelAlgorithm=n,e._readableStreamController=t,h(d(r()),(()=>(t._started=!0,jt(t),null)),(e=>(Lt(t,e),null)))}(e,n,a,i,l,r,o)}function $t(e){return new TypeError(`ReadableStreamDefaultController.prototype.${e} can only be used on a ReadableStreamDefaultController`)}function Mt(e,t,r){return I(e,r),r=>w(e,t,[r])}function Yt(e,t,r){return I(e,r),r=>w(e,t,[r])}function Qt(e,t,r){return 
I(e,r),r=>S(e,t,[r])}function Nt(e,t){if(\"bytes\"!==(e=`${e}`))throw new TypeError(`${t} '${e}' is not a valid enumeration value for ReadableStreamType`);return e}function xt(e,t){if(\"byob\"!==(e=`${e}`))throw new TypeError(`${t} '${e}' is not a valid enumeration value for ReadableStreamReaderMode`);return e}function Ht(e,t){D(e,t);const r=null==e?void 0:e.preventAbort,o=null==e?void 0:e.preventCancel,n=null==e?void 0:e.preventClose,a=null==e?void 0:e.signal;return void 0!==a&&function(e,t){if(!function(e){if(\"object\"!=typeof e||null===e)return!1;try{return\"boolean\"==typeof e.aborted}catch(e){return!1}}(e))throw new TypeError(`${t} is not an AbortSignal.`)}(a,`${t} has member 'signal' that`),{preventAbort:Boolean(r),preventCancel:Boolean(o),preventClose:Boolean(n),signal:a}}function Vt(e,t){D(e,t);const r=null==e?void 0:e.readable;Y(r,\"readable\",\"ReadableWritablePair\"),function(e,t){if(!H(e))throw new TypeError(`${t} is not a ReadableStream.`)}(r,`${t} has member 'readable' that`);const o=null==e?void 0:e.writable;return Y(o,\"writable\",\"ReadableWritablePair\"),function(e,t){if(!V(e))throw new TypeError(`${t} is not a WritableStream.`)}(o,`${t} has member 'writable' that`),{readable:r,writable:o}}Object.defineProperties(ReadableStreamDefaultController.prototype,{close:{enumerable:!0},enqueue:{enumerable:!0},error:{enumerable:!0},desiredSize:{enumerable:!0}}),a(ReadableStreamDefaultController.prototype.close,\"close\"),a(ReadableStreamDefaultController.prototype.enqueue,\"enqueue\"),a(ReadableStreamDefaultController.prototype.error,\"error\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamDefaultController.prototype,t.toStringTag,{value:\"ReadableStreamDefaultController\",configurable:!0});class ReadableStream{constructor(e={},t={}){void 0===e?e=null:$(e,\"First parameter\");const r=Qe(t,\"Second parameter\"),o=function(e,t){D(e,t);const r=e,o=null==r?void 0:r.autoAllocateChunkSize,n=null==r?void 0:r.cancel,a=null==r?void 0:r.pull,i=null==r?void 0:r.start,l=null==r?void 0:r.type;return{autoAllocateChunkSize:void 0===o?void 0:x(o,`${t} has member 'autoAllocateChunkSize' that`),cancel:void 0===n?void 0:Mt(n,r,`${t} has member 'cancel' that`),pull:void 0===a?void 0:Yt(a,r,`${t} has member 'pull' that`),start:void 0===i?void 0:Qt(i,r,`${t} has member 'start' that`),type:void 0===l?void 0:Nt(l,`${t} has member 'type' that`)}}(e,\"First parameter\");var n;if((n=this)._state=\"readable\",n._reader=void 0,n._storedError=void 0,n._disturbed=!1,\"bytes\"===o.type){if(void 0!==r.size)throw new RangeError(\"The strategy for a byte stream cannot have a size function\");Be(this,o,Me(r,0))}else{const e=Ye(r);It(this,o,Me(r,1),e)}}get locked(){if(!Ut(this))throw Zt(\"locked\");return Gt(this)}cancel(e){return Ut(this)?Gt(this)?f(new TypeError(\"Cannot cancel a stream that already has a reader\")):Xt(this,e):f(Zt(\"cancel\"))}getReader(e){if(!Ut(this))throw Zt(\"getReader\");return void 0===function(e,t){D(e,t);const r=null==e?void 0:e.mode;return{mode:void 0===r?void 0:xt(r,`${t} has member 'mode' that`)}}(e,\"First parameter\").mode?new ReadableStreamDefaultReader(this):function(e){return new ReadableStreamBYOBReader(e)}(this)}pipeThrough(e,t={}){if(!H(this))throw Zt(\"pipeThrough\");M(e,1,\"pipeThrough\");const r=Vt(e,\"First parameter\"),o=Ht(t,\"Second parameter\");if(this.locked)throw new TypeError(\"ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream\");if(r.writable.locked)throw new TypeError(\"ReadableStream.prototype.pipeThrough cannot 
be used on a locked WritableStream\");return y(kt(this,r.writable,o.preventClose,o.preventAbort,o.preventCancel,o.signal)),r.readable}pipeTo(e,t={}){if(!H(this))return f(Zt(\"pipeTo\"));if(void 0===e)return f(\"Parameter 1 is required in 'pipeTo'.\");if(!V(e))return f(new TypeError(\"ReadableStream.prototype.pipeTo's first argument must be a WritableStream\"));let r;try{r=Ht(t,\"Second parameter\")}catch(e){return f(e)}return this.locked?f(new TypeError(\"ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream\")):e.locked?f(new TypeError(\"ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream\")):kt(this,e,r.preventClose,r.preventAbort,r.preventCancel,r.signal)}tee(){if(!H(this))throw Zt(\"tee\");if(this.locked)throw new TypeError(\"Cannot tee a stream that already has a reader\");return Bt(this)}values(e){if(!H(this))throw Zt(\"values\");return function(e,t){const r=e.getReader(),o=new re(r,t),n=Object.create(oe);return n._asyncIteratorImpl=o,n}(this,function(e,t){D(e,t);const r=null==e?void 0:e.preventCancel;return{preventCancel:Boolean(r)}}(e,\"First parameter\").preventCancel)}}function Ut(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_readableStreamController\")&&e instanceof ReadableStream)}function Gt(e){return void 0!==e._reader}function Xt(e,t){if(e._disturbed=!0,\"closed\"===e._state)return d(void 0);if(\"errored\"===e._state)return f(e._storedError);Jt(e);const o=e._reader;if(void 0!==o&&De(o)){const e=o._readIntoRequests;o._readIntoRequests=new v,e.forEach((e=>{e._closeSteps(void 0)}))}return m(e._readableStreamController[q](t),r)}function Jt(e){e._state=\"closed\";const t=e._reader;if(void 0!==t&&(z(t),Z(t))){const e=t._readRequests;t._readRequests=new v,e.forEach((e=>{e._closeSteps()}))}}function Kt(e,t){e._state=\"errored\",e._storedError=t;const r=e._reader;void 0!==r&&(j(r,t),Z(r)?ee(r,t):Ie(r,t))}function Zt(e){return new TypeError(`ReadableStream.prototype.${e} can only be used on a ReadableStream`)}function er(e,t){D(e,t);const r=null==e?void 0:e.highWaterMark;return Y(r,\"highWaterMark\",\"QueuingStrategyInit\"),{highWaterMark:Q(r)}}Object.defineProperties(ReadableStream.prototype,{cancel:{enumerable:!0},getReader:{enumerable:!0},pipeThrough:{enumerable:!0},pipeTo:{enumerable:!0},tee:{enumerable:!0},values:{enumerable:!0},locked:{enumerable:!0}}),a(ReadableStream.prototype.cancel,\"cancel\"),a(ReadableStream.prototype.getReader,\"getReader\"),a(ReadableStream.prototype.pipeThrough,\"pipeThrough\"),a(ReadableStream.prototype.pipeTo,\"pipeTo\"),a(ReadableStream.prototype.tee,\"tee\"),a(ReadableStream.prototype.values,\"values\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStream.prototype,t.toStringTag,{value:\"ReadableStream\",configurable:!0}),\"symbol\"==typeof t.asyncIterator&&Object.defineProperty(ReadableStream.prototype,t.asyncIterator,{value:ReadableStream.prototype.values,writable:!0,configurable:!0});const tr=e=>e.byteLength;a(tr,\"size\");class ByteLengthQueuingStrategy{constructor(e){M(e,1,\"ByteLengthQueuingStrategy\"),e=er(e,\"First parameter\"),this._byteLengthQueuingStrategyHighWaterMark=e.highWaterMark}get highWaterMark(){if(!or(this))throw rr(\"highWaterMark\");return this._byteLengthQueuingStrategyHighWaterMark}get size(){if(!or(this))throw rr(\"size\");return tr}}function rr(e){return new TypeError(`ByteLengthQueuingStrategy.prototype.${e} can only be used on a ByteLengthQueuingStrategy`)}function 
or(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_byteLengthQueuingStrategyHighWaterMark\")&&e instanceof ByteLengthQueuingStrategy)}Object.defineProperties(ByteLengthQueuingStrategy.prototype,{highWaterMark:{enumerable:!0},size:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ByteLengthQueuingStrategy.prototype,t.toStringTag,{value:\"ByteLengthQueuingStrategy\",configurable:!0});const nr=()=>1;a(nr,\"size\");class CountQueuingStrategy{constructor(e){M(e,1,\"CountQueuingStrategy\"),e=er(e,\"First parameter\"),this._countQueuingStrategyHighWaterMark=e.highWaterMark}get highWaterMark(){if(!ir(this))throw ar(\"highWaterMark\");return this._countQueuingStrategyHighWaterMark}get size(){if(!ir(this))throw ar(\"size\");return nr}}function ar(e){return new TypeError(`CountQueuingStrategy.prototype.${e} can only be used on a CountQueuingStrategy`)}function ir(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_countQueuingStrategyHighWaterMark\")&&e instanceof CountQueuingStrategy)}function lr(e,t,r){return I(e,r),r=>w(e,t,[r])}function sr(e,t,r){return I(e,r),r=>S(e,t,[r])}function ur(e,t,r){return I(e,r),(r,o)=>w(e,t,[r,o])}Object.defineProperties(CountQueuingStrategy.prototype,{highWaterMark:{enumerable:!0},size:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(CountQueuingStrategy.prototype,t.toStringTag,{value:\"CountQueuingStrategy\",configurable:!0});class TransformStream{constructor(e={},t={},r={}){void 0===e&&(e=null);const o=Qe(t,\"Second parameter\"),n=Qe(r,\"Third parameter\"),a=function(e,t){D(e,t);const r=null==e?void 0:e.flush,o=null==e?void 0:e.readableType,n=null==e?void 0:e.start,a=null==e?void 0:e.transform,i=null==e?void 0:e.writableType;return{flush:void 0===r?void 0:lr(r,e,`${t} has member 'flush' that`),readableType:o,start:void 0===n?void 0:sr(n,e,`${t} has member 'start' that`),transform:void 0===a?void 0:ur(a,e,`${t} has member 'transform' that`),writableType:i}}(e,\"First parameter\");if(void 0!==a.readableType)throw new RangeError(\"Invalid readableType specified\");if(void 0!==a.writableType)throw new RangeError(\"Invalid writableType specified\");const i=Me(n,0),l=Ye(n),s=Me(o,1),u=Ye(o);let b;!function(e,t,r,o,n,a){function i(){return t}function l(t){return function(e,t){const r=e._transformStreamController;if(e._backpressure){return m(e._backpressureChangePromise,(()=>{if(\"erroring\"===(Xe(e._writable)?e._writable._state:e._writableState))throw Xe(e._writable)?e._writable._storedError:e._writableStoredError;return mr(r,t)}))}return mr(r,t)}(e,t)}function s(t){return function(e,t){return dr(e,t),d(void 0)}(e,t)}function u(){return function(e){const t=e._transformStreamController,r=t._flushAlgorithm();return _r(t),m(r,(()=>{if(\"errored\"===e._readableState)throw e._readableStoredError;Sr(e)&&wr(e)}),(t=>{throw dr(e,t),e._readableStoredError}))}(e)}function c(){return function(e){return br(e,!1),e._backpressureChangePromise}(e)}function f(t){return fr(e,t),d(void 0)}e._writableState=\"writable\",e._writableStoredError=void 0,e._writableHasInFlightOperation=!1,e._writableStarted=!1,e._writable=function(e,t,r,o,n,a,i){return new WritableStream({start(r){e._writableController=r;try{const t=r.signal;void 0!==t&&t.addEventListener(\"abort\",(()=>{\"writable\"===e._writableState&&(e._writableState=\"erroring\",t.reason&&(e._writableStoredError=t.reason))}))}catch(e){}return m(t(),(()=>(e._writableStarted=!0,Pr(e),null)),(t=>{throw 
e._writableStarted=!0,Tr(e,t),t}))},write:t=>(function(e){e._writableHasInFlightOperation=!0}(e),m(r(t),(()=>(function(e){e._writableHasInFlightOperation=!1}(e),Pr(e),null)),(t=>{throw function(e,t){e._writableHasInFlightOperation=!1,Tr(e,t)}(e,t),t}))),close:()=>(function(e){e._writableHasInFlightOperation=!0}(e),m(o(),(()=>(function(e){e._writableHasInFlightOperation=!1;\"erroring\"===e._writableState&&(e._writableStoredError=void 0);e._writableState=\"closed\"}(e),null)),(t=>{throw function(e,t){e._writableHasInFlightOperation=!1,e._writableState,Tr(e,t)}(e,t),t}))),abort:t=>(e._writableState=\"errored\",e._writableStoredError=t,n(t))},{highWaterMark:a,size:i})}(e,i,l,u,s,r,o),e._readableState=\"readable\",e._readableStoredError=void 0,e._readableCloseRequested=!1,e._readablePulling=!1,e._readable=function(e,t,r,o,n,a){return new ReadableStream({start:r=>(e._readableController=r,t().catch((t=>{vr(e,t)}))),pull:()=>(e._readablePulling=!0,r().catch((t=>{vr(e,t)}))),cancel:t=>(e._readableState=\"closed\",o(t))},{highWaterMark:n,size:a})}(e,i,c,f,n,a),e._backpressure=void 0,e._backpressureChangePromise=void 0,e._backpressureChangePromise_resolve=void 0,br(e,!0),e._transformStreamController=void 0}(this,c((e=>{b=e})),s,u,i,l),function(e,t){const r=Object.create(TransformStreamDefaultController.prototype);let o,n;o=void 0!==t.transform?e=>t.transform(e,r):e=>{try{return pr(r,e),d(void 0)}catch(e){return f(e)}};n=void 0!==t.flush?()=>t.flush(r):()=>d(void 0);!function(e,t,r,o){t._controlledTransformStream=e,e._transformStreamController=t,t._transformAlgorithm=r,t._flushAlgorithm=o}(e,r,o,n)}(this,a),void 0!==a.start?b(a.start(this._transformStreamController)):b(void 0)}get readable(){if(!cr(this))throw gr(\"readable\");return this._readable}get writable(){if(!cr(this))throw gr(\"writable\");return this._writable}}function cr(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_transformStreamController\")&&e instanceof TransformStream)}function dr(e,t){vr(e,t),fr(e,t)}function fr(e,t){_r(e._transformStreamController),function(e,t){e._writableController.error(t);\"writable\"===e._writableState&&qr(e,t)}(e,t),e._backpressure&&br(e,!1)}function br(e,t){void 0!==e._backpressureChangePromise&&e._backpressureChangePromise_resolve(),e._backpressureChangePromise=c((t=>{e._backpressureChangePromise_resolve=t})),e._backpressure=t}Object.defineProperties(TransformStream.prototype,{readable:{enumerable:!0},writable:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(TransformStream.prototype,t.toStringTag,{value:\"TransformStream\",configurable:!0});class TransformStreamDefaultController{constructor(){throw new TypeError(\"Illegal constructor\")}get desiredSize(){if(!hr(this))throw yr(\"desiredSize\");return Rr(this._controlledTransformStream)}enqueue(e){if(!hr(this))throw yr(\"enqueue\");pr(this,e)}error(e){if(!hr(this))throw yr(\"error\");var t;t=e,dr(this._controlledTransformStream,t)}terminate(){if(!hr(this))throw yr(\"terminate\");!function(e){const t=e._controlledTransformStream;Sr(t)&&wr(t);const r=new TypeError(\"TransformStream terminated\");fr(t,r)}(this)}}function hr(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledTransformStream\")&&e instanceof TransformStreamDefaultController)}function _r(e){e._transformAlgorithm=void 0,e._flushAlgorithm=void 0}function pr(e,t){const r=e._controlledTransformStream;if(!Sr(r))throw new TypeError(\"Readable side is not in a state that permits 
enqueue\");try{!function(e,t){e._readablePulling=!1;try{e._readableController.enqueue(t)}catch(t){throw vr(e,t),t}}(r,t)}catch(e){throw fr(r,e),r._readableStoredError}const o=function(e){return!function(e){if(!Sr(e))return!1;if(e._readablePulling)return!0;if(Rr(e)>0)return!0;return!1}(e)}(r);o!==r._backpressure&&br(r,!0)}function mr(e,t){return m(e._transformAlgorithm(t),void 0,(t=>{throw dr(e._controlledTransformStream,t),t}))}function yr(e){return new TypeError(`TransformStreamDefaultController.prototype.${e} can only be used on a TransformStreamDefaultController`)}function gr(e){return new TypeError(`TransformStream.prototype.${e} can only be used on a TransformStream`)}function Sr(e){return!e._readableCloseRequested&&\"readable\"===e._readableState}function wr(e){e._readableState=\"closed\",e._readableCloseRequested=!0,e._readableController.close()}function vr(e,t){\"readable\"===e._readableState&&(e._readableState=\"errored\",e._readableStoredError=t),e._readableController.error(t)}function Rr(e){return e._readableController.desiredSize}function Tr(e,t){\"writable\"!==e._writableState?Cr(e):qr(e,t)}function qr(e,t){e._writableState=\"erroring\",e._writableStoredError=t,!function(e){return e._writableHasInFlightOperation}(e)&&e._writableStarted&&Cr(e)}function Cr(e){e._writableState=\"errored\"}function Pr(e){\"erroring\"===e._writableState&&Cr(e)}Object.defineProperties(TransformStreamDefaultController.prototype,{enqueue:{enumerable:!0},error:{enumerable:!0},terminate:{enumerable:!0},desiredSize:{enumerable:!0}}),a(TransformStreamDefaultController.prototype.enqueue,\"enqueue\"),a(TransformStreamDefaultController.prototype.error,\"error\"),a(TransformStreamDefaultController.prototype.terminate,\"terminate\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(TransformStreamDefaultController.prototype,t.toStringTag,{value:\"TransformStreamDefaultController\",configurable:!0}),e.ByteLengthQueuingStrategy=ByteLengthQueuingStrategy,e.CountQueuingStrategy=CountQueuingStrategy,e.ReadableByteStreamController=ReadableByteStreamController,e.ReadableStream=ReadableStream,e.ReadableStreamBYOBReader=ReadableStreamBYOBReader,e.ReadableStreamBYOBRequest=ReadableStreamBYOBRequest,e.ReadableStreamDefaultController=ReadableStreamDefaultController,e.ReadableStreamDefaultReader=ReadableStreamDefaultReader,e.TransformStream=TransformStream,e.TransformStreamDefaultController=TransformStreamDefaultController,e.WritableStream=WritableStream,e.WritableStreamDefaultController=WritableStreamDefaultController,e.WritableStreamDefaultWriter=WritableStreamDefaultWriter,Object.defineProperty(e,\"__esModule\",{value:!0})}));\n","/*!\n * humanize-ms - index.js\n * Copyright(c) 2014 dead_horse \n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module dependencies.\n */\n\nvar util = require('util');\nvar ms = require('ms');\n\nmodule.exports = function (t) {\n if (typeof t === 'number') return t;\n var r = ms(t);\n if (r === undefined) {\n var err = new Error(util.format('humanize-ms(%j) result undefined', t));\n console.warn(err.stack);\n }\n return r;\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === 
undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function (val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n","/*! node-domexception. MIT License. 
Jimmy Wärting */\n\nif (!globalThis.DOMException) {\n try {\n const { MessageChannel } = require('worker_threads'),\n port = new MessageChannel().port1,\n ab = new ArrayBuffer()\n port.postMessage(ab, [ab, ab])\n } catch (err) {\n err.constructor.name === 'DOMException' && (\n globalThis.DOMException = err.constructor\n )\n }\n}\n\nmodule.exports = globalThis.DOMException\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nconst isSameProtocol = function isSameProtocol(destination, original) {\n\tconst orig = new URL$1(original).protocol;\n\tconst dest = new URL$1(destination).protocol;\n\n\treturn orig === dest;\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\tdestroyStream(request.body, error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(req, function (err) {\n\t\t\tif (signal && signal.aborted) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (parseInt(process.version.substring(1)) < 14) {\n\t\t\t// Before 
Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\treq.on('socket', function (s) {\n\t\t\t\ts.addListener('close', function (hadError) {\n\t\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\t\tconst hasDataListener = s.listenerCount('data') > 0;\n\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && hasDataListener && !hadError && !(signal && signal.aborted)) {\n\t\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', err);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: 
request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.on('end', function () {\n\t\t\t\t\t// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tlet socket;\n\n\trequest.on('socket', function (s) {\n\t\tsocket = s;\n\t});\n\n\trequest.on('response', function (response) {\n\t\tconst headers = response.headers;\n\n\t\tif (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {\n\t\t\tresponse.once('close', function (hadError) {\n\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\tconst hasDataListener = socket.listenerCount('data') > 0;\n\n\t\t\t\tif (hasDataListener && !hadError) {\n\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\terrorCallback(err);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n}\n\nfunction destroyStream(stream, err) {\n\tif (stream.destroy) {\n\t\tstream.destroy(err);\n\t} else {\n\t\t// node < 8\n\t\tstream.emit('error', err);\n\t\tstream.end();\n\t}\n}\n\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = 
fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","\"use strict\";function _typeof(obj){\"@babel/helpers - typeof\";return _typeof=\"function\"==typeof Symbol&&\"symbol\"==typeof Symbol.iterator?function(obj){return typeof obj}:function(obj){return obj&&\"function\"==typeof Symbol&&obj.constructor===Symbol&&obj!==Symbol.prototype?\"symbol\":typeof obj},_typeof(obj)}function _createForOfIteratorHelper(o,allowArrayLike){var it=typeof Symbol!==\"undefined\"&&o[Symbol.iterator]||o[\"@@iterator\"];if(!it){if(Array.isArray(o)||(it=_unsupportedIterableToArray(o))||allowArrayLike&&o&&typeof o.length===\"number\"){if(it)o=it;var i=0;var F=function F(){};return{s:F,n:function n(){if(i>=o.length)return{done:true};return{done:false,value:o[i++]}},e:function e(_e2){throw _e2},f:F}}throw new TypeError(\"Invalid attempt to iterate non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\")}var normalCompletion=true,didErr=false,err;return{s:function s(){it=it.call(o)},n:function n(){var step=it.next();normalCompletion=step.done;return step},e:function e(_e3){didErr=true;err=_e3},f:function f(){try{if(!normalCompletion&&it[\"return\"]!=null)it[\"return\"]()}finally{if(didErr)throw err}}}}function _defineProperty(obj,key,value){key=_toPropertyKey(key);if(key in obj){Object.defineProperty(obj,key,{value:value,enumerable:true,configurable:true,writable:true})}else{obj[key]=value}return obj}function _toPropertyKey(arg){var key=_toPrimitive(arg,\"string\");return _typeof(key)===\"symbol\"?key:String(key)}function _toPrimitive(input,hint){if(_typeof(input)!==\"object\"||input===null)return input;var prim=input[Symbol.toPrimitive];if(prim!==undefined){var res=prim.call(input,hint||\"default\");if(_typeof(res)!==\"object\")return res;throw new TypeError(\"@@toPrimitive must return a primitive value.\")}return(hint===\"string\"?String:Number)(input)}function _slicedToArray(arr,i){return _arrayWithHoles(arr)||_iterableToArrayLimit(arr,i)||_unsupportedIterableToArray(arr,i)||_nonIterableRest()}function _nonIterableRest(){throw new TypeError(\"Invalid attempt to destructure non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\")}function _unsupportedIterableToArray(o,minLen){if(!o)return;if(typeof o===\"string\")return _arrayLikeToArray(o,minLen);var 
n=Object.prototype.toString.call(o).slice(8,-1);if(n===\"Object\"&&o.constructor)n=o.constructor.name;if(n===\"Map\"||n===\"Set\")return Array.from(o);if(n===\"Arguments\"||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return _arrayLikeToArray(o,minLen)}function _arrayLikeToArray(arr,len){if(len==null||len>arr.length)len=arr.length;for(var i=0,arr2=new Array(len);i= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","/**\n * web-streams-polyfill v3.2.1\n */\n(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n typeof define === 'function' && define.amd ? define(['exports'], factory) :\n (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.WebStreamsPolyfill = {}));\n}(this, (function (exports) { 'use strict';\n\n /// \n const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ?\n Symbol :\n description => `Symbol(${description})`;\n\n /// \n function noop() {\n return undefined;\n }\n function getGlobals() {\n if (typeof self !== 'undefined') {\n return self;\n }\n else if (typeof window !== 'undefined') {\n return window;\n }\n else if (typeof global !== 'undefined') {\n return global;\n }\n return undefined;\n }\n const globals = getGlobals();\n\n function typeIsObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n const rethrowAssertionErrorRejection = noop;\n\n const originalPromise = Promise;\n const originalPromiseThen = Promise.prototype.then;\n const originalPromiseResolve = Promise.resolve.bind(originalPromise);\n const originalPromiseReject = Promise.reject.bind(originalPromise);\n function newPromise(executor) {\n return new originalPromise(executor);\n }\n function promiseResolvedWith(value) {\n return originalPromiseResolve(value);\n }\n function promiseRejectedWith(reason) {\n return originalPromiseReject(reason);\n }\n function PerformPromiseThen(promise, onFulfilled, onRejected) {\n // There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an\n // approximation.\n return originalPromiseThen.call(promise, onFulfilled, onRejected);\n }\n function uponPromise(promise, onFulfilled, onRejected) {\n PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), undefined, rethrowAssertionErrorRejection);\n }\n function uponFulfillment(promise, onFulfilled) {\n uponPromise(promise, onFulfilled);\n }\n function uponRejection(promise, onRejected) {\n uponPromise(promise, undefined, onRejected);\n }\n function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) {\n return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler);\n }\n function setPromiseIsHandledToTrue(promise) {\n PerformPromiseThen(promise, undefined, rethrowAssertionErrorRejection);\n }\n const queueMicrotask = (() => {\n const globalQueueMicrotask = globals && globals.queueMicrotask;\n if (typeof globalQueueMicrotask === 'function') {\n return globalQueueMicrotask;\n }\n const resolvedPromise = promiseResolvedWith(undefined);\n return (fn) => PerformPromiseThen(resolvedPromise, fn);\n })();\n function reflectCall(F, V, args) {\n if (typeof F !== 'function') {\n throw new TypeError('Argument is not a 
function');\n }\n return Function.prototype.apply.call(F, V, args);\n }\n function promiseCall(F, V, args) {\n try {\n return promiseResolvedWith(reflectCall(F, V, args));\n }\n catch (value) {\n return promiseRejectedWith(value);\n }\n }\n\n // Original from Chromium\n // https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js\n const QUEUE_MAX_ARRAY_SIZE = 16384;\n /**\n * Simple queue structure.\n *\n * Avoids scalability issues with using a packed array directly by using\n * multiple arrays in a linked list and keeping the array size bounded.\n */\n class SimpleQueue {\n constructor() {\n this._cursor = 0;\n this._size = 0;\n // _front and _back are always defined.\n this._front = {\n _elements: [],\n _next: undefined\n };\n this._back = this._front;\n // The cursor is used to avoid calling Array.shift().\n // It contains the index of the front element of the array inside the\n // front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE).\n this._cursor = 0;\n // When there is only one node, size === elements.length - cursor.\n this._size = 0;\n }\n get length() {\n return this._size;\n }\n // For exception safety, this method is structured in order:\n // 1. Read state\n // 2. Calculate required state mutations\n // 3. Perform state mutations\n push(element) {\n const oldBack = this._back;\n let newBack = oldBack;\n if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) {\n newBack = {\n _elements: [],\n _next: undefined\n };\n }\n // push() is the mutation most likely to throw an exception, so it\n // goes first.\n oldBack._elements.push(element);\n if (newBack !== oldBack) {\n this._back = newBack;\n oldBack._next = newBack;\n }\n ++this._size;\n }\n // Like push(), shift() follows the read -> calculate -> mutate pattern for\n // exception safety.\n shift() { // must not be called on an empty queue\n const oldFront = this._front;\n let newFront = oldFront;\n const oldCursor = this._cursor;\n let newCursor = oldCursor + 1;\n const elements = oldFront._elements;\n const element = elements[oldCursor];\n if (newCursor === QUEUE_MAX_ARRAY_SIZE) {\n newFront = oldFront._next;\n newCursor = 0;\n }\n // No mutations before this point.\n --this._size;\n this._cursor = newCursor;\n if (oldFront !== newFront) {\n this._front = newFront;\n }\n // Permit shifted element to be garbage collected.\n elements[oldCursor] = undefined;\n return element;\n }\n // The tricky thing about forEach() is that it can be called\n // re-entrantly. The queue may be mutated inside the callback. It is easy to\n // see that push() within the callback has no negative effects since the end\n // of the queue is checked for on every iteration. If shift() is called\n // repeatedly within the callback then the next iteration may return an\n // element that has been removed. 
In this case the callback will be called\n // with undefined values until we either \"catch up\" with elements that still\n // exist or reach the back of the queue.\n forEach(callback) {\n let i = this._cursor;\n let node = this._front;\n let elements = node._elements;\n while (i !== elements.length || node._next !== undefined) {\n if (i === elements.length) {\n node = node._next;\n elements = node._elements;\n i = 0;\n if (elements.length === 0) {\n break;\n }\n }\n callback(elements[i]);\n ++i;\n }\n }\n // Return the element that would be returned if shift() was called now,\n // without modifying the queue.\n peek() { // must not be called on an empty queue\n const front = this._front;\n const cursor = this._cursor;\n return front._elements[cursor];\n }\n }\n\n function ReadableStreamReaderGenericInitialize(reader, stream) {\n reader._ownerReadableStream = stream;\n stream._reader = reader;\n if (stream._state === 'readable') {\n defaultReaderClosedPromiseInitialize(reader);\n }\n else if (stream._state === 'closed') {\n defaultReaderClosedPromiseInitializeAsResolved(reader);\n }\n else {\n defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);\n }\n }\n // A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state\n // check.\n function ReadableStreamReaderGenericCancel(reader, reason) {\n const stream = reader._ownerReadableStream;\n return ReadableStreamCancel(stream, reason);\n }\n function ReadableStreamReaderGenericRelease(reader) {\n if (reader._ownerReadableStream._state === 'readable') {\n defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n else {\n defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n reader._ownerReadableStream._reader = undefined;\n reader._ownerReadableStream = undefined;\n }\n // Helper functions for the readers.\n function readerLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released reader');\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderClosedPromiseInitialize(reader) {\n reader._closedPromise = newPromise((resolve, reject) => {\n reader._closedPromise_resolve = resolve;\n reader._closedPromise_reject = reject;\n });\n }\n function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseReject(reader, reason);\n }\n function defaultReaderClosedPromiseInitializeAsResolved(reader) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseResolve(reader);\n }\n function defaultReaderClosedPromiseReject(reader, reason) {\n if (reader._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(reader._closedPromise);\n reader._closedPromise_reject(reason);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n function defaultReaderClosedPromiseResetToRejected(reader, reason) {\n defaultReaderClosedPromiseInitializeAsRejected(reader, reason);\n }\n function defaultReaderClosedPromiseResolve(reader) {\n if (reader._closedPromise_resolve === undefined) {\n return;\n }\n reader._closedPromise_resolve(undefined);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n\n const AbortSteps = 
SymbolPolyfill('[[AbortSteps]]');\n const ErrorSteps = SymbolPolyfill('[[ErrorSteps]]');\n const CancelSteps = SymbolPolyfill('[[CancelSteps]]');\n const PullSteps = SymbolPolyfill('[[PullSteps]]');\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill\n const NumberIsFinite = Number.isFinite || function (x) {\n return typeof x === 'number' && isFinite(x);\n };\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill\n const MathTrunc = Math.trunc || function (v) {\n return v < 0 ? Math.ceil(v) : Math.floor(v);\n };\n\n // https://heycam.github.io/webidl/#idl-dictionaries\n function isDictionary(x) {\n return typeof x === 'object' || typeof x === 'function';\n }\n function assertDictionary(obj, context) {\n if (obj !== undefined && !isDictionary(obj)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-callback-functions\n function assertFunction(x, context) {\n if (typeof x !== 'function') {\n throw new TypeError(`${context} is not a function.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-object\n function isObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n function assertObject(x, context) {\n if (!isObject(x)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n function assertRequiredArgument(x, position, context) {\n if (x === undefined) {\n throw new TypeError(`Parameter ${position} is required in '${context}'.`);\n }\n }\n function assertRequiredField(x, field, context) {\n if (x === undefined) {\n throw new TypeError(`${field} is required in '${context}'.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-unrestricted-double\n function convertUnrestrictedDouble(value) {\n return Number(value);\n }\n function censorNegativeZero(x) {\n return x === 0 ? 
0 : x;\n }\n function integerPart(x) {\n return censorNegativeZero(MathTrunc(x));\n }\n // https://heycam.github.io/webidl/#idl-unsigned-long-long\n function convertUnsignedLongLongWithEnforceRange(value, context) {\n const lowerBound = 0;\n const upperBound = Number.MAX_SAFE_INTEGER;\n let x = Number(value);\n x = censorNegativeZero(x);\n if (!NumberIsFinite(x)) {\n throw new TypeError(`${context} is not a finite number`);\n }\n x = integerPart(x);\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(`${context} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`);\n }\n if (!NumberIsFinite(x) || x === 0) {\n return 0;\n }\n // TODO Use BigInt if supported?\n // let xBigInt = BigInt(integerPart(x));\n // xBigInt = BigInt.asUintN(64, xBigInt);\n // return Number(xBigInt);\n return x;\n }\n\n function assertReadableStream(x, context) {\n if (!IsReadableStream(x)) {\n throw new TypeError(`${context} is not a ReadableStream.`);\n }\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamDefaultReader(stream) {\n return new ReadableStreamDefaultReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadRequest(stream, readRequest) {\n stream._reader._readRequests.push(readRequest);\n }\n function ReadableStreamFulfillReadRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readRequest = reader._readRequests.shift();\n if (done) {\n readRequest._closeSteps();\n }\n else {\n readRequest._chunkSteps(chunk);\n }\n }\n function ReadableStreamGetNumReadRequests(stream) {\n return stream._reader._readRequests.length;\n }\n function ReadableStreamHasDefaultReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamDefaultReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A default reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamDefaultReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamDefaultReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed,\n * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Returns a promise that allows access to the next chunk from the stream's internal queue, if available.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read() {\n if (!IsReadableStreamDefaultReader(this)) {\n return 
promiseRejectedWith(defaultReaderBrandCheckException('read'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: () => resolvePromise({ value: undefined, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamDefaultReaderRead(this, readRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamDefaultReader(this)) {\n throw defaultReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamDefaultReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamDefaultReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultReader;\n }\n function ReadableStreamDefaultReaderRead(reader, readRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'closed') {\n readRequest._closeSteps();\n }\n else if (stream._state === 'errored') {\n readRequest._errorSteps(stream._storedError);\n }\n else {\n stream._readableStreamController[PullSteps](readRequest);\n }\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`);\n }\n\n /// \n /* eslint-disable @typescript-eslint/no-empty-function */\n const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { }).prototype);\n\n /// \n class ReadableStreamAsyncIteratorImpl {\n constructor(reader, preventCancel) {\n this._ongoingPromise = undefined;\n this._isFinished = false;\n this._reader = reader;\n this._preventCancel = preventCancel;\n }\n next() {\n const nextSteps = () => this._nextSteps();\n this._ongoingPromise = this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) :\n nextSteps();\n return 
this._ongoingPromise;\n }\n return(value) {\n const returnSteps = () => this._returnSteps(value);\n return this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) :\n returnSteps();\n }\n _nextSteps() {\n if (this._isFinished) {\n return Promise.resolve({ value: undefined, done: true });\n }\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('iterate'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => {\n this._ongoingPromise = undefined;\n // This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test.\n // FIXME Is this a bug in the specification, or in the test?\n queueMicrotask(() => resolvePromise({ value: chunk, done: false }));\n },\n _closeSteps: () => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n resolvePromise({ value: undefined, done: true });\n },\n _errorSteps: reason => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n rejectPromise(reason);\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promise;\n }\n _returnSteps(value) {\n if (this._isFinished) {\n return Promise.resolve({ value, done: true });\n }\n this._isFinished = true;\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('finish iterating'));\n }\n if (!this._preventCancel) {\n const result = ReadableStreamReaderGenericCancel(reader, value);\n ReadableStreamReaderGenericRelease(reader);\n return transformPromiseWith(result, () => ({ value, done: true }));\n }\n ReadableStreamReaderGenericRelease(reader);\n return promiseResolvedWith({ value, done: true });\n }\n }\n const ReadableStreamAsyncIteratorPrototype = {\n next() {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('next'));\n }\n return this._asyncIteratorImpl.next();\n },\n return(value) {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('return'));\n }\n return this._asyncIteratorImpl.return(value);\n }\n };\n if (AsyncIteratorPrototype !== undefined) {\n Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype);\n }\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamAsyncIterator(stream, preventCancel) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel);\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype);\n iterator._asyncIteratorImpl = impl;\n return iterator;\n }\n function IsReadableStreamAsyncIterator(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_asyncIteratorImpl')) {\n return false;\n }\n try {\n // noinspection SuspiciousTypeOfGuard\n return x._asyncIteratorImpl instanceof\n ReadableStreamAsyncIteratorImpl;\n }\n catch (_a) {\n return false;\n }\n }\n // Helper functions for the ReadableStream.\n function streamAsyncIteratorBrandCheckException(name) {\n return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`);\n }\n\n 
/// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill\n const NumberIsNaN = Number.isNaN || function (x) {\n // eslint-disable-next-line no-self-compare\n return x !== x;\n };\n\n function CreateArrayFromList(elements) {\n // We use arrays to represent lists, so this is basically a no-op.\n // Do a slice though just in case we happen to depend on the unique-ness.\n return elements.slice();\n }\n function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) {\n new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);\n }\n // Not implemented correctly\n function TransferArrayBuffer(O) {\n return O;\n }\n // Not implemented correctly\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n function IsDetachedBuffer(O) {\n return false;\n }\n function ArrayBufferSlice(buffer, begin, end) {\n // ArrayBuffer.prototype.slice is not available on IE10\n // https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice\n if (buffer.slice) {\n return buffer.slice(begin, end);\n }\n const length = end - begin;\n const slice = new ArrayBuffer(length);\n CopyDataBlockBytes(slice, 0, buffer, begin, length);\n return slice;\n }\n\n function IsNonNegativeNumber(v) {\n if (typeof v !== 'number') {\n return false;\n }\n if (NumberIsNaN(v)) {\n return false;\n }\n if (v < 0) {\n return false;\n }\n return true;\n }\n function CloneAsUint8Array(O) {\n const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength);\n return new Uint8Array(buffer);\n }\n\n function DequeueValue(container) {\n const pair = container._queue.shift();\n container._queueTotalSize -= pair.size;\n if (container._queueTotalSize < 0) {\n container._queueTotalSize = 0;\n }\n return pair.value;\n }\n function EnqueueValueWithSize(container, value, size) {\n if (!IsNonNegativeNumber(size) || size === Infinity) {\n throw new RangeError('Size must be a finite, non-NaN, non-negative number.');\n }\n container._queue.push({ value, size });\n container._queueTotalSize += size;\n }\n function PeekQueueValue(container) {\n const pair = container._queue.peek();\n return pair.value;\n }\n function ResetQueue(container) {\n container._queue = new SimpleQueue();\n container._queueTotalSize = 0;\n }\n\n /**\n * A pull-into request in a {@link ReadableByteStreamController}.\n *\n * @public\n */\n class ReadableStreamBYOBRequest {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the view for writing in to, or `null` if the BYOB request has already been responded to.\n */\n get view() {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('view');\n }\n return this._view;\n }\n respond(bytesWritten) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respond');\n }\n assertRequiredArgument(bytesWritten, 1, 'respond');\n bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, 'First parameter');\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(this._view.buffer)) ;\n ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);\n }\n respondWithNewView(view) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respondWithNewView');\n }\n assertRequiredArgument(view, 1, 'respondWithNewView');\n if (!ArrayBuffer.isView(view)) {\n throw new TypeError('You can 
only respond with array buffer views');\n }\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(view.buffer)) ;\n ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);\n }\n }\n Object.defineProperties(ReadableStreamBYOBRequest.prototype, {\n respond: { enumerable: true },\n respondWithNewView: { enumerable: true },\n view: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBRequest.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBRequest',\n configurable: true\n });\n }\n /**\n * Allows control of a {@link ReadableStream | readable byte stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableByteStreamController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the current BYOB pull request, or `null` if there isn't one.\n */\n get byobRequest() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('byobRequest');\n }\n return ReadableByteStreamControllerGetBYOBRequest(this);\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('desiredSize');\n }\n return ReadableByteStreamControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('close');\n }\n if (this._closeRequested) {\n throw new TypeError('The stream has already been closed; do not close it again!');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`);\n }\n ReadableByteStreamControllerClose(this);\n }\n enqueue(chunk) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('enqueue');\n }\n assertRequiredArgument(chunk, 1, 'enqueue');\n if (!ArrayBuffer.isView(chunk)) {\n throw new TypeError('chunk must be an array buffer view');\n }\n if (chunk.byteLength === 0) {\n throw new TypeError('chunk must have non-zero byteLength');\n }\n if (chunk.buffer.byteLength === 0) {\n throw new TypeError(`chunk's buffer must have non-zero byteLength`);\n }\n if (this._closeRequested) {\n throw new TypeError('stream is closed or draining');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`);\n }\n ReadableByteStreamControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('error');\n }\n ReadableByteStreamControllerError(this, e);\n }\n /** @internal */\n 
[CancelSteps](reason) {\n ReadableByteStreamControllerClearPendingPullIntos(this);\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableByteStreamControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableByteStream;\n if (this._queueTotalSize > 0) {\n const entry = this._queue.shift();\n this._queueTotalSize -= entry.byteLength;\n ReadableByteStreamControllerHandleQueueDrain(this);\n const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);\n readRequest._chunkSteps(view);\n return;\n }\n const autoAllocateChunkSize = this._autoAllocateChunkSize;\n if (autoAllocateChunkSize !== undefined) {\n let buffer;\n try {\n buffer = new ArrayBuffer(autoAllocateChunkSize);\n }\n catch (bufferE) {\n readRequest._errorSteps(bufferE);\n return;\n }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: autoAllocateChunkSize,\n byteOffset: 0,\n byteLength: autoAllocateChunkSize,\n bytesFilled: 0,\n elementSize: 1,\n viewConstructor: Uint8Array,\n readerType: 'default'\n };\n this._pendingPullIntos.push(pullIntoDescriptor);\n }\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableByteStreamControllerCallPullIfNeeded(this);\n }\n }\n Object.defineProperties(ReadableByteStreamController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n byobRequest: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableByteStreamController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableByteStreamController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableByteStreamController.\n function IsReadableByteStreamController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableByteStream')) {\n return false;\n }\n return x instanceof ReadableByteStreamController;\n }\n function IsReadableStreamBYOBRequest(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBRequest;\n }\n function ReadableByteStreamControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableByteStreamControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n // TODO: Test controller argument\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableByteStreamControllerError(controller, e);\n });\n }\n function ReadableByteStreamControllerClearPendingPullIntos(controller) {\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n controller._pendingPullIntos = new SimpleQueue();\n }\n function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {\n let done = false;\n if (stream._state === 'closed') {\n done = true;\n }\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n if (pullIntoDescriptor.readerType === 'default') {\n ReadableStreamFulfillReadRequest(stream, filledView, done);\n }\n else {\n 
ReadableStreamFulfillReadIntoRequest(stream, filledView, done);\n }\n }\n function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {\n const bytesFilled = pullIntoDescriptor.bytesFilled;\n const elementSize = pullIntoDescriptor.elementSize;\n return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);\n }\n function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {\n controller._queue.push({ buffer, byteOffset, byteLength });\n controller._queueTotalSize += byteLength;\n }\n function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {\n const elementSize = pullIntoDescriptor.elementSize;\n const currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;\n const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);\n const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;\n const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;\n let totalBytesToCopyRemaining = maxBytesToCopy;\n let ready = false;\n if (maxAlignedBytes > currentAlignedBytes) {\n totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;\n ready = true;\n }\n const queue = controller._queue;\n while (totalBytesToCopyRemaining > 0) {\n const headOfQueue = queue.peek();\n const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);\n const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);\n if (headOfQueue.byteLength === bytesToCopy) {\n queue.shift();\n }\n else {\n headOfQueue.byteOffset += bytesToCopy;\n headOfQueue.byteLength -= bytesToCopy;\n }\n controller._queueTotalSize -= bytesToCopy;\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);\n totalBytesToCopyRemaining -= bytesToCopy;\n }\n return ready;\n }\n function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {\n pullIntoDescriptor.bytesFilled += size;\n }\n function ReadableByteStreamControllerHandleQueueDrain(controller) {\n if (controller._queueTotalSize === 0 && controller._closeRequested) {\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(controller._controlledReadableByteStream);\n }\n else {\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }\n function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {\n if (controller._byobRequest === null) {\n return;\n }\n controller._byobRequest._associatedReadableByteStreamController = undefined;\n controller._byobRequest._view = null;\n controller._byobRequest = null;\n }\n function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {\n while (controller._pendingPullIntos.length > 0) {\n if (controller._queueTotalSize === 0) {\n return;\n }\n const pullIntoDescriptor = controller._pendingPullIntos.peek();\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n }\n }\n }\n function 
ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {\n const stream = controller._controlledReadableByteStream;\n let elementSize = 1;\n if (view.constructor !== DataView) {\n elementSize = view.constructor.BYTES_PER_ELEMENT;\n }\n const ctor = view.constructor;\n // try {\n const buffer = TransferArrayBuffer(view.buffer);\n // } catch (e) {\n // readIntoRequest._errorSteps(e);\n // return;\n // }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: buffer.byteLength,\n byteOffset: view.byteOffset,\n byteLength: view.byteLength,\n bytesFilled: 0,\n elementSize,\n viewConstructor: ctor,\n readerType: 'byob'\n };\n if (controller._pendingPullIntos.length > 0) {\n controller._pendingPullIntos.push(pullIntoDescriptor);\n // No ReadableByteStreamControllerCallPullIfNeeded() call since:\n // - No change happens on desiredSize\n // - The source has already been notified of that there's at least 1 pending read(view)\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n return;\n }\n if (stream._state === 'closed') {\n const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);\n readIntoRequest._closeSteps(emptyView);\n return;\n }\n if (controller._queueTotalSize > 0) {\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n ReadableByteStreamControllerHandleQueueDrain(controller);\n readIntoRequest._chunkSteps(filledView);\n return;\n }\n if (controller._closeRequested) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n readIntoRequest._errorSteps(e);\n return;\n }\n }\n controller._pendingPullIntos.push(pullIntoDescriptor);\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {\n const stream = controller._controlledReadableByteStream;\n if (ReadableStreamHasBYOBReader(stream)) {\n while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);\n }\n }\n }\n function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);\n if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {\n return;\n }\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;\n if (remainderSize > 0) {\n const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n const remainder = ArrayBufferSlice(pullIntoDescriptor.buffer, end - remainderSize, end);\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);\n }\n pullIntoDescriptor.bytesFilled -= remainderSize;\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {\n const firstDescriptor = 
controller._pendingPullIntos.peek();\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n ReadableByteStreamControllerRespondInClosedState(controller);\n }\n else {\n ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerShiftPendingPullInto(controller) {\n const descriptor = controller._pendingPullIntos.shift();\n return descriptor;\n }\n function ReadableByteStreamControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return false;\n }\n if (controller._closeRequested) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableByteStreamControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n }\n // A client of ReadableByteStreamController may use these functions directly to bypass state check.\n function ReadableByteStreamControllerClose(controller) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n if (controller._queueTotalSize > 0) {\n controller._closeRequested = true;\n return;\n }\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (firstPendingPullInto.bytesFilled > 0) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n throw e;\n }\n }\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n function ReadableByteStreamControllerEnqueue(controller, chunk) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n const buffer = chunk.buffer;\n const byteOffset = chunk.byteOffset;\n const byteLength = chunk.byteLength;\n const transferredBuffer = TransferArrayBuffer(buffer);\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (IsDetachedBuffer(firstPendingPullInto.buffer)) ;\n firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer);\n }\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n if (ReadableStreamHasDefaultReader(stream)) {\n if (ReadableStreamGetNumReadRequests(stream) === 0) {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n else {\n if (controller._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n }\n const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);\n ReadableStreamFulfillReadRequest(stream, transferredView, false);\n }\n }\n else if (ReadableStreamHasBYOBReader(stream)) {\n // TODO: Ideally in this branch detaching should happen only if the buffer is 
not consumed fully.\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n else {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerError(controller, e) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return;\n }\n ReadableByteStreamControllerClearPendingPullIntos(controller);\n ResetQueue(controller);\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableByteStreamControllerGetBYOBRequest(controller) {\n if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);\n const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);\n SetUpReadableStreamBYOBRequest(byobRequest, controller, view);\n controller._byobRequest = byobRequest;\n }\n return controller._byobRequest;\n }\n function ReadableByteStreamControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableByteStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function ReadableByteStreamControllerRespond(controller, bytesWritten) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (bytesWritten !== 0) {\n throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');\n }\n }\n else {\n if (bytesWritten === 0) {\n throw new TypeError('bytesWritten must be greater than 0 when calling respond() on a readable stream');\n }\n if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) {\n throw new RangeError('bytesWritten out of range');\n }\n }\n firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);\n ReadableByteStreamControllerRespondInternal(controller, bytesWritten);\n }\n function ReadableByteStreamControllerRespondWithNewView(controller, view) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (view.byteLength !== 0) {\n throw new TypeError('The view\\'s length must be 0 when calling respondWithNewView() on a closed stream');\n }\n }\n else {\n if (view.byteLength === 0) {\n throw new TypeError('The view\\'s length must be greater than 0 when calling respondWithNewView() on a readable stream');\n }\n }\n if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {\n throw new RangeError('The region specified by view does not match byobRequest');\n }\n if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {\n throw new RangeError('The buffer of view has different capacity than byobRequest');\n }\n if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {\n throw new RangeError('The region specified by view is larger than byobRequest');\n }\n const viewByteLength = view.byteLength;\n 
firstDescriptor.buffer = TransferArrayBuffer(view.buffer);\n ReadableByteStreamControllerRespondInternal(controller, viewByteLength);\n }\n function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {\n controller._controlledReadableByteStream = stream;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._byobRequest = null;\n // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.\n controller._queue = controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._closeRequested = false;\n controller._started = false;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n controller._autoAllocateChunkSize = autoAllocateChunkSize;\n controller._pendingPullIntos = new SimpleQueue();\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n controller._started = true;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableByteStreamControllerError(controller, r);\n });\n }\n function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) {\n const controller = Object.create(ReadableByteStreamController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingByteSource.start !== undefined) {\n startAlgorithm = () => underlyingByteSource.start(controller);\n }\n if (underlyingByteSource.pull !== undefined) {\n pullAlgorithm = () => underlyingByteSource.pull(controller);\n }\n if (underlyingByteSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingByteSource.cancel(reason);\n }\n const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;\n if (autoAllocateChunkSize === 0) {\n throw new TypeError('autoAllocateChunkSize must be greater than 0');\n }\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);\n }\n function SetUpReadableStreamBYOBRequest(request, controller, view) {\n request._associatedReadableByteStreamController = controller;\n request._view = view;\n }\n // Helper functions for the ReadableStreamBYOBRequest.\n function byobRequestBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);\n }\n // Helper functions for the ReadableByteStreamController.\n function byteStreamControllerBrandCheckException(name) {\n return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`);\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamBYOBReader(stream) {\n return new ReadableStreamBYOBReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) {\n stream._reader._readIntoRequests.push(readIntoRequest);\n }\n function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readIntoRequest = reader._readIntoRequests.shift();\n if (done) {\n readIntoRequest._closeSteps(chunk);\n }\n else {\n readIntoRequest._chunkSteps(chunk);\n 
}\n }\n function ReadableStreamGetNumReadIntoRequests(stream) {\n return stream._reader._readIntoRequests.length;\n }\n function ReadableStreamHasBYOBReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamBYOBReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A BYOB reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamBYOBReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamBYOBReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n if (!IsReadableByteStreamController(stream._readableStreamController)) {\n throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' +\n 'source');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readIntoRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Attempts to reads bytes into view, and returns a promise resolved with the result.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read(view) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('read'));\n }\n if (!ArrayBuffer.isView(view)) {\n return promiseRejectedWith(new TypeError('view must be an array buffer view'));\n }\n if (view.byteLength === 0) {\n return promiseRejectedWith(new TypeError('view must have non-zero byteLength'));\n }\n if (view.buffer.byteLength === 0) {\n return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));\n }\n if (IsDetachedBuffer(view.buffer)) ;\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readIntoRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: chunk => resolvePromise({ value: chunk, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamBYOBReaderRead(this, view, readIntoRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. 
After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamBYOBReader(this)) {\n throw byobReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readIntoRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamBYOBReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamBYOBReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBReader;\n }\n function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'errored') {\n readIntoRequest._errorSteps(stream._storedError);\n }\n else {\n ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest);\n }\n }\n // Helper functions for the ReadableStreamBYOBReader.\n function byobReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`);\n }\n\n function ExtractHighWaterMark(strategy, defaultHWM) {\n const { highWaterMark } = strategy;\n if (highWaterMark === undefined) {\n return defaultHWM;\n }\n if (NumberIsNaN(highWaterMark) || highWaterMark < 0) {\n throw new RangeError('Invalid highWaterMark');\n }\n return highWaterMark;\n }\n function ExtractSizeAlgorithm(strategy) {\n const { size } = strategy;\n if (!size) {\n return () => 1;\n }\n return size;\n }\n\n function convertQueuingStrategy(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n const size = init === null || init === void 0 ? void 0 : init.size;\n return {\n highWaterMark: highWaterMark === undefined ? undefined : convertUnrestrictedDouble(highWaterMark),\n size: size === undefined ? undefined : convertQueuingStrategySize(size, `${context} has member 'size' that`)\n };\n }\n function convertQueuingStrategySize(fn, context) {\n assertFunction(fn, context);\n return chunk => convertUnrestrictedDouble(fn(chunk));\n }\n\n function convertUnderlyingSink(original, context) {\n assertDictionary(original, context);\n const abort = original === null || original === void 0 ? void 0 : original.abort;\n const close = original === null || original === void 0 ? 
void 0 : original.close;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const type = original === null || original === void 0 ? void 0 : original.type;\n const write = original === null || original === void 0 ? void 0 : original.write;\n return {\n abort: abort === undefined ?\n undefined :\n convertUnderlyingSinkAbortCallback(abort, original, `${context} has member 'abort' that`),\n close: close === undefined ?\n undefined :\n convertUnderlyingSinkCloseCallback(close, original, `${context} has member 'close' that`),\n start: start === undefined ?\n undefined :\n convertUnderlyingSinkStartCallback(start, original, `${context} has member 'start' that`),\n write: write === undefined ?\n undefined :\n convertUnderlyingSinkWriteCallback(write, original, `${context} has member 'write' that`),\n type\n };\n }\n function convertUnderlyingSinkAbortCallback(fn, original, context) {\n assertFunction(fn, context);\n return (reason) => promiseCall(fn, original, [reason]);\n }\n function convertUnderlyingSinkCloseCallback(fn, original, context) {\n assertFunction(fn, context);\n return () => promiseCall(fn, original, []);\n }\n function convertUnderlyingSinkStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertUnderlyingSinkWriteCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n function assertWritableStream(x, context) {\n if (!IsWritableStream(x)) {\n throw new TypeError(`${context} is not a WritableStream.`);\n }\n }\n\n function isAbortSignal(value) {\n if (typeof value !== 'object' || value === null) {\n return false;\n }\n try {\n return typeof value.aborted === 'boolean';\n }\n catch (_a) {\n // AbortSignal.prototype.aborted throws if its brand check fails\n return false;\n }\n }\n const supportsAbortController = typeof AbortController === 'function';\n /**\n * Construct a new AbortController, if supported by the platform.\n *\n * @internal\n */\n function createAbortController() {\n if (supportsAbortController) {\n return new AbortController();\n }\n return undefined;\n }\n\n /**\n * A writable stream represents a destination for data, into which you can write.\n *\n * @public\n */\n class WritableStream {\n constructor(rawUnderlyingSink = {}, rawStrategy = {}) {\n if (rawUnderlyingSink === undefined) {\n rawUnderlyingSink = null;\n }\n else {\n assertObject(rawUnderlyingSink, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSink = convertUnderlyingSink(rawUnderlyingSink, 'First parameter');\n InitializeWritableStream(this);\n const type = underlyingSink.type;\n if (type !== undefined) {\n throw new RangeError('Invalid type is specified');\n }\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm);\n }\n /**\n * Returns whether or not the writable stream is locked to a writer.\n */\n get locked() {\n if (!IsWritableStream(this)) {\n throw streamBrandCheckException$2('locked');\n }\n return IsWritableStreamLocked(this);\n }\n /**\n * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be\n * immediately moved to an errored state, with any queued-up writes discarded. 
This will also execute any abort\n * mechanism of the underlying sink.\n *\n * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled\n * that there was an error doing so. Additionally, it will reject with a `TypeError` (without attempting to cancel\n * the stream) if the stream is currently locked.\n */\n abort(reason = undefined) {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('abort'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot abort a stream that already has a writer'));\n }\n return WritableStreamAbort(this, reason);\n }\n /**\n * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its\n * close behavior. During this time any further attempts to write will fail (without erroring the stream).\n *\n * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream\n * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with\n * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked.\n */\n close() {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('close'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot close a stream that already has a writer'));\n }\n if (WritableStreamCloseQueuedOrInFlight(this)) {\n return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));\n }\n return WritableStreamClose(this);\n }\n /**\n * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. While the stream\n * is locked, no other writer can be acquired until this one is released.\n *\n * This functionality is especially useful for creating abstractions that desire the ability to write to a stream\n * without interruption or interleaving. 
[dist/index.js (generated bundle) continues: bundled web-streams-polyfill code — WritableStream, WritableStreamDefaultWriter, WritableStreamDefaultController, ReadableStreamPipeTo, ReadableStreamDefaultController, ReadableStreamTee/ReadableByteStreamTee, the ReadableStream class, and related underlying-source/option conversion helpers. This span is machine-generated build output rebuilt from src/ and is elided here; the extraction had flattened its newlines into literal "\n" escapes.]
ReadableStreamClose(stream) {\n stream._state = 'closed';\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseResolve(reader);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._closeSteps();\n });\n reader._readRequests = new SimpleQueue();\n }\n }\n function ReadableStreamError(stream, e) {\n stream._state = 'errored';\n stream._storedError = e;\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseReject(reader, e);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._errorSteps(e);\n });\n reader._readRequests = new SimpleQueue();\n }\n else {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._errorSteps(e);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n }\n // Helper functions for the ReadableStream.\n function streamBrandCheckException$1(name) {\n return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`);\n }\n\n function convertQueuingStrategyInit(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n assertRequiredField(highWaterMark, 'highWaterMark', 'QueuingStrategyInit');\n return {\n highWaterMark: convertUnrestrictedDouble(highWaterMark)\n };\n }\n\n // The size function must not have a prototype property nor be a constructor\n const byteLengthSizeFunction = (chunk) => {\n return chunk.byteLength;\n };\n try {\n Object.defineProperty(byteLengthSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of bytes in each chunk.\n *\n * @public\n */\n class ByteLengthQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'ByteLengthQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('highWaterMark');\n }\n return this._byteLengthQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by returning the value of its `byteLength` property.\n */\n get size() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('size');\n }\n return byteLengthSizeFunction;\n }\n }\n Object.defineProperties(ByteLengthQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ByteLengthQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'ByteLengthQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the ByteLengthQueuingStrategy.\n function byteLengthBrandCheckException(name) {\n return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`);\n }\n function IsByteLengthQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, 
'_byteLengthQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof ByteLengthQueuingStrategy;\n }\n\n // The size function must not have a prototype property nor be a constructor\n const countSizeFunction = () => {\n return 1;\n };\n try {\n Object.defineProperty(countSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of chunks.\n *\n * @public\n */\n class CountQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'CountQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._countQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('highWaterMark');\n }\n return this._countQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by always returning 1.\n * This ensures that the total queue size is a count of the number of chunks in the queue.\n */\n get size() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('size');\n }\n return countSizeFunction;\n }\n }\n Object.defineProperties(CountQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(CountQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'CountQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the CountQueuingStrategy.\n function countBrandCheckException(name) {\n return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`);\n }\n function IsCountQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_countQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof CountQueuingStrategy;\n }\n\n function convertTransformer(original, context) {\n assertDictionary(original, context);\n const flush = original === null || original === void 0 ? void 0 : original.flush;\n const readableType = original === null || original === void 0 ? void 0 : original.readableType;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const transform = original === null || original === void 0 ? void 0 : original.transform;\n const writableType = original === null || original === void 0 ? 
void 0 : original.writableType;\n return {\n flush: flush === undefined ?\n undefined :\n convertTransformerFlushCallback(flush, original, `${context} has member 'flush' that`),\n readableType,\n start: start === undefined ?\n undefined :\n convertTransformerStartCallback(start, original, `${context} has member 'start' that`),\n transform: transform === undefined ?\n undefined :\n convertTransformerTransformCallback(transform, original, `${context} has member 'transform' that`),\n writableType\n };\n }\n function convertTransformerFlushCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertTransformerStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertTransformerTransformCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n // Class TransformStream\n /**\n * A transform stream consists of a pair of streams: a {@link WritableStream | writable stream},\n * known as its writable side, and a {@link ReadableStream | readable stream}, known as its readable side.\n * In a manner specific to the transform stream in question, writes to the writable side result in new data being\n * made available for reading from the readable side.\n *\n * @public\n */\n class TransformStream {\n constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) {\n if (rawTransformer === undefined) {\n rawTransformer = null;\n }\n const writableStrategy = convertQueuingStrategy(rawWritableStrategy, 'Second parameter');\n const readableStrategy = convertQueuingStrategy(rawReadableStrategy, 'Third parameter');\n const transformer = convertTransformer(rawTransformer, 'First parameter');\n if (transformer.readableType !== undefined) {\n throw new RangeError('Invalid readableType specified');\n }\n if (transformer.writableType !== undefined) {\n throw new RangeError('Invalid writableType specified');\n }\n const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0);\n const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy);\n const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1);\n const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy);\n let startPromise_resolve;\n const startPromise = newPromise(resolve => {\n startPromise_resolve = resolve;\n });\n InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n SetUpTransformStreamDefaultControllerFromTransformer(this, transformer);\n if (transformer.start !== undefined) {\n startPromise_resolve(transformer.start(this._transformStreamController));\n }\n else {\n startPromise_resolve(undefined);\n }\n }\n /**\n * The readable side of the transform stream.\n */\n get readable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('readable');\n }\n return this._readable;\n }\n /**\n * The writable side of the transform stream.\n */\n get writable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('writable');\n }\n return this._writable;\n }\n }\n Object.defineProperties(TransformStream.prototype, {\n readable: { enumerable: true },\n writable: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStream.prototype, 
SymbolPolyfill.toStringTag, {\n value: 'TransformStream',\n configurable: true\n });\n }\n function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) {\n function startAlgorithm() {\n return startPromise;\n }\n function writeAlgorithm(chunk) {\n return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk);\n }\n function abortAlgorithm(reason) {\n return TransformStreamDefaultSinkAbortAlgorithm(stream, reason);\n }\n function closeAlgorithm() {\n return TransformStreamDefaultSinkCloseAlgorithm(stream);\n }\n stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm);\n function pullAlgorithm() {\n return TransformStreamDefaultSourcePullAlgorithm(stream);\n }\n function cancelAlgorithm(reason) {\n TransformStreamErrorWritableAndUnblockWrite(stream, reason);\n return promiseResolvedWith(undefined);\n }\n stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n // The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure.\n stream._backpressure = undefined;\n stream._backpressureChangePromise = undefined;\n stream._backpressureChangePromise_resolve = undefined;\n TransformStreamSetBackpressure(stream, true);\n stream._transformStreamController = undefined;\n }\n function IsTransformStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {\n return false;\n }\n return x instanceof TransformStream;\n }\n // This is a no-op if both sides are already errored.\n function TransformStreamError(stream, e) {\n ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e);\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n }\n function TransformStreamErrorWritableAndUnblockWrite(stream, e) {\n TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController);\n WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e);\n if (stream._backpressure) {\n // Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure()\n // cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will will be the final time\n // _backpressure is set.\n TransformStreamSetBackpressure(stream, false);\n }\n }\n function TransformStreamSetBackpressure(stream, backpressure) {\n // Passes also when called during construction.\n if (stream._backpressureChangePromise !== undefined) {\n stream._backpressureChangePromise_resolve();\n }\n stream._backpressureChangePromise = newPromise(resolve => {\n stream._backpressureChangePromise_resolve = resolve;\n });\n stream._backpressure = backpressure;\n }\n // Class TransformStreamDefaultController\n /**\n * Allows control of the {@link ReadableStream} and {@link WritableStream} of the associated {@link TransformStream}.\n *\n * @public\n */\n class TransformStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the readable side’s internal queue. 
It can be negative, if the queue is over-full.\n */\n get desiredSize() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('desiredSize');\n }\n const readableController = this._controlledTransformStream._readable._readableStreamController;\n return ReadableStreamDefaultControllerGetDesiredSize(readableController);\n }\n enqueue(chunk = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('enqueue');\n }\n TransformStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors both the readable side and the writable side of the controlled transform stream, making all future\n * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded.\n */\n error(reason = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('error');\n }\n TransformStreamDefaultControllerError(this, reason);\n }\n /**\n * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the\n * transformer only needs to consume a portion of the chunks written to the writable side.\n */\n terminate() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('terminate');\n }\n TransformStreamDefaultControllerTerminate(this);\n }\n }\n Object.defineProperties(TransformStreamDefaultController.prototype, {\n enqueue: { enumerable: true },\n error: { enumerable: true },\n terminate: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'TransformStreamDefaultController',\n configurable: true\n });\n }\n // Transform Stream Default Controller Abstract Operations\n function IsTransformStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {\n return false;\n }\n return x instanceof TransformStreamDefaultController;\n }\n function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm) {\n controller._controlledTransformStream = stream;\n stream._transformStreamController = controller;\n controller._transformAlgorithm = transformAlgorithm;\n controller._flushAlgorithm = flushAlgorithm;\n }\n function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) {\n const controller = Object.create(TransformStreamDefaultController.prototype);\n let transformAlgorithm = (chunk) => {\n try {\n TransformStreamDefaultControllerEnqueue(controller, chunk);\n return promiseResolvedWith(undefined);\n }\n catch (transformResultE) {\n return promiseRejectedWith(transformResultE);\n }\n };\n let flushAlgorithm = () => promiseResolvedWith(undefined);\n if (transformer.transform !== undefined) {\n transformAlgorithm = chunk => transformer.transform(chunk, controller);\n }\n if (transformer.flush !== undefined) {\n flushAlgorithm = () => transformer.flush(controller);\n }\n SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm);\n }\n function TransformStreamDefaultControllerClearAlgorithms(controller) {\n controller._transformAlgorithm = undefined;\n controller._flushAlgorithm = undefined;\n }\n function TransformStreamDefaultControllerEnqueue(controller, chunk) {\n const stream = 
controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) {\n throw new TypeError('Readable side is not in a state that permits enqueue');\n }\n // We throttle transform invocations based on the backpressure of the ReadableStream, but we still\n // accept TransformStreamDefaultControllerEnqueue() calls.\n try {\n ReadableStreamDefaultControllerEnqueue(readableController, chunk);\n }\n catch (e) {\n // This happens when readableStrategy.size() throws.\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n throw stream._readable._storedError;\n }\n const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController);\n if (backpressure !== stream._backpressure) {\n TransformStreamSetBackpressure(stream, true);\n }\n }\n function TransformStreamDefaultControllerError(controller, e) {\n TransformStreamError(controller._controlledTransformStream, e);\n }\n function TransformStreamDefaultControllerPerformTransform(controller, chunk) {\n const transformPromise = controller._transformAlgorithm(chunk);\n return transformPromiseWith(transformPromise, undefined, r => {\n TransformStreamError(controller._controlledTransformStream, r);\n throw r;\n });\n }\n function TransformStreamDefaultControllerTerminate(controller) {\n const stream = controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n ReadableStreamDefaultControllerClose(readableController);\n const error = new TypeError('TransformStream terminated');\n TransformStreamErrorWritableAndUnblockWrite(stream, error);\n }\n // TransformStreamDefaultSink Algorithms\n function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) {\n const controller = stream._transformStreamController;\n if (stream._backpressure) {\n const backpressureChangePromise = stream._backpressureChangePromise;\n return transformPromiseWith(backpressureChangePromise, () => {\n const writable = stream._writable;\n const state = writable._state;\n if (state === 'erroring') {\n throw writable._storedError;\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n });\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n }\n function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) {\n // abort() is not called synchronously, so it is possible for abort() to be called when the stream is already\n // errored.\n TransformStreamError(stream, reason);\n return promiseResolvedWith(undefined);\n }\n function TransformStreamDefaultSinkCloseAlgorithm(stream) {\n // stream._readable cannot change after construction, so caching it across a call to user code is safe.\n const readable = stream._readable;\n const controller = stream._transformStreamController;\n const flushPromise = controller._flushAlgorithm();\n TransformStreamDefaultControllerClearAlgorithms(controller);\n // Return a promise that is fulfilled with undefined on success.\n return transformPromiseWith(flushPromise, () => {\n if (readable._state === 'errored') {\n throw readable._storedError;\n }\n ReadableStreamDefaultControllerClose(readable._readableStreamController);\n }, r => {\n TransformStreamError(stream, r);\n throw readable._storedError;\n });\n }\n // TransformStreamDefaultSource Algorithms\n function TransformStreamDefaultSourcePullAlgorithm(stream) {\n // Invariant. 
Enforced by the promises returned by start() and pull().\n TransformStreamSetBackpressure(stream, false);\n // Prevent the next pull() call until there is backpressure.\n return stream._backpressureChangePromise;\n }\n // Helper functions for the TransformStreamDefaultController.\n function defaultControllerBrandCheckException(name) {\n return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`);\n }\n // Helper functions for the TransformStream.\n function streamBrandCheckException(name) {\n return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`);\n }\n\n exports.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy;\n exports.CountQueuingStrategy = CountQueuingStrategy;\n exports.ReadableByteStreamController = ReadableByteStreamController;\n exports.ReadableStream = ReadableStream;\n exports.ReadableStreamBYOBReader = ReadableStreamBYOBReader;\n exports.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest;\n exports.ReadableStreamDefaultController = ReadableStreamDefaultController;\n exports.ReadableStreamDefaultReader = ReadableStreamDefaultReader;\n exports.TransformStream = TransformStream;\n exports.TransformStreamDefaultController = TransformStreamDefaultController;\n exports.WritableStream = WritableStream;\n exports.WritableStreamDefaultController = WritableStreamDefaultController;\n exports.WritableStreamDefaultWriter = WritableStreamDefaultWriter;\n\n Object.defineProperty(exports, '__esModule', { value: true });\n\n})));\n//# sourceMappingURL=ponyfill.es2018.js.map\n","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n 
return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? 
v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: 
true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) ? 
undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n",null,"module.exports = require(\"assert\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"node:fs\");","module.exports = require(\"node:stream\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"worker_threads\");","module.exports = require(\"zlib\");","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === 
\"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Encoder = exports.FormDataEncoder = void 0;\nconst createBoundary_1 = __importDefault(require(\"./util/createBoundary\"));\nconst isPlainObject_1 = __importDefault(require(\"./util/isPlainObject\"));\nconst normalizeValue_1 = __importDefault(require(\"./util/normalizeValue\"));\nconst escapeName_1 = __importDefault(require(\"./util/escapeName\"));\nconst isFileLike_1 = require(\"./util/isFileLike\");\nconst isFormData_1 = require(\"./util/isFormData\");\nconst defaultOptions = {\n enableAdditionalHeaders: false\n};\nclass FormDataEncoder {\n constructor(form, boundaryOrOptions, options) {\n _FormDataEncoder_instances.add(this);\n _FormDataEncoder_CRLF.set(this, \"\\r\\n\");\n _FormDataEncoder_CRLF_BYTES.set(this, void 0);\n _FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0);\n _FormDataEncoder_DASHES.set(this, \"-\".repeat(2));\n _FormDataEncoder_encoder.set(this, new TextEncoder());\n _FormDataEncoder_footer.set(this, void 0);\n _FormDataEncoder_form.set(this, void 0);\n _FormDataEncoder_options.set(this, void 0);\n if (!(0, isFormData_1.isFormData)(form)) {\n throw new TypeError(\"Expected first argument to be a FormData instance.\");\n }\n let boundary;\n if ((0, isPlainObject_1.default)(boundaryOrOptions)) {\n options = boundaryOrOptions;\n }\n else {\n boundary = boundaryOrOptions;\n }\n if (!boundary) {\n boundary = (0, createBoundary_1.default)();\n }\n if (typeof boundary !== \"string\") {\n throw new TypeError(\"Expected boundary argument to be a string.\");\n }\n if (options && !(0, isPlainObject_1.default)(options)) {\n throw new TypeError(\"Expected options argument to be an object.\");\n }\n __classPrivateFieldSet(this, _FormDataEncoder_form, form, \"f\");\n __classPrivateFieldSet(this, _FormDataEncoder_options, { ...defaultOptions, ...options }, \"f\");\n __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode(__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")), \"f\");\n __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, __classPrivateFieldGet(this, 
_FormDataEncoder_CRLF_BYTES, \"f\").byteLength, \"f\");\n this.boundary = `form-data-boundary-${boundary}`;\n this.contentType = `multipart/form-data; boundary=${this.boundary}`;\n __classPrivateFieldSet(this, _FormDataEncoder_footer, __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode(`${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, \"f\")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, \"f\")}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\").repeat(2)}`), \"f\");\n this.contentLength = String(this.getContentLength());\n this.headers = Object.freeze({\n \"Content-Type\": this.contentType,\n \"Content-Length\": this.contentLength\n });\n Object.defineProperties(this, {\n boundary: { writable: false, configurable: false },\n contentType: { writable: false, configurable: false },\n contentLength: { writable: false, configurable: false },\n headers: { writable: false, configurable: false }\n });\n }\n getContentLength() {\n let length = 0;\n for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, \"f\")) {\n const value = (0, isFileLike_1.isFileLike)(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode((0, normalizeValue_1.default)(raw));\n length += __classPrivateFieldGet(this, _FormDataEncoder_instances, \"m\", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength;\n length += (0, isFileLike_1.isFileLike)(value) ? value.size : value.byteLength;\n length += __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, \"f\");\n }\n return length + __classPrivateFieldGet(this, _FormDataEncoder_footer, \"f\").byteLength;\n }\n *values() {\n for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, \"f\").entries()) {\n const value = (0, isFileLike_1.isFileLike)(raw) ? 
raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode((0, normalizeValue_1.default)(raw));\n yield __classPrivateFieldGet(this, _FormDataEncoder_instances, \"m\", _FormDataEncoder_getFieldHeader).call(this, name, value);\n yield value;\n yield __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, \"f\");\n }\n yield __classPrivateFieldGet(this, _FormDataEncoder_footer, \"f\");\n }\n async *encode() {\n for (const part of this.values()) {\n if ((0, isFileLike_1.isFileLike)(part)) {\n yield* part.stream();\n }\n else {\n yield part;\n }\n }\n }\n [(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) {\n let header = \"\";\n header += `${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, \"f\")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")}`;\n header += `Content-Disposition: form-data; name=\"${(0, escapeName_1.default)(name)}\"`;\n if ((0, isFileLike_1.isFileLike)(value)) {\n header += `; filename=\"${(0, escapeName_1.default)(value.name)}\"${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")}`;\n header += `Content-Type: ${value.type || \"application/octet-stream\"}`;\n }\n if (__classPrivateFieldGet(this, _FormDataEncoder_options, \"f\").enableAdditionalHeaders === true) {\n header += `${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")}Content-Length: ${(0, isFileLike_1.isFileLike)(value) ? value.size : value.byteLength}`;\n }\n return __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode(`${header}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\").repeat(2)}`);\n }, Symbol.iterator)]() {\n return this.values();\n }\n [Symbol.asyncIterator]() {\n return this.encode();\n }\n}\nexports.FormDataEncoder = FormDataEncoder;\nexports.Encoder = FormDataEncoder;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./FormDataEncoder\"), exports);\n__exportStar(require(\"./FileLike\"), exports);\n__exportStar(require(\"./FormDataLike\"), exports);\n__exportStar(require(\"./util/isFileLike\"), exports);\n__exportStar(require(\"./util/isFormData\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst alphabet = \"abcdefghijklmnopqrstuvwxyz0123456789\";\nfunction createBoundary() {\n let size = 16;\n let res = \"\";\n while (size--) {\n res += alphabet[(Math.random() * alphabet.length) << 0];\n }\n return res;\n}\nexports.default = createBoundary;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst escapeName = (name) => String(name)\n .replace(/\\r/g, \"%0D\")\n .replace(/\\n/g, \"%0A\")\n .replace(/\"/g, \"%22\");\nexports.default = escapeName;\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFileLike = void 0;\nconst isFunction_1 = __importDefault(require(\"./isFunction\"));\nconst isFileLike = (value) => Boolean(value\n && typeof value === \"object\"\n && (0, isFunction_1.default)(value.constructor)\n && value[Symbol.toStringTag] === \"File\"\n && (0, isFunction_1.default)(value.stream)\n && value.name != null\n && value.size != null\n && value.lastModified != null);\nexports.isFileLike = isFileLike;\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFormDataLike = exports.isFormData = void 0;\nconst isFunction_1 = __importDefault(require(\"./isFunction\"));\nconst isFormData = (value) => Boolean(value\n && (0, isFunction_1.default)(value.constructor)\n && value[Symbol.toStringTag] === \"FormData\"\n && (0, isFunction_1.default)(value.append)\n && (0, isFunction_1.default)(value.getAll)\n && (0, isFunction_1.default)(value.entries)\n && (0, isFunction_1.default)(value[Symbol.iterator]));\nexports.isFormData = isFormData;\nexports.isFormDataLike = exports.isFormData;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst isFunction = (value) => (typeof value === \"function\");\nexports.default = isFunction;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase());\nfunction isPlainObject(value) {\n if (getType(value) !== \"object\") {\n return false;\n }\n const pp = Object.getPrototypeOf(value);\n if (pp === null || pp === undefined) {\n return true;\n }\n const Ctor = pp.constructor && pp.constructor.toString();\n return Ctor === Object.toString();\n}\nexports.default = isPlainObject;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst normalizeValue = (value) => String(value)\n .replace(/\\r|\\n/g, (match, i, str) => {\n if ((match === \"\\r\" && str[i + 1] !== \"\\n\")\n || (match === \"\\n\" && str[i - 1] !== \"\\r\")) {\n return \"\\r\\n\";\n }\n return match;\n});\nexports.default = normalizeValue;\n","\"use strict\";\n/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar _Blob_parts, _Blob_type, _Blob_size;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Blob = void 0;\nconst web_streams_polyfill_1 = require(\"web-streams-polyfill\");\nconst isFunction_1 = require(\"./isFunction\");\nconst blobHelpers_1 = require(\"./blobHelpers\");\nclass Blob {\n constructor(blobParts = [], options = {}) {\n _Blob_parts.set(this, []);\n _Blob_type.set(this, \"\");\n _Blob_size.set(this, 0);\n options !== null && options !== void 0 ? 
options : (options = {});\n if (typeof blobParts !== \"object\" || blobParts === null) {\n throw new TypeError(\"Failed to construct 'Blob': \"\n + \"The provided value cannot be converted to a sequence.\");\n }\n if (!(0, isFunction_1.isFunction)(blobParts[Symbol.iterator])) {\n throw new TypeError(\"Failed to construct 'Blob': \"\n + \"The object must have a callable @@iterator property.\");\n }\n if (typeof options !== \"object\" && !(0, isFunction_1.isFunction)(options)) {\n throw new TypeError(\"Failed to construct 'Blob': parameter 2 cannot convert to dictionary.\");\n }\n const encoder = new TextEncoder();\n for (const raw of blobParts) {\n let part;\n if (ArrayBuffer.isView(raw)) {\n part = new Uint8Array(raw.buffer.slice(raw.byteOffset, raw.byteOffset + raw.byteLength));\n }\n else if (raw instanceof ArrayBuffer) {\n part = new Uint8Array(raw.slice(0));\n }\n else if (raw instanceof Blob) {\n part = raw;\n }\n else {\n part = encoder.encode(String(raw));\n }\n __classPrivateFieldSet(this, _Blob_size, __classPrivateFieldGet(this, _Blob_size, \"f\") + (ArrayBuffer.isView(part) ? part.byteLength : part.size), \"f\");\n __classPrivateFieldGet(this, _Blob_parts, \"f\").push(part);\n }\n const type = options.type === undefined ? \"\" : String(options.type);\n __classPrivateFieldSet(this, _Blob_type, /^[\\x20-\\x7E]*$/.test(type) ? type : \"\", \"f\");\n }\n static [(_Blob_parts = new WeakMap(), _Blob_type = new WeakMap(), _Blob_size = new WeakMap(), Symbol.hasInstance)](value) {\n return Boolean(value\n && typeof value === \"object\"\n && (0, isFunction_1.isFunction)(value.constructor)\n && ((0, isFunction_1.isFunction)(value.stream)\n || (0, isFunction_1.isFunction)(value.arrayBuffer))\n && /^(Blob|File)$/.test(value[Symbol.toStringTag]));\n }\n get type() {\n return __classPrivateFieldGet(this, _Blob_type, \"f\");\n }\n get size() {\n return __classPrivateFieldGet(this, _Blob_size, \"f\");\n }\n slice(start, end, contentType) {\n return new Blob((0, blobHelpers_1.sliceBlob)(__classPrivateFieldGet(this, _Blob_parts, \"f\"), this.size, start, end), {\n type: contentType\n });\n }\n async text() {\n const decoder = new TextDecoder();\n let result = \"\";\n for await (const chunk of (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, \"f\"))) {\n result += decoder.decode(chunk, { stream: true });\n }\n result += decoder.decode();\n return result;\n }\n async arrayBuffer() {\n const view = new Uint8Array(this.size);\n let offset = 0;\n for await (const chunk of (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, \"f\"))) {\n view.set(chunk, offset);\n offset += chunk.length;\n }\n return view.buffer;\n }\n stream() {\n const iterator = (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, \"f\"), true);\n return new web_streams_polyfill_1.ReadableStream({\n async pull(controller) {\n const { value, done } = await iterator.next();\n if (done) {\n return queueMicrotask(() => controller.close());\n }\n controller.enqueue(value);\n },\n async cancel() {\n await iterator.return();\n }\n });\n }\n get [Symbol.toStringTag]() {\n return \"Blob\";\n }\n}\nexports.Blob = Blob;\nObject.defineProperties(Blob.prototype, {\n type: { enumerable: true },\n size: { enumerable: true },\n slice: { enumerable: true },\n stream: { enumerable: true },\n text: { enumerable: true },\n arrayBuffer: { enumerable: true }\n});\n","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, 
value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _File_name, _File_lastModified;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.File = void 0;\nconst Blob_1 = require(\"./Blob\");\nclass File extends Blob_1.Blob {\n constructor(fileBits, name, options = {}) {\n super(fileBits, options);\n _File_name.set(this, void 0);\n _File_lastModified.set(this, 0);\n if (arguments.length < 2) {\n throw new TypeError(\"Failed to construct 'File': 2 arguments required, \"\n + `but only ${arguments.length} present.`);\n }\n __classPrivateFieldSet(this, _File_name, String(name), \"f\");\n const lastModified = options.lastModified === undefined\n ? Date.now()\n : Number(options.lastModified);\n if (!Number.isNaN(lastModified)) {\n __classPrivateFieldSet(this, _File_lastModified, lastModified, \"f\");\n }\n }\n static [(_File_name = new WeakMap(), _File_lastModified = new WeakMap(), Symbol.hasInstance)](value) {\n return value instanceof Blob_1.Blob\n && value[Symbol.toStringTag] === \"File\"\n && typeof value.name === \"string\";\n }\n get name() {\n return __classPrivateFieldGet(this, _File_name, \"f\");\n }\n get lastModified() {\n return __classPrivateFieldGet(this, _File_lastModified, \"f\");\n }\n get webkitRelativePath() {\n return \"\";\n }\n get [Symbol.toStringTag]() {\n return \"File\";\n }\n}\nexports.File = File;\n","\"use strict\";\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _FormData_instances, _FormData_entries, _FormData_setEntry;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FormData = void 0;\nconst util_1 = require(\"util\");\nconst File_1 = require(\"./File\");\nconst isFile_1 = require(\"./isFile\");\nconst isBlob_1 = require(\"./isBlob\");\nconst isFunction_1 = require(\"./isFunction\");\nconst deprecateConstructorEntries_1 = require(\"./deprecateConstructorEntries\");\nclass FormData {\n constructor(entries) {\n _FormData_instances.add(this);\n _FormData_entries.set(this, new Map());\n if (entries) {\n (0, deprecateConstructorEntries_1.deprecateConstructorEntries)();\n entries.forEach(({ name, value, fileName }) => this.append(name, value, fileName));\n }\n }\n static [(_FormData_entries = new WeakMap(), _FormData_instances = new WeakSet(), Symbol.hasInstance)](value) {\n return Boolean(value\n && (0, isFunction_1.isFunction)(value.constructor)\n && value[Symbol.toStringTag] === \"FormData\"\n && (0, isFunction_1.isFunction)(value.append)\n && (0, isFunction_1.isFunction)(value.set)\n && (0, isFunction_1.isFunction)(value.get)\n && (0, isFunction_1.isFunction)(value.getAll)\n && (0, isFunction_1.isFunction)(value.has)\n && (0, isFunction_1.isFunction)(value.delete)\n && (0, isFunction_1.isFunction)(value.entries)\n && (0, isFunction_1.isFunction)(value.values)\n && (0, isFunction_1.isFunction)(value.keys)\n && (0, isFunction_1.isFunction)(value[Symbol.iterator])\n && (0, isFunction_1.isFunction)(value.forEach));\n }\n append(name, value, fileName) {\n __classPrivateFieldGet(this, _FormData_instances, \"m\", _FormData_setEntry).call(this, {\n name,\n fileName,\n append: true,\n rawValue: value,\n argsLength: arguments.length\n });\n }\n set(name, value, fileName) {\n __classPrivateFieldGet(this, _FormData_instances, \"m\", _FormData_setEntry).call(this, {\n name,\n fileName,\n append: false,\n rawValue: value,\n argsLength: arguments.length\n });\n }\n get(name) {\n const field = __classPrivateFieldGet(this, _FormData_entries, \"f\").get(String(name));\n if (!field) {\n return null;\n }\n return field[0];\n }\n getAll(name) {\n const field = __classPrivateFieldGet(this, _FormData_entries, \"f\").get(String(name));\n if (!field) {\n return [];\n }\n return field.slice();\n }\n has(name) {\n return __classPrivateFieldGet(this, _FormData_entries, \"f\").has(String(name));\n }\n delete(name) {\n __classPrivateFieldGet(this, _FormData_entries, \"f\").delete(String(name));\n }\n *keys() {\n for (const key of __classPrivateFieldGet(this, _FormData_entries, \"f\").keys()) {\n yield key;\n }\n }\n *entries() {\n for (const name of this.keys()) {\n const values = this.getAll(name);\n for (const value of values) {\n yield [name, value];\n }\n }\n }\n *values() {\n for (const [, value] of this) {\n yield value;\n }\n }\n [(_FormData_setEntry = function _FormData_setEntry({ name, rawValue, append, fileName, argsLength }) {\n const methodName = append ? \"append\" : \"set\";\n if (argsLength < 2) {\n throw new TypeError(`Failed to execute '${methodName}' on 'FormData': `\n + `2 arguments required, but only ${argsLength} present.`);\n }\n name = String(name);\n let value;\n if ((0, isFile_1.isFile)(rawValue)) {\n value = fileName === undefined\n ? rawValue\n : new File_1.File([rawValue], fileName, {\n type: rawValue.type,\n lastModified: rawValue.lastModified\n });\n }\n else if ((0, isBlob_1.isBlob)(rawValue)) {\n value = new File_1.File([rawValue], fileName === undefined ? 
\"blob\" : fileName, {\n type: rawValue.type\n });\n }\n else if (fileName) {\n throw new TypeError(`Failed to execute '${methodName}' on 'FormData': `\n + \"parameter 2 is not of type 'Blob'.\");\n }\n else {\n value = String(rawValue);\n }\n const values = __classPrivateFieldGet(this, _FormData_entries, \"f\").get(name);\n if (!values) {\n return void __classPrivateFieldGet(this, _FormData_entries, \"f\").set(name, [value]);\n }\n if (!append) {\n return void __classPrivateFieldGet(this, _FormData_entries, \"f\").set(name, [value]);\n }\n values.push(value);\n }, Symbol.iterator)]() {\n return this.entries();\n }\n forEach(callback, thisArg) {\n for (const [name, value] of this) {\n callback.call(thisArg, value, name, this);\n }\n }\n get [Symbol.toStringTag]() {\n return \"FormData\";\n }\n [util_1.inspect.custom]() {\n return this[Symbol.toStringTag];\n }\n}\nexports.FormData = FormData;\n","\"use strict\";\n/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.sliceBlob = exports.consumeBlobParts = void 0;\nconst isFunction_1 = require(\"./isFunction\");\nconst CHUNK_SIZE = 65536;\nasync function* clonePart(part) {\n const end = part.byteOffset + part.byteLength;\n let position = part.byteOffset;\n while (position !== end) {\n const size = Math.min(end - position, CHUNK_SIZE);\n const chunk = part.buffer.slice(position, position + size);\n position += chunk.byteLength;\n yield new Uint8Array(chunk);\n }\n}\nasync function* consumeNodeBlob(blob) {\n let position = 0;\n while (position !== blob.size) {\n const chunk = blob.slice(position, Math.min(blob.size, position + CHUNK_SIZE));\n const buffer = await chunk.arrayBuffer();\n position += buffer.byteLength;\n yield new Uint8Array(buffer);\n }\n}\nasync function* consumeBlobParts(parts, clone = false) {\n for (const part of parts) {\n if (ArrayBuffer.isView(part)) {\n if (clone) {\n yield* clonePart(part);\n }\n else {\n yield part;\n }\n }\n else if ((0, isFunction_1.isFunction)(part.stream)) {\n yield* part.stream();\n }\n else {\n yield* consumeNodeBlob(part);\n }\n }\n}\nexports.consumeBlobParts = consumeBlobParts;\nfunction* sliceBlob(blobParts, blobSize, start = 0, end) {\n end !== null && end !== void 0 ? end : (end = blobSize);\n let relativeStart = start < 0\n ? Math.max(blobSize + start, 0)\n : Math.min(start, blobSize);\n let relativeEnd = end < 0\n ? Math.max(blobSize + end, 0)\n : Math.min(end, blobSize);\n const span = Math.max(relativeEnd - relativeStart, 0);\n let added = 0;\n for (const part of blobParts) {\n if (added >= span) {\n break;\n }\n const partSize = ArrayBuffer.isView(part) ? 
part.byteLength : part.size;\n if (relativeStart && partSize <= relativeStart) {\n relativeStart -= partSize;\n relativeEnd -= partSize;\n }\n else {\n let chunk;\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(partSize, relativeEnd));\n added += chunk.byteLength;\n }\n else {\n chunk = part.slice(relativeStart, Math.min(partSize, relativeEnd));\n added += chunk.size;\n }\n relativeEnd -= partSize;\n relativeStart = 0;\n yield chunk;\n }\n }\n}\nexports.sliceBlob = sliceBlob;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.deprecateConstructorEntries = void 0;\nconst util_1 = require(\"util\");\nexports.deprecateConstructorEntries = (0, util_1.deprecate)(() => { }, \"Constructor \\\"entries\\\" argument is not spec-compliant \"\n + \"and will be removed in next major release.\");\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nvar _FileFromPath_path, _FileFromPath_start;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fileFromPath = exports.fileFromPathSync = void 0;\nconst fs_1 = require(\"fs\");\nconst path_1 = require(\"path\");\nconst node_domexception_1 = __importDefault(require(\"node-domexception\"));\nconst File_1 = require(\"./File\");\nconst isPlainObject_1 = __importDefault(require(\"./isPlainObject\"));\n__exportStar(require(\"./isFile\"), exports);\nconst MESSAGE = \"The requested file could not be read, \"\n + \"typically due to permission problems that have occurred after a reference \"\n + \"to a file was acquired.\";\nclass FileFromPath {\n constructor(input) {\n _FileFromPath_path.set(this, void 0);\n _FileFromPath_start.set(this, void 0);\n __classPrivateFieldSet(this, _FileFromPath_path, input.path, \"f\");\n __classPrivateFieldSet(this, _FileFromPath_start, input.start || 0, \"f\");\n this.name = (0, path_1.basename)(__classPrivateFieldGet(this, _FileFromPath_path, \"f\"));\n this.size = input.size;\n this.lastModified = input.lastModified;\n }\n slice(start, end) {\n return new FileFromPath({\n path: __classPrivateFieldGet(this, _FileFromPath_path, \"f\"),\n lastModified: this.lastModified,\n size: end - start,\n start\n });\n }\n async *stream() {\n const { mtimeMs } = await fs_1.promises.stat(__classPrivateFieldGet(this, _FileFromPath_path, \"f\"));\n if (mtimeMs > this.lastModified) {\n throw new node_domexception_1.default(MESSAGE, \"NotReadableError\");\n }\n if (this.size) {\n yield* (0, fs_1.createReadStream)(__classPrivateFieldGet(this, _FileFromPath_path, \"f\"), {\n start: __classPrivateFieldGet(this, _FileFromPath_start, \"f\"),\n end: __classPrivateFieldGet(this, _FileFromPath_start, \"f\") + this.size - 1\n });\n }\n }\n get [(_FileFromPath_path = new WeakMap(), _FileFromPath_start = new WeakMap(), Symbol.toStringTag)]() {\n return \"File\";\n }\n}\nfunction createFileFromPath(path, { mtimeMs, size }, filenameOrOptions, options = {}) {\n let filename;\n if ((0, isPlainObject_1.default)(filenameOrOptions)) {\n [options, filename] = [filenameOrOptions, undefined];\n }\n else {\n filename = filenameOrOptions;\n }\n const file = new FileFromPath({ path, size, lastModified: mtimeMs });\n if (!filename) {\n filename = file.name;\n }\n return new File_1.File([file], filename, {\n ...options, lastModified: file.lastModified\n });\n}\nfunction fileFromPathSync(path, filenameOrOptions, options = {}) {\n const stats = (0, fs_1.statSync)(path);\n return createFileFromPath(path, stats, filenameOrOptions, options);\n}\nexports.fileFromPathSync = fileFromPathSync;\nasync function fileFromPath(path, filenameOrOptions, options) {\n const stats = await fs_1.promises.stat(path);\n return createFileFromPath(path, stats, filenameOrOptions, options);\n}\nexports.fileFromPath = fileFromPath;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./FormData\"), exports);\n__exportStar(require(\"./Blob\"), exports);\n__exportStar(require(\"./File\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isBlob = void 0;\nconst Blob_1 = require(\"./Blob\");\nconst isBlob = (value) => value instanceof Blob_1.Blob;\nexports.isBlob = isBlob;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFile = void 0;\nconst File_1 = require(\"./File\");\nconst isFile = (value) => value instanceof File_1.File;\nexports.isFile = isFile;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFunction = void 0;\nconst isFunction = (value) => (typeof value === \"function\");\nexports.isFunction = isFunction;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase());\nfunction isPlainObject(value) {\n if (getType(value) !== \"object\") {\n return false;\n }\n const pp = Object.getPrototypeOf(value);\n if (pp === null || pp === undefined) {\n return true;\n }\n const Ctor = pp.constructor && pp.constructor.toString();\n return Ctor === Object.toString();\n}\nexports.default = isPlainObject;\n","\"use strict\";\n// translate the various posix character classes into unicode properties\n// this works across all unicode locales\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.parseClass = void 0;\n// { : [, /u flag required, negated]\nconst posixClasses = {\n '[:alnum:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}', true],\n '[:alpha:]': ['\\\\p{L}\\\\p{Nl}', true],\n '[:ascii:]': ['\\\\x' + '00-\\\\x' + '7f', false],\n '[:blank:]': ['\\\\p{Zs}\\\\t', true],\n '[:cntrl:]': ['\\\\p{Cc}', true],\n '[:digit:]': ['\\\\p{Nd}', true],\n '[:graph:]': ['\\\\p{Z}\\\\p{C}', true, true],\n '[:lower:]': ['\\\\p{Ll}', true],\n '[:print:]': ['\\\\p{C}', true],\n '[:punct:]': ['\\\\p{P}', true],\n '[:space:]': ['\\\\p{Z}\\\\t\\\\r\\\\n\\\\v\\\\f', true],\n '[:upper:]': ['\\\\p{Lu}', true],\n '[:word:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}\\\\p{Pc}', true],\n '[:xdigit:]': ['A-Fa-f0-9', false],\n};\n// only need to escape a few things inside of brace expressions\n// escapes: [ \\ ] -\nconst braceEscape = (s) => s.replace(/[[\\]\\\\-]/g, '\\\\$&');\n// escape all regexp magic characters\nconst regexpEscape = (s) => s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n// everything has already been escaped, we just have to join\nconst rangesToString = (ranges) => ranges.join('');\n// takes a glob string at a posix brace expression, and returns\n// an equivalent regular expression source, and boolean indicating\n// whether the /u flag needs to be applied, and the number of chars\n// consumed to parse the character class.\n// This also removes out of order ranges, and returns ($.) 
if the\n// entire class just no good.\nconst parseClass = (glob, position) => {\n const pos = position;\n /* c8 ignore start */\n if (glob.charAt(pos) !== '[') {\n throw new Error('not in a brace expression');\n }\n /* c8 ignore stop */\n const ranges = [];\n const negs = [];\n let i = pos + 1;\n let sawStart = false;\n let uflag = false;\n let escaping = false;\n let negate = false;\n let endPos = pos;\n let rangeStart = '';\n WHILE: while (i < glob.length) {\n const c = glob.charAt(i);\n if ((c === '!' || c === '^') && i === pos + 1) {\n negate = true;\n i++;\n continue;\n }\n if (c === ']' && sawStart && !escaping) {\n endPos = i + 1;\n break;\n }\n sawStart = true;\n if (c === '\\\\') {\n if (!escaping) {\n escaping = true;\n i++;\n continue;\n }\n // escaped \\ char, fall through and treat like normal char\n }\n if (c === '[' && !escaping) {\n // either a posix class, a collation equivalent, or just a [\n for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {\n if (glob.startsWith(cls, i)) {\n // invalid, [a-[] is fine, but not [a-[:alpha]]\n if (rangeStart) {\n return ['$.', false, glob.length - pos, true];\n }\n i += cls.length;\n if (neg)\n negs.push(unip);\n else\n ranges.push(unip);\n uflag = uflag || u;\n continue WHILE;\n }\n }\n }\n // now it's just a normal character, effectively\n escaping = false;\n if (rangeStart) {\n // throw this range away if it's not valid, but others\n // can still match.\n if (c > rangeStart) {\n ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));\n }\n else if (c === rangeStart) {\n ranges.push(braceEscape(c));\n }\n rangeStart = '';\n i++;\n continue;\n }\n // now might be the start of a range.\n // can be either c-d or c-] or c] or c] at this point\n if (glob.startsWith('-]', i + 1)) {\n ranges.push(braceEscape(c + '-'));\n i += 2;\n continue;\n }\n if (glob.startsWith('-', i + 1)) {\n rangeStart = c;\n i += 2;\n continue;\n }\n // not the start of a range, just a single character\n ranges.push(braceEscape(c));\n i++;\n }\n if (endPos < i) {\n // didn't see the end of the class, not a valid class,\n // but might still be valid as a literal match.\n return ['', false, 0, false];\n }\n // if we got no ranges and no negates, then we have a range that\n // cannot possibly match anything, and that poisons the whole glob\n if (!ranges.length && !negs.length) {\n return ['$.', false, glob.length - pos, true];\n }\n // if we got one positive range, and it's a single character, then that's\n // not actually a magic pattern, it's just that one literal character.\n // we should not treat that as \"magic\", we should just return the literal\n // character. [_] is a perfectly valid way to escape glob magic chars.\n if (negs.length === 0 &&\n ranges.length === 1 &&\n /^\\\\?.$/.test(ranges[0]) &&\n !negate) {\n const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];\n return [regexpEscape(r), false, endPos - pos, false];\n }\n const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';\n const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';\n const comb = ranges.length && negs.length\n ? '(' + sranges + '|' + snegs + ')'\n : ranges.length\n ? 
sranges\n : snegs;\n return [comb, uflag, endPos - pos, true];\n};\nexports.parseClass = parseClass;\n//# sourceMappingURL=brace-expressions.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.escape = void 0;\n/**\n * Escape all magic characters in a glob pattern.\n *\n * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}\n * option is used, then characters are escaped by wrapping in `[]`, because\n * a magic character wrapped in a character class can only be satisfied by\n * that exact character. In this mode, `\\` is _not_ escaped, because it is\n * not interpreted as a magic character, but instead as a path separator.\n */\nconst escape = (s, { windowsPathsNoEscape = false, } = {}) => {\n // don't need to escape +@! because we escape the parens\n // that make those magic, and escaping ! as [!] isn't valid,\n // because [!]] is a valid glob class meaning not ']'.\n return windowsPathsNoEscape\n ? s.replace(/[?*()[\\]]/g, '[$&]')\n : s.replace(/[?*()[\\]\\\\]/g, '\\\\$&');\n};\nexports.escape = escape;\n//# sourceMappingURL=escape.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst index_js_1 = __importDefault(require(\"./index.js\"));\nmodule.exports = Object.assign(index_js_1.default, { default: index_js_1.default, minimatch: index_js_1.default });\n//# sourceMappingURL=index-cjs.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.unescape = exports.escape = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;\nconst brace_expansion_1 = __importDefault(require(\"brace-expansion\"));\nconst brace_expressions_js_1 = require(\"./brace-expressions.js\");\nconst escape_js_1 = require(\"./escape.js\");\nconst unescape_js_1 = require(\"./unescape.js\");\nconst minimatch = (p, pattern, options = {}) => {\n assertValidPattern(pattern);\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false;\n }\n return new Minimatch(pattern, options).match(p);\n};\nexports.minimatch = minimatch;\nexports.default = exports.minimatch;\n// Optimized checking for the most common glob patterns.\nconst starDotExtRE = /^\\*+([^+@!?\\*\\[\\(]*)$/;\nconst starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);\nconst starDotExtTestDot = (ext) => (f) => f.endsWith(ext);\nconst starDotExtTestNocase = (ext) => {\n ext = ext.toLowerCase();\n return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);\n};\nconst starDotExtTestNocaseDot = (ext) => {\n ext = ext.toLowerCase();\n return (f) => f.toLowerCase().endsWith(ext);\n};\nconst starDotStarRE = /^\\*+\\.\\*+$/;\nconst starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');\nconst starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');\nconst dotStarRE = /^\\.\\*+$/;\nconst dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');\nconst starRE = /^\\*+$/;\nconst starTest = (f) => f.length !== 0 && !f.startsWith('.');\nconst starTestDot = (f) => f.length !== 0 && f !== '.' 
&& f !== '..';\nconst qmarksRE = /^\\?+([^+@!?\\*\\[\\(]*)?$/;\nconst qmarksTestNocase = ([$0, ext = '']) => {\n const noext = qmarksTestNoExt([$0]);\n if (!ext)\n return noext;\n ext = ext.toLowerCase();\n return (f) => noext(f) && f.toLowerCase().endsWith(ext);\n};\nconst qmarksTestNocaseDot = ([$0, ext = '']) => {\n const noext = qmarksTestNoExtDot([$0]);\n if (!ext)\n return noext;\n ext = ext.toLowerCase();\n return (f) => noext(f) && f.toLowerCase().endsWith(ext);\n};\nconst qmarksTestDot = ([$0, ext = '']) => {\n const noext = qmarksTestNoExtDot([$0]);\n return !ext ? noext : (f) => noext(f) && f.endsWith(ext);\n};\nconst qmarksTest = ([$0, ext = '']) => {\n const noext = qmarksTestNoExt([$0]);\n return !ext ? noext : (f) => noext(f) && f.endsWith(ext);\n};\nconst qmarksTestNoExt = ([$0]) => {\n const len = $0.length;\n return (f) => f.length === len && !f.startsWith('.');\n};\nconst qmarksTestNoExtDot = ([$0]) => {\n const len = $0.length;\n return (f) => f.length === len && f !== '.' && f !== '..';\n};\n/* c8 ignore start */\nconst defaultPlatform = (typeof process === 'object' && process\n ? (typeof process.env === 'object' &&\n process.env &&\n process.env.__MINIMATCH_TESTING_PLATFORM__) ||\n process.platform\n : 'posix');\nconst path = {\n win32: { sep: '\\\\' },\n posix: { sep: '/' },\n};\n/* c8 ignore stop */\nexports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;\nexports.minimatch.sep = exports.sep;\nexports.GLOBSTAR = Symbol('globstar **');\nexports.minimatch.GLOBSTAR = exports.GLOBSTAR;\nconst plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)' },\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' },\n};\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nconst qmark = '[^/]';\n// * => any number of characters\nconst star = qmark + '*?';\n// ** when dots are allowed. Anything goes, except .. 
and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nconst twoStarDot = '(?:(?!(?:\\\\/|^)(?:\\\\.{1,2})($|\\\\/)).)*?';\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nconst twoStarNoDot = '(?:(?!(?:\\\\/|^)\\\\.).)*?';\n// \"abc\" -> { a:true, b:true, c:true }\nconst charSet = (s) => s.split('').reduce((set, c) => {\n set[c] = true;\n return set;\n}, {});\n// characters that need to be escaped in RegExp.\nconst reSpecials = charSet('().*{}+?[]^$\\\\!');\n// characters that indicate we have to add the pattern start\nconst addPatternStartSet = charSet('[.(');\nconst filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);\nexports.filter = filter;\nexports.minimatch.filter = exports.filter;\nconst ext = (a, b = {}) => Object.assign({}, a, b);\nconst defaults = (def) => {\n if (!def || typeof def !== 'object' || !Object.keys(def).length) {\n return exports.minimatch;\n }\n const orig = exports.minimatch;\n const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));\n return Object.assign(m, {\n Minimatch: class Minimatch extends orig.Minimatch {\n constructor(pattern, options = {}) {\n super(pattern, ext(def, options));\n }\n static defaults(options) {\n return orig.defaults(ext(def, options)).Minimatch;\n }\n },\n unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),\n escape: (s, options = {}) => orig.escape(s, ext(def, options)),\n filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),\n defaults: (options) => orig.defaults(ext(def, options)),\n makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),\n braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),\n match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),\n sep: orig.sep,\n GLOBSTAR: exports.GLOBSTAR,\n });\n};\nexports.defaults = defaults;\nexports.minimatch.defaults = exports.defaults;\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nconst braceExpand = (pattern, options = {}) => {\n assertValidPattern(pattern);\n // Thanks to Yeting Li for\n // improving this regexp to avoid a ReDOS vulnerability.\n if (options.nobrace || !/\\{(?:(?!\\{).)*\\}/.test(pattern)) {\n // shortcut. no need to expand.\n return [pattern];\n }\n return (0, brace_expansion_1.default)(pattern);\n};\nexports.braceExpand = braceExpand;\nexports.minimatch.braceExpand = exports.braceExpand;\nconst MAX_PATTERN_LENGTH = 1024 * 64;\nconst assertValidPattern = (pattern) => {\n if (typeof pattern !== 'string') {\n throw new TypeError('invalid pattern');\n }\n if (pattern.length > MAX_PATTERN_LENGTH) {\n throw new TypeError('pattern is too long');\n }\n};\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. 
Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nconst makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();\nexports.makeRe = makeRe;\nexports.minimatch.makeRe = exports.makeRe;\nconst match = (list, pattern, options = {}) => {\n const mm = new Minimatch(pattern, options);\n list = list.filter(f => mm.match(f));\n if (mm.options.nonull && !list.length) {\n list.push(pattern);\n }\n return list;\n};\nexports.match = match;\nexports.minimatch.match = exports.match;\n// replace stuff like \\* with *\nconst globUnescape = (s) => s.replace(/\\\\(.)/g, '$1');\nconst globMagic = /[?*]|[+@!]\\(.*?\\)|\\[|\\]/;\nconst regExpEscape = (s) => s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\nclass Minimatch {\n options;\n set;\n pattern;\n windowsPathsNoEscape;\n nonegate;\n negate;\n comment;\n empty;\n preserveMultipleSlashes;\n partial;\n globSet;\n globParts;\n nocase;\n isWindows;\n platform;\n windowsNoMagicRoot;\n regexp;\n constructor(pattern, options = {}) {\n assertValidPattern(pattern);\n options = options || {};\n this.options = options;\n this.pattern = pattern;\n this.platform = options.platform || defaultPlatform;\n this.isWindows = this.platform === 'win32';\n this.windowsPathsNoEscape =\n !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;\n if (this.windowsPathsNoEscape) {\n this.pattern = this.pattern.replace(/\\\\/g, '/');\n }\n this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;\n this.regexp = null;\n this.negate = false;\n this.nonegate = !!options.nonegate;\n this.comment = false;\n this.empty = false;\n this.partial = !!options.partial;\n this.nocase = !!this.options.nocase;\n this.windowsNoMagicRoot =\n options.windowsNoMagicRoot !== undefined\n ? options.windowsNoMagicRoot\n : !!(this.isWindows && this.nocase);\n this.globSet = [];\n this.globParts = [];\n this.set = [];\n // make the set of regexps etc.\n this.make();\n }\n hasMagic() {\n if (this.options.magicalBraces && this.set.length > 1) {\n return true;\n }\n for (const pattern of this.set) {\n for (const part of pattern) {\n if (typeof part !== 'string')\n return true;\n }\n }\n return false;\n }\n debug(..._) { }\n make() {\n const pattern = this.pattern;\n const options = this.options;\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true;\n return;\n }\n if (!pattern) {\n this.empty = true;\n return;\n }\n // step 1: figure out negation, etc.\n this.parseNegate();\n // step 2: expand braces\n this.globSet = [...new Set(this.braceExpand())];\n if (options.debug) {\n this.debug = (...args) => console.error(...args);\n }\n this.debug(this.pattern, this.globSet);\n // step 3: now we have a set, so turn each one into a series of\n // path-portion matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n //\n // First, we preprocess to make the glob pattern sets a bit simpler\n // and deduped. 
There are some perf-killing patterns that can cause\n // problems with a glob walk, but we can simplify them down a bit.\n const rawGlobParts = this.globSet.map(s => this.slashSplit(s));\n this.globParts = this.preprocess(rawGlobParts);\n this.debug(this.pattern, this.globParts);\n // glob --> regexps\n let set = this.globParts.map((s, _, __) => {\n if (this.isWindows && this.windowsNoMagicRoot) {\n // check if it's a drive or unc path.\n const isUNC = s[0] === '' &&\n s[1] === '' &&\n (s[2] === '?' || !globMagic.test(s[2])) &&\n !globMagic.test(s[3]);\n const isDrive = /^[a-z]:/i.test(s[0]);\n if (isUNC) {\n return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];\n }\n else if (isDrive) {\n return [s[0], ...s.slice(1).map(ss => this.parse(ss))];\n }\n }\n return s.map(ss => this.parse(ss));\n });\n this.debug(this.pattern, set);\n // filter out everything that didn't compile properly.\n this.set = set.filter(s => s.indexOf(false) === -1);\n // do not treat the ? in UNC paths as magic\n if (this.isWindows) {\n for (let i = 0; i < this.set.length; i++) {\n const p = this.set[i];\n if (p[0] === '' &&\n p[1] === '' &&\n this.globParts[i][2] === '?' &&\n typeof p[3] === 'string' &&\n /^[a-z]:$/i.test(p[3])) {\n p[2] = '?';\n }\n }\n }\n this.debug(this.pattern, this.set);\n }\n // various transforms to equivalent pattern sets that are\n // faster to process in a filesystem walk. The goal is to\n // eliminate what we can, and push all ** patterns as far\n // to the right as possible, even if it increases the number\n // of patterns that we have to process.\n preprocess(globParts) {\n // if we're not in globstar mode, then turn all ** into *\n if (this.options.noglobstar) {\n for (let i = 0; i < globParts.length; i++) {\n for (let j = 0; j < globParts[i].length; j++) {\n if (globParts[i][j] === '**') {\n globParts[i][j] = '*';\n }\n }\n }\n }\n const { optimizationLevel = 1 } = this.options;\n if (optimizationLevel >= 2) {\n // aggressive optimization for the purpose of fs walking\n globParts = this.firstPhasePreProcess(globParts);\n globParts = this.secondPhasePreProcess(globParts);\n }\n else if (optimizationLevel >= 1) {\n // just basic optimizations to remove some .. parts\n globParts = this.levelOneOptimize(globParts);\n }\n else {\n globParts = this.adjascentGlobstarOptimize(globParts);\n }\n return globParts;\n }\n // just get rid of adjascent ** portions\n adjascentGlobstarOptimize(globParts) {\n return globParts.map(parts => {\n let gs = -1;\n while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n let i = gs;\n while (parts[i + 1] === '**') {\n i++;\n }\n if (i !== gs) {\n parts.splice(gs, i - gs);\n }\n }\n return parts;\n });\n }\n // get rid of adjascent ** and resolve .. portions\n levelOneOptimize(globParts) {\n return globParts.map(parts => {\n parts = parts.reduce((set, part) => {\n const prev = set[set.length - 1];\n if (part === '**' && prev === '**') {\n return set;\n }\n if (part === '..') {\n if (prev && prev !== '..' && prev !== '.' && prev !== '**') {\n set.pop();\n return set;\n }\n }\n set.push(part);\n return set;\n }, []);\n return parts.length === 0 ? [''] : parts;\n });\n }\n levelTwoFileOptimize(parts) {\n if (!Array.isArray(parts)) {\n parts = this.slashSplit(parts);\n }\n let didSomething = false;\n do {\n didSomething = false;\n //
<pre>// -> <pre>
/\n            if (!this.preserveMultipleSlashes) {\n                for (let i = 1; i < parts.length - 1; i++) {\n                    const p = parts[i];\n                    // don't squeeze out UNC patterns\n                    if (i === 1 && p === '' && parts[0] === '')\n                        continue;\n                    if (p === '.' || p === '') {\n                        didSomething = true;\n                        parts.splice(i, 1);\n                        i--;\n                    }\n                }\n                if (parts[0] === '.' &&\n                    parts.length === 2 &&\n                    (parts[1] === '.' || parts[1] === '')) {\n                    didSomething = true;\n                    parts.pop();\n                }\n            }\n            // 
<pre>/<p>/../<rest> -> <pre>
/\n            let dd = 0;\n            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n                const p = parts[dd - 1];\n                if (p && p !== '.' && p !== '..' && p !== '**') {\n                    didSomething = true;\n                    parts.splice(dd - 1, 2);\n                    dd -= 2;\n                }\n            }\n        } while (didSomething);\n        return parts.length === 0 ? [''] : parts;\n    }\n    // First phase: single-pattern processing\n    // 
<pre> is 1 or more portions\n    // <rest> is 1 or more portions\n    // <p>
is any portion other than ., .., '', or **\n // is . or ''\n //\n // **/.. is *brutal* for filesystem walking performance, because\n // it effectively resets the recursive walk each time it occurs,\n // and ** cannot be reduced out by a .. pattern part like a regexp\n // or most strings (other than .., ., and '') can be.\n //\n //

<pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}\n    // <pre>// -> <pre>/\n    // <pre>/<p>/../<rest> -> <pre>
/\n    // **/**/ -> **/\n    //\n    // **/*/ -> */**/ <== not valid because ** doesn't follow\n    // this WOULD be allowed if ** did follow symlinks, or * didn't\n    firstPhasePreProcess(globParts) {\n        let didSomething = false;\n        do {\n            didSomething = false;\n            // 
<pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>
/}\n for (let parts of globParts) {\n let gs = -1;\n while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n let gss = gs;\n while (parts[gss + 1] === '**') {\n //

<pre>/**/**/<rest> -> <pre>
/**/\n                        gss++;\n                    }\n                    // eg, if gs is 2 and gss is 4, that means we have 3 **\n                    // parts, and can remove 2 of them.\n                    if (gss > gs) {\n                        parts.splice(gs + 1, gss - gs);\n                    }\n                    let next = parts[gs + 1];\n                    const p = parts[gs + 2];\n                    const p2 = parts[gs + 3];\n                    if (next !== '..')\n                        continue;\n                    if (!p ||\n                        p === '.' ||\n                        p === '..' ||\n                        !p2 ||\n                        p2 === '.' ||\n                        p2 === '..') {\n                        continue;\n                    }\n                    didSomething = true;\n                    // edit parts in place, and push the new one\n                    parts.splice(gs, 1);\n                    const other = parts.slice(0);\n                    other[gs] = '**';\n                    globParts.push(other);\n                    gs--;\n                }\n                // 
<pre>// -> <pre>
/\n                if (!this.preserveMultipleSlashes) {\n                    for (let i = 1; i < parts.length - 1; i++) {\n                        const p = parts[i];\n                        // don't squeeze out UNC patterns\n                        if (i === 1 && p === '' && parts[0] === '')\n                            continue;\n                        if (p === '.' || p === '') {\n                            didSomething = true;\n                            parts.splice(i, 1);\n                            i--;\n                        }\n                    }\n                    if (parts[0] === '.' &&\n                        parts.length === 2 &&\n                        (parts[1] === '.' || parts[1] === '')) {\n                        didSomething = true;\n                        parts.pop();\n                    }\n                }\n                // 
<pre>/<p>/../<rest> -> <pre>
/\n                let dd = 0;\n                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n                    const p = parts[dd - 1];\n                    if (p && p !== '.' && p !== '..' && p !== '**') {\n                        didSomething = true;\n                        const needDot = dd === 1 && parts[dd + 1] === '**';\n                        const splin = needDot ? ['.'] : [];\n                        parts.splice(dd - 1, 2, ...splin);\n                        if (parts.length === 0)\n                            parts.push('');\n                        dd -= 2;\n                    }\n                }\n            }\n        } while (didSomething);\n        return globParts;\n    }\n    // second phase: multi-pattern dedupes\n    // {
<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>\n    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>\n    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>\n    //\n    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>
/**/\n    // ^-- not valid because ** doens't follow symlinks\n    secondPhasePreProcess(globParts) {\n        for (let i = 0; i < globParts.length - 1; i++) {\n            for (let j = i + 1; j < globParts.length; j++) {\n                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);\n                if (!matched)\n                    continue;\n                globParts[i] = matched;\n                globParts[j] = [];\n            }\n        }\n        return globParts.filter(gs => gs.length);\n    }\n    partsMatch(a, b, emptyGSMatch = false) {\n        let ai = 0;\n        let bi = 0;\n        let result = [];\n        let which = '';\n        while (ai < a.length && bi < b.length) {\n            if (a[ai] === b[bi]) {\n                result.push(which === 'b' ? b[bi] : a[ai]);\n                ai++;\n                bi++;\n            }\n            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {\n                result.push(a[ai]);\n                ai++;\n            }\n            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {\n                result.push(b[bi]);\n                bi++;\n            }\n            else if (a[ai] === '*' &&\n                b[bi] &&\n                (this.options.dot || !b[bi].startsWith('.')) &&\n                b[bi] !== '**') {\n                if (which === 'b')\n                    return false;\n                which = 'a';\n                result.push(a[ai]);\n                ai++;\n                bi++;\n            }\n            else if (b[bi] === '*' &&\n                a[ai] &&\n                (this.options.dot || !a[ai].startsWith('.')) &&\n                a[ai] !== '**') {\n                if (which === 'a')\n                    return false;\n                which = 'b';\n                result.push(b[bi]);\n                ai++;\n                bi++;\n            }\n            else {\n                return false;\n            }\n        }\n        // if we fall out of the loop, it means they two are identical\n        // as long as their lengths match\n        return a.length === b.length && result;\n    }\n    parseNegate() {\n        if (this.nonegate)\n            return;\n        const pattern = this.pattern;\n        let negate = false;\n        let negateOffset = 0;\n        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {\n            negate = !negate;\n            negateOffset++;\n        }\n        if (negateOffset)\n            this.pattern = pattern.slice(negateOffset);\n        this.negate = negate;\n    }\n    // set partial to true to test if, for example,\n    // \"/a/b\" matches the start of \"/*/b/*/d\"\n    // Partial means, if you run out of file before you run\n    // out of pattern, then that's fine, as long as all\n    // the parts match.\n    matchOne(file, pattern, partial = false) {\n        const options = this.options;\n        // a UNC pattern like //?/c:/* can match a path like c:/x\n        // and vice versa\n        if (this.isWindows) {\n            const fileUNC = file[0] === '' &&\n                file[1] === '' &&\n                file[2] === '?' &&\n                typeof file[3] === 'string' &&\n                /^[a-z]:$/i.test(file[3]);\n            const patternUNC = pattern[0] === '' &&\n                pattern[1] === '' &&\n                pattern[2] === '?' 
&&\n                typeof pattern[3] === 'string' &&\n                /^[a-z]:$/i.test(pattern[3]);\n            if (fileUNC && patternUNC) {\n                const fd = file[3];\n                const pd = pattern[3];\n                if (fd.toLowerCase() === pd.toLowerCase()) {\n                    file[3] = pd;\n                }\n            }\n            else if (patternUNC && typeof file[0] === 'string') {\n                const pd = pattern[3];\n                const fd = file[0];\n                if (pd.toLowerCase() === fd.toLowerCase()) {\n                    pattern[3] = fd;\n                    pattern = pattern.slice(3);\n                }\n            }\n            else if (fileUNC && typeof pattern[0] === 'string') {\n                const fd = file[3];\n                if (fd.toLowerCase() === pattern[0].toLowerCase()) {\n                    pattern[0] = fd;\n                    file = file.slice(3);\n                }\n            }\n        }\n        // resolve and reduce . and .. portions in the file as well.\n        // dont' need to do the second phase, because it's only one string[]\n        const { optimizationLevel = 1 } = this.options;\n        if (optimizationLevel >= 2) {\n            file = this.levelTwoFileOptimize(file);\n        }\n        this.debug('matchOne', this, { file, pattern });\n        this.debug('matchOne', file.length, pattern.length);\n        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {\n            this.debug('matchOne loop');\n            var p = pattern[pi];\n            var f = file[fi];\n            this.debug(pattern, p, f);\n            // should be impossible.\n            // some invalid regexp stuff in the set.\n            /* c8 ignore start */\n            if (p === false) {\n                return false;\n            }\n            /* c8 ignore stop */\n            if (p === exports.GLOBSTAR) {\n                this.debug('GLOBSTAR', [pattern, p, f]);\n                // \"**\"\n                // a/**/b/**/c would match the following:\n                // a/b/x/y/z/c\n                // a/x/y/z/b/c\n                // a/b/x/b/x/c\n                // a/b/c\n                // To do this, take the rest of the pattern after\n                // the **, and see if it would match the file remainder.\n                // If so, return success.\n                // If not, the ** \"swallows\" a segment, and try again.\n                // This is recursively awful.\n                //\n                // a/**/b/**/c matching a/b/x/y/z/c\n                // - a matches a\n                // - doublestar\n                //   - matchOne(b/x/y/z/c, b/**/c)\n                //     - b matches b\n                //     - doublestar\n                //       - matchOne(x/y/z/c, c) -> no\n                //       - matchOne(y/z/c, c) -> no\n                //       - matchOne(z/c, c) -> no\n                //       - matchOne(c, c) yes, hit\n                var fr = fi;\n                var pr = pi + 1;\n                if (pr === pl) {\n                    this.debug('** at the end');\n                    // a ** at the end will just swallow the rest.\n                    // We have found a match.\n                    // however, it will not swallow /.x, unless\n                    // options.dot is set.\n                    // . and .. 
are *never* matched by **, for explosively\n                    // exponential reasons.\n                    for (; fi < fl; fi++) {\n                        if (file[fi] === '.' ||\n                            file[fi] === '..' ||\n                            (!options.dot && file[fi].charAt(0) === '.'))\n                            return false;\n                    }\n                    return true;\n                }\n                // ok, let's see if we can swallow whatever we can.\n                while (fr < fl) {\n                    var swallowee = file[fr];\n                    this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee);\n                    // XXX remove this slice.  Just pass the start index.\n                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n                        this.debug('globstar found match!', fr, fl, swallowee);\n                        // found a match.\n                        return true;\n                    }\n                    else {\n                        // can't swallow \".\" or \"..\" ever.\n                        // can only swallow \".foo\" when explicitly asked.\n                        if (swallowee === '.' ||\n                            swallowee === '..' ||\n                            (!options.dot && swallowee.charAt(0) === '.')) {\n                            this.debug('dot detected!', file, fr, pattern, pr);\n                            break;\n                        }\n                        // ** swallows a segment, and continue.\n                        this.debug('globstar swallow a segment, and continue');\n                        fr++;\n                    }\n                }\n                // no match was found.\n                // However, in partial mode, we can't say this is necessarily over.\n                /* c8 ignore start */\n                if (partial) {\n                    // ran out of file\n                    this.debug('\\n>>> no match, partial?', file, fr, pattern, pr);\n                    if (fr === fl) {\n                        return true;\n                    }\n                }\n                /* c8 ignore stop */\n                return false;\n            }\n            // something other than **\n            // non-magic patterns just have to match exactly\n            // patterns with magic have been turned into regexps.\n            let hit;\n            if (typeof p === 'string') {\n                hit = f === p;\n                this.debug('string match', p, f, hit);\n            }\n            else {\n                hit = p.test(f);\n                this.debug('pattern match', p, f, hit);\n            }\n            if (!hit)\n                return false;\n        }\n        // Note: ending in / means that we'll get a final \"\"\n        // at the end of the pattern.  This can only match a\n        // corresponding \"\" at the end of the file.\n        // If the file ends in /, then it can only match a\n        // a pattern that ends in /, unless the pattern just\n        // doesn't have any more for it. But, a/b/ should *not*\n        // match \"a/b/*\", even though \"\" matches against the\n        // [^/]*? 
pattern, except in partial mode, where it might\n        // simply not be reached yet.\n        // However, a/b/ should still satisfy a/*\n        // now either we fell off the end of the pattern, or we're done.\n        if (fi === fl && pi === pl) {\n            // ran out of pattern and filename at the same time.\n            // an exact hit!\n            return true;\n        }\n        else if (fi === fl) {\n            // ran out of file, but still had pattern left.\n            // this is ok if we're doing the match as part of\n            // a glob fs traversal.\n            return partial;\n        }\n        else if (pi === pl) {\n            // ran out of pattern, still have file left.\n            // this is only acceptable if we're on the very last\n            // empty segment of a file with a trailing slash.\n            // a/* should match a/b/\n            return fi === fl - 1 && file[fi] === '';\n            /* c8 ignore start */\n        }\n        else {\n            // should be unreachable.\n            throw new Error('wtf?');\n        }\n        /* c8 ignore stop */\n    }\n    braceExpand() {\n        return (0, exports.braceExpand)(this.pattern, this.options);\n    }\n    parse(pattern) {\n        assertValidPattern(pattern);\n        const options = this.options;\n        // shortcuts\n        if (pattern === '**')\n            return exports.GLOBSTAR;\n        if (pattern === '')\n            return '';\n        // far and away, the most common glob pattern parts are\n        // *, *.*, and *.  Add a fast check method for those.\n        let m;\n        let fastTest = null;\n        if ((m = pattern.match(starRE))) {\n            fastTest = options.dot ? starTestDot : starTest;\n        }\n        else if ((m = pattern.match(starDotExtRE))) {\n            fastTest = (options.nocase\n                ? options.dot\n                    ? starDotExtTestNocaseDot\n                    : starDotExtTestNocase\n                : options.dot\n                    ? starDotExtTestDot\n                    : starDotExtTest)(m[1]);\n        }\n        else if ((m = pattern.match(qmarksRE))) {\n            fastTest = (options.nocase\n                ? options.dot\n                    ? qmarksTestNocaseDot\n                    : qmarksTestNocase\n                : options.dot\n                    ? qmarksTestDot\n                    : qmarksTest)(m);\n        }\n        else if ((m = pattern.match(starDotStarRE))) {\n            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;\n        }\n        else if ((m = pattern.match(dotStarRE))) {\n            fastTest = dotStarTest;\n        }\n        let re = '';\n        let hasMagic = false;\n        let escaping = false;\n        // ? => one single character\n        const patternListStack = [];\n        const negativeLists = [];\n        let stateChar = false;\n        let uflag = false;\n        let pl;\n        // . and .. never match anything that doesn't start with .,\n        // even when options.dot is set.  However, if the pattern\n        // starts with ., then traversal patterns can match.\n        let dotTravAllowed = pattern.charAt(0) === '.';\n        let dotFileAllowed = options.dot || dotTravAllowed;\n        const patternStart = () => dotTravAllowed\n            ? ''\n            : dotFileAllowed\n                ? '(?!(?:^|\\\\/)\\\\.{1,2}(?:$|\\\\/))'\n                : '(?!\\\\.)';\n        const subPatternStart = (p) => p.charAt(0) === '.'\n            ? 
''\n            : options.dot\n                ? '(?!(?:^|\\\\/)\\\\.{1,2}(?:$|\\\\/))'\n                : '(?!\\\\.)';\n        const clearStateChar = () => {\n            if (stateChar) {\n                // we had some state-tracking character\n                // that wasn't consumed by this pass.\n                switch (stateChar) {\n                    case '*':\n                        re += star;\n                        hasMagic = true;\n                        break;\n                    case '?':\n                        re += qmark;\n                        hasMagic = true;\n                        break;\n                    default:\n                        re += '\\\\' + stateChar;\n                        break;\n                }\n                this.debug('clearStateChar %j %j', stateChar, re);\n                stateChar = false;\n            }\n        };\n        for (let i = 0, c; i < pattern.length && (c = pattern.charAt(i)); i++) {\n            this.debug('%s\\t%s %s %j', pattern, i, re, c);\n            // skip over any that are escaped.\n            if (escaping) {\n                // completely not allowed, even escaped.\n                // should be impossible.\n                /* c8 ignore start */\n                if (c === '/') {\n                    return false;\n                }\n                /* c8 ignore stop */\n                if (reSpecials[c]) {\n                    re += '\\\\';\n                }\n                re += c;\n                escaping = false;\n                continue;\n            }\n            switch (c) {\n                // Should already be path-split by now.\n                /* c8 ignore start */\n                case '/': {\n                    return false;\n                }\n                /* c8 ignore stop */\n                case '\\\\':\n                    clearStateChar();\n                    escaping = true;\n                    continue;\n                // the various stateChar values\n                // for the \"extglob\" stuff.\n                case '?':\n                case '*':\n                case '+':\n                case '@':\n                case '!':\n                    this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c);\n                    // if we already have a stateChar, then it means\n                    // that there was something like ** or +? 
in there.\n                    // Handle the stateChar, then proceed with this one.\n                    this.debug('call clearStateChar %j', stateChar);\n                    clearStateChar();\n                    stateChar = c;\n                    // if extglob is disabled, then +(asdf|foo) isn't a thing.\n                    // just clear the statechar *now*, rather than even diving into\n                    // the patternList stuff.\n                    if (options.noext)\n                        clearStateChar();\n                    continue;\n                case '(': {\n                    if (!stateChar) {\n                        re += '\\\\(';\n                        continue;\n                    }\n                    const plEntry = {\n                        type: stateChar,\n                        start: i - 1,\n                        reStart: re.length,\n                        open: plTypes[stateChar].open,\n                        close: plTypes[stateChar].close,\n                    };\n                    this.debug(this.pattern, '\\t', plEntry);\n                    patternListStack.push(plEntry);\n                    // negation is (?:(?!(?:js)(?:))[^/]*)\n                    re += plEntry.open;\n                    // next entry starts with a dot maybe?\n                    if (plEntry.start === 0 && plEntry.type !== '!') {\n                        dotTravAllowed = true;\n                        re += subPatternStart(pattern.slice(i + 1));\n                    }\n                    this.debug('plType %j %j', stateChar, re);\n                    stateChar = false;\n                    continue;\n                }\n                case ')': {\n                    const plEntry = patternListStack[patternListStack.length - 1];\n                    if (!plEntry) {\n                        re += '\\\\)';\n                        continue;\n                    }\n                    patternListStack.pop();\n                    // closing an extglob\n                    clearStateChar();\n                    hasMagic = true;\n                    pl = plEntry;\n                    // negation is (?:(?!js)[^/]*)\n                    // The others are (?:)\n                    re += pl.close;\n                    if (pl.type === '!') {\n                        negativeLists.push(Object.assign(pl, { reEnd: re.length }));\n                    }\n                    continue;\n                }\n                case '|': {\n                    const plEntry = patternListStack[patternListStack.length - 1];\n                    if (!plEntry) {\n                        re += '\\\\|';\n                        continue;\n                    }\n                    clearStateChar();\n                    re += '|';\n                    // next subpattern can start with a dot?\n                    if (plEntry.start === 0 && plEntry.type !== '!') {\n                        dotTravAllowed = true;\n                        re += subPatternStart(pattern.slice(i + 1));\n                    }\n                    continue;\n                }\n                // these are mostly the same in regexp and glob\n                case '[':\n                    // swallow any state-tracking char before the [\n                    clearStateChar();\n                    const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(pattern, i);\n                    if (consumed) {\n                        re += src;\n                        uflag = uflag || needUflag;\n                 
       i += consumed - 1;\n                        hasMagic = hasMagic || magic;\n                    }\n                    else {\n                        re += '\\\\[';\n                    }\n                    continue;\n                case ']':\n                    re += '\\\\' + c;\n                    continue;\n                default:\n                    // swallow any state char that wasn't consumed\n                    clearStateChar();\n                    re += regExpEscape(c);\n                    break;\n            } // switch\n        } // for\n        // handle the case where we had a +( thing at the *end*\n        // of the pattern.\n        // each pattern list stack adds 3 chars, and we need to go through\n        // and escape any | chars that were passed through as-is for the regexp.\n        // Go through and escape them, taking care not to double-escape any\n        // | chars that were already escaped.\n        for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {\n            let tail;\n            tail = re.slice(pl.reStart + pl.open.length);\n            this.debug(this.pattern, 'setting tail', re, pl);\n            // maybe some even number of \\, then maybe 1 \\, followed by a |\n            tail = tail.replace(/((?:\\\\{2}){0,64})(\\\\?)\\|/g, (_, $1, $2) => {\n                if (!$2) {\n                    // the | isn't already escaped, so escape it.\n                    $2 = '\\\\';\n                    // should already be done\n                    /* c8 ignore start */\n                }\n                /* c8 ignore stop */\n                // need to escape all those slashes *again*, without escaping the\n                // one that we need for escaping the | character.  As it works out,\n                // escaping an even number of slashes can be done by simply repeating\n                // it exactly after itself.  That's why this trick works.\n                //\n                // I am sorry that you have to see this.\n                return $1 + $1 + $2 + '|';\n            });\n            this.debug('tail=%j\\n   %s', tail, tail, pl, re);\n            const t = pl.type === '*' ? star : pl.type === '?' ? qmark : '\\\\' + pl.type;\n            hasMagic = true;\n            re = re.slice(0, pl.reStart) + t + '\\\\(' + tail;\n        }\n        // handle trailing things that only matter at the very end.\n        clearStateChar();\n        if (escaping) {\n            // trailing \\\\\n            re += '\\\\\\\\';\n        }\n        // only need to apply the nodot start if the re starts with\n        // something that could conceivably capture a dot\n        const addPatternStart = addPatternStartSet[re.charAt(0)];\n        // Hack to work around lack of negative lookbehind in JS\n        // A pattern like: *.!(x).!(y|z) needs to ensure that a name\n        // like 'a.xyz.yz' doesn't match.  
So, the first negative\n        // lookahead, has to look ALL the way ahead, to the end of\n        // the pattern.\n        for (let n = negativeLists.length - 1; n > -1; n--) {\n            const nl = negativeLists[n];\n            const nlBefore = re.slice(0, nl.reStart);\n            const nlFirst = re.slice(nl.reStart, nl.reEnd - 8);\n            let nlAfter = re.slice(nl.reEnd);\n            const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter;\n            // Handle nested stuff like *(*.js|!(*.json)), where open parens\n            // mean that we should *not* include the ) in the bit that is considered\n            // \"after\" the negated section.\n            const closeParensBefore = nlBefore.split(')').length;\n            const openParensBefore = nlBefore.split('(').length - closeParensBefore;\n            let cleanAfter = nlAfter;\n            for (let i = 0; i < openParensBefore; i++) {\n                cleanAfter = cleanAfter.replace(/\\)[+*?]?/, '');\n            }\n            nlAfter = cleanAfter;\n            const dollar = nlAfter === '' ? '(?:$|\\\\/)' : '';\n            re = nlBefore + nlFirst + nlAfter + dollar + nlLast;\n        }\n        // if the re is not \"\" at this point, then we need to make sure\n        // it doesn't match against an empty path part.\n        // Otherwise a/* will match a/, which it should not.\n        if (re !== '' && hasMagic) {\n            re = '(?=.)' + re;\n        }\n        if (addPatternStart) {\n            re = patternStart() + re;\n        }\n        // if it's nocase, and the lcase/uppercase don't match, it's magic\n        if (options.nocase && !hasMagic && !options.nocaseMagicOnly) {\n            hasMagic = pattern.toUpperCase() !== pattern.toLowerCase();\n        }\n        // skip the regexp for non-magical patterns\n        // unescape anything in it, though, so that it'll be\n        // an exact match against a file etc.\n        if (!hasMagic) {\n            return globUnescape(re);\n        }\n        const flags = (options.nocase ? 'i' : '') + (uflag ? 'u' : '');\n        try {\n            const ext = fastTest\n                ? {\n                    _glob: pattern,\n                    _src: re,\n                    test: fastTest,\n                }\n                : {\n                    _glob: pattern,\n                    _src: re,\n                };\n            return Object.assign(new RegExp('^' + re + '$', flags), ext);\n            /* c8 ignore start */\n        }\n        catch (er) {\n            // should be impossible\n            // If it was an invalid regular expression, then it can't match\n            // anything.  This trick looks for a character after the end of\n            // the string, which is of course impossible, except in multi-line\n            // mode, but it's not a /m regex.\n            this.debug('invalid regexp', er);\n            return new RegExp('$.');\n        }\n        /* c8 ignore stop */\n    }\n    makeRe() {\n        if (this.regexp || this.regexp === false)\n            return this.regexp;\n        // at this point, this.set is a 2d array of partial\n        // pattern strings, or \"**\".\n        //\n        // It's better to use .match().  
This function shouldn't\n        // be used, really, but it's pretty convenient sometimes,\n        // when you just want to work with a regex.\n        const set = this.set;\n        if (!set.length) {\n            this.regexp = false;\n            return this.regexp;\n        }\n        const options = this.options;\n        const twoStar = options.noglobstar\n            ? star\n            : options.dot\n                ? twoStarDot\n                : twoStarNoDot;\n        const flags = options.nocase ? 'i' : '';\n        // regexpify non-globstar patterns\n        // if ** is only item, then we just do one twoStar\n        // if ** is first, and there are more, prepend (\\/|twoStar\\/)? to next\n        // if ** is last, append (\\/twoStar|) to previous\n        // if ** is in the middle, append (\\/|\\/twoStar\\/) to previous\n        // then filter out GLOBSTAR symbols\n        let re = set\n            .map(pattern => {\n            const pp = pattern.map(p => typeof p === 'string'\n                ? regExpEscape(p)\n                : p === exports.GLOBSTAR\n                    ? exports.GLOBSTAR\n                    : p._src);\n            pp.forEach((p, i) => {\n                const next = pp[i + 1];\n                const prev = pp[i - 1];\n                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {\n                    return;\n                }\n                if (prev === undefined) {\n                    if (next !== undefined && next !== exports.GLOBSTAR) {\n                        pp[i + 1] = '(?:\\\\/|' + twoStar + '\\\\/)?' + next;\n                    }\n                    else {\n                        pp[i] = twoStar;\n                    }\n                }\n                else if (next === undefined) {\n                    pp[i - 1] = prev + '(?:\\\\/|' + twoStar + ')?';\n                }\n                else if (next !== exports.GLOBSTAR) {\n                    pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + '\\\\/)' + next;\n                    pp[i + 1] = exports.GLOBSTAR;\n                }\n            });\n            return pp.filter(p => p !== exports.GLOBSTAR).join('/');\n        })\n            .join('|');\n        // must match entire pattern\n        // ending in a * or ** will make it less strict.\n        re = '^(?:' + re + ')$';\n        // can match anything, as long as it's not this.\n        if (this.negate)\n            re = '^(?!' + re + ').*$';\n        try {\n            this.regexp = new RegExp(re, flags);\n            /* c8 ignore start */\n        }\n        catch (ex) {\n            // should be impossible\n            this.regexp = false;\n        }\n        /* c8 ignore stop */\n        return this.regexp;\n    }\n    slashSplit(p) {\n        // if p starts with // on windows, we preserve that\n        // so that UNC paths aren't broken.  
Otherwise, any number of\n        // / characters are coalesced into one, unless\n        // preserveMultipleSlashes is set to true.\n        if (this.preserveMultipleSlashes) {\n            return p.split('/');\n        }\n        else if (this.isWindows && /^\\/\\/[^\\/]+/.test(p)) {\n            // add an extra '' for the one we lose\n            return ['', ...p.split(/\\/+/)];\n        }\n        else {\n            return p.split(/\\/+/);\n        }\n    }\n    match(f, partial = this.partial) {\n        this.debug('match', f, this.pattern);\n        // short-circuit in the case of busted things.\n        // comments, etc.\n        if (this.comment) {\n            return false;\n        }\n        if (this.empty) {\n            return f === '';\n        }\n        if (f === '/' && partial) {\n            return true;\n        }\n        const options = this.options;\n        // windows: need to use /, not \\\n        if (this.isWindows) {\n            f = f.split('\\\\').join('/');\n        }\n        // treat the test path as a set of pathparts.\n        const ff = this.slashSplit(f);\n        this.debug(this.pattern, 'split', ff);\n        // just ONE of the pattern sets in this.set needs to match\n        // in order for it to be valid.  If negating, then just one\n        // match means that we have failed.\n        // Either way, return on the first hit.\n        const set = this.set;\n        this.debug(this.pattern, 'set', set);\n        // Find the basename of the path by looking for the last non-empty segment\n        let filename = ff[ff.length - 1];\n        if (!filename) {\n            for (let i = ff.length - 2; !filename && i >= 0; i--) {\n                filename = ff[i];\n            }\n        }\n        for (let i = 0; i < set.length; i++) {\n            const pattern = set[i];\n            let file = ff;\n            if (options.matchBase && pattern.length === 1) {\n                file = [filename];\n            }\n            const hit = this.matchOne(file, pattern, partial);\n            if (hit) {\n                if (options.flipNegate) {\n                    return true;\n                }\n                return !this.negate;\n            }\n        }\n        // didn't get any hits.  this is success if it's a negative\n        // pattern, failure otherwise.\n        if (options.flipNegate) {\n            return false;\n        }\n        return this.negate;\n    }\n    static defaults(def) {\n        return exports.minimatch.defaults(def).Minimatch;\n    }\n}\nexports.Minimatch = Minimatch;\n/* c8 ignore start */\nvar escape_js_2 = require(\"./escape.js\");\nObject.defineProperty(exports, \"escape\", { enumerable: true, get: function () { return escape_js_2.escape; } });\nvar unescape_js_2 = require(\"./unescape.js\");\nObject.defineProperty(exports, \"unescape\", { enumerable: true, get: function () { return unescape_js_2.unescape; } });\n/* c8 ignore stop */\nexports.minimatch.Minimatch = Minimatch;\nexports.minimatch.escape = escape_js_1.escape;\nexports.minimatch.unescape = unescape_js_1.unescape;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.unescape = void 0;\n/**\n * Un-escape a string that has been escaped with {@link escape}.\n *\n * If the {@link windowsPathsNoEscape} option is used, then square-brace\n * escapes are removed, but not backslash escapes.  
For example, it will turn\n * the string `'[*]'` into `*`, but it will not turn `'\\\\*'` into `'*'`,\n * becuase `\\` is a path separator in `windowsPathsNoEscape` mode.\n *\n * When `windowsPathsNoEscape` is not set, then both brace escapes and\n * backslash escapes are removed.\n *\n * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped\n * or unescaped.\n */\nconst unescape = (s, { windowsPathsNoEscape = false, } = {}) => {\n    return windowsPathsNoEscape\n        ? s.replace(/\\[([^\\/\\\\])\\]/g, '$1')\n        : s.replace(/((?!\\\\).|^)\\[([^\\/\\\\])\\]/g, '$1$2').replace(/\\\\([^\\/])/g, '$1');\n};\nexports.unescape = unescape;\n//# sourceMappingURL=unescape.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MultipartBody = void 0;\n/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\nclass MultipartBody {\n    constructor(body) {\n        this.body = body;\n    }\n    get [Symbol.toStringTag]() {\n        return 'MultipartBody';\n    }\n}\nexports.MultipartBody = MultipartBody;\n//# sourceMappingURL=MultipartBody.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n    for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\n__exportStar(require(\"../node-runtime.js\"), exports);\n//# sourceMappingURL=runtime-node.js.map","/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\nconst shims = require('./registry');\nconst auto = require('openai/_shims/auto/runtime');\nif (!shims.kind) shims.setShims(auto.getRuntime(), { auto: true });\nfor (const property of Object.keys(shims)) {\n  Object.defineProperty(exports, property, {\n    get() {\n      return shims[property];\n    },\n  });\n}\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n    return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRuntime = void 0;\n/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\nconst nf = __importStar(require(\"node-fetch\"));\nconst fd = __importStar(require(\"formdata-node\"));\nconst agentkeepalive_1 = __importDefault(require(\"agentkeepalive\"));\nconst abort_controller_1 = require(\"abort-controller\");\nconst node_fs_1 = require(\"node:fs\");\nconst form_data_encoder_1 = require(\"form-data-encoder\");\nconst node_stream_1 = require(\"node:stream\");\nconst MultipartBody_1 = require(\"./MultipartBody.js\");\n// @ts-ignore (this package does not have proper export maps for this export)\nconst ponyfill_es2018_js_1 = require(\"web-streams-polyfill/dist/ponyfill.es2018.js\");\nlet fileFromPathWarned = false;\nasync function fileFromPath(path, ...args) {\n    // this import fails in environments that don't handle export maps correctly, like old versions of Jest\n    const { fileFromPath: _fileFromPath } = await Promise.resolve().then(() => __importStar(require('formdata-node/file-from-path')));\n    if (!fileFromPathWarned) {\n        console.warn(`fileFromPath is deprecated; use fs.createReadStream(${JSON.stringify(path)}) instead`);\n        fileFromPathWarned = true;\n    }\n    // @ts-ignore\n    return await _fileFromPath(path, ...args);\n}\nconst defaultHttpAgent = new agentkeepalive_1.default({ keepAlive: true, timeout: 5 * 60 * 1000 });\nconst defaultHttpsAgent = new agentkeepalive_1.default.HttpsAgent({ keepAlive: true, timeout: 5 * 60 * 1000 });\nasync function getMultipartRequestOptions(form, opts) {\n    const encoder = new form_data_encoder_1.FormDataEncoder(form);\n    const readable = node_stream_1.Readable.from(encoder);\n    const body = new MultipartBody_1.MultipartBody(readable);\n    const headers = {\n        ...opts.headers,\n        ...encoder.headers,\n        'Content-Length': encoder.contentLength,\n    };\n    return { ...opts, body: body, headers };\n}\nfunction getRuntime() {\n    // Polyfill global object if needed.\n    if (typeof AbortController === 'undefined') {\n        // @ts-expect-error (the types are subtly different, but compatible in practice)\n        globalThis.AbortController = abort_controller_1.AbortController;\n    }\n    return {\n        kind: 'node',\n        fetch: nf.default,\n        Request: nf.Request,\n        Response: nf.Response,\n        Headers: nf.Headers,\n        FormData: fd.FormData,\n        Blob: fd.Blob,\n        File: fd.File,\n        ReadableStream: ponyfill_es2018_js_1.ReadableStream,\n        getMultipartRequestOptions,\n        getDefaultAgent: (url) => (url.startsWith('https') ? 
defaultHttpsAgent : defaultHttpAgent),\n        fileFromPath,\n        isFsReadStream: (value) => value instanceof node_fs_1.ReadStream,\n    };\n}\nexports.getRuntime = getRuntime;\n//# sourceMappingURL=node-runtime.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.setShims = exports.isFsReadStream = exports.fileFromPath = exports.getDefaultAgent = exports.getMultipartRequestOptions = exports.ReadableStream = exports.File = exports.Blob = exports.FormData = exports.Headers = exports.Response = exports.Request = exports.fetch = exports.kind = exports.auto = void 0;\nexports.auto = false;\nexports.kind = undefined;\nexports.fetch = undefined;\nexports.Request = undefined;\nexports.Response = undefined;\nexports.Headers = undefined;\nexports.FormData = undefined;\nexports.Blob = undefined;\nexports.File = undefined;\nexports.ReadableStream = undefined;\nexports.getMultipartRequestOptions = undefined;\nexports.getDefaultAgent = undefined;\nexports.fileFromPath = undefined;\nexports.isFsReadStream = undefined;\nfunction setShims(shims, options = { auto: false }) {\n    if (exports.auto) {\n        throw new Error(`you must \\`import 'openai/shims/${shims.kind}'\\` before importing anything else from openai`);\n    }\n    if (exports.kind) {\n        throw new Error(`can't \\`import 'openai/shims/${shims.kind}'\\` after \\`import 'openai/shims/${exports.kind}'\\``);\n    }\n    exports.auto = options.auto;\n    exports.kind = shims.kind;\n    exports.fetch = shims.fetch;\n    exports.Request = shims.Request;\n    exports.Response = shims.Response;\n    exports.Headers = shims.Headers;\n    exports.FormData = shims.FormData;\n    exports.Blob = shims.Blob;\n    exports.File = shims.File;\n    exports.ReadableStream = shims.ReadableStream;\n    exports.getMultipartRequestOptions = shims.getMultipartRequestOptions;\n    exports.getDefaultAgent = shims.getDefaultAgent;\n    exports.fileFromPath = shims.fileFromPath;\n    exports.isFsReadStream = shims.isFsReadStream;\n}\nexports.setShims = setShims;\n//# sourceMappingURL=registry.js.map","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n    if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n    return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n    return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _AbstractPage_client;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toBase64 = exports.getRequiredHeader = exports.isHeadersProtocol = exports.isRunningInBrowser = exports.debug = exports.hasOwn = exports.isEmptyObj = exports.maybeCoerceBoolean = exports.maybeCoerceFloat = exports.maybeCoerceInteger = exports.coerceBoolean = exports.coerceFloat = exports.coerceInteger = exports.readEnv = exports.ensurePresent = exports.castToError = exports.sleep = exports.safeJSON = exports.isRequestOptions = exports.createResponseHeaders = exports.PagePromise = exports.AbstractPage = exports.APIClient = exports.APIPromise = exports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = void 0;\nconst version_1 = require(\"./version.js\");\nconst streaming_1 = require(\"./streaming.js\");\nconst error_1 = require(\"./error.js\");\nconst index_1 = require(\"./_shims/index.js\");\nconst uploads_1 = require(\"./uploads.js\");\nvar uploads_2 = require(\"./uploads.js\");\nObject.defineProperty(exports, \"maybeMultipartFormRequestOptions\", { enumerable: true, get: function () { return uploads_2.maybeMultipartFormRequestOptions; } });\nObject.defineProperty(exports, \"multipartFormRequestOptions\", { enumerable: true, get: function () { return uploads_2.multipartFormRequestOptions; } });\nObject.defineProperty(exports, \"createForm\", { enumerable: true, get: function () { return uploads_2.createForm; } });\nasync function defaultParseResponse(props) {\n    const { response } = props;\n    if (props.options.stream) {\n        debug('response', response.status, response.url, response.headers, response.body);\n        // Note: there is an invariant here that isn't represented in the type system\n        // that if you set `stream: true` the response type must also be `Stream`\n        return streaming_1.Stream.fromSSEResponse(response, props.controller);\n    }\n    // fetch refuses to read the body when the status code is 204.\n    if (response.status === 204) {\n        return null;\n    }\n    if (props.options.__binaryResponse) {\n        return response;\n    }\n    const contentType = response.headers.get('content-type');\n    if (contentType?.includes('application/json')) {\n        const json = await response.json();\n        debug('response', response.status, response.url, response.headers, json);\n        return json;\n    }\n    const text = await response.text();\n    debug('response', response.status, response.url, response.headers, text);\n    // TODO handle blob, arraybuffer, other content types, etc.\n    return text;\n}\n/**\n * A subclass of `Promise` providing additional helper methods\n * for interacting with the SDK.\n */\nclass APIPromise extends Promise {\n    constructor(responsePromise, parseResponse = defaultParseResponse) {\n        super((resolve) => {\n            // this is maybe a bit weird but this has to be a no-op to not implicitly\n            // parse the response body; instead .then, .catch, .finally are overridden\n            // to parse the response\n            resolve(null);\n        });\n        this.responsePromise = responsePromise;\n        this.parseResponse = parseResponse;\n    }\n    _thenUnwrap(transform) {\n        return new APIPromise(this.responsePromise, async (props) => transform(await this.parseResponse(props)));\n    }\n    /**\n     * Gets the raw `Response` instance instead of parsing the response\n     * data.\n     *\n     * If you want to parse 
the response body but still get the `Response`\n     * instance, you can use {@link withResponse()}.\n     *\n     * 👋 Getting the wrong TypeScript type for `Response`?\n     * Try setting `\"moduleResolution\": \"NodeNext\"` if you can,\n     * or add one of these imports before your first `import … from 'openai'`:\n     * - `import 'openai/shims/node'` (if you're running on Node)\n     * - `import 'openai/shims/web'` (otherwise)\n     */\n    asResponse() {\n        return this.responsePromise.then((p) => p.response);\n    }\n    /**\n     * Gets the parsed response data and the raw `Response` instance.\n     *\n     * If you just want to get the raw `Response` instance without parsing it,\n     * you can use {@link asResponse()}.\n     *\n     *\n     * 👋 Getting the wrong TypeScript type for `Response`?\n     * Try setting `\"moduleResolution\": \"NodeNext\"` if you can,\n     * or add one of these imports before your first `import … from 'openai'`:\n     * - `import 'openai/shims/node'` (if you're running on Node)\n     * - `import 'openai/shims/web'` (otherwise)\n     */\n    async withResponse() {\n        const [data, response] = await Promise.all([this.parse(), this.asResponse()]);\n        return { data, response };\n    }\n    parse() {\n        if (!this.parsedPromise) {\n            this.parsedPromise = this.responsePromise.then(this.parseResponse);\n        }\n        return this.parsedPromise;\n    }\n    then(onfulfilled, onrejected) {\n        return this.parse().then(onfulfilled, onrejected);\n    }\n    catch(onrejected) {\n        return this.parse().catch(onrejected);\n    }\n    finally(onfinally) {\n        return this.parse().finally(onfinally);\n    }\n}\nexports.APIPromise = APIPromise;\nclass APIClient {\n    constructor({ baseURL, maxRetries = 2, timeout = 600000, // 10 minutes\n    httpAgent, fetch: overridenFetch, }) {\n        this.baseURL = baseURL;\n        this.maxRetries = validatePositiveInteger('maxRetries', maxRetries);\n        this.timeout = validatePositiveInteger('timeout', timeout);\n        this.httpAgent = httpAgent;\n        this.fetch = overridenFetch ?? 
index_1.fetch;\n    }\n    authHeaders(opts) {\n        return {};\n    }\n    /**\n     * Override this to add your own default headers, for example:\n     *\n     *  {\n     *    ...super.defaultHeaders(),\n     *    Authorization: 'Bearer 123',\n     *  }\n     */\n    defaultHeaders(opts) {\n        return {\n            Accept: 'application/json',\n            'Content-Type': 'application/json',\n            'User-Agent': this.getUserAgent(),\n            ...getPlatformHeaders(),\n            ...this.authHeaders(opts),\n        };\n    }\n    /**\n     * Override this to add your own headers validation:\n     */\n    validateHeaders(headers, customHeaders) { }\n    defaultIdempotencyKey() {\n        return `stainless-node-retry-${uuid4()}`;\n    }\n    get(path, opts) {\n        return this.methodRequest('get', path, opts);\n    }\n    post(path, opts) {\n        return this.methodRequest('post', path, opts);\n    }\n    patch(path, opts) {\n        return this.methodRequest('patch', path, opts);\n    }\n    put(path, opts) {\n        return this.methodRequest('put', path, opts);\n    }\n    delete(path, opts) {\n        return this.methodRequest('delete', path, opts);\n    }\n    methodRequest(method, path, opts) {\n        return this.request(Promise.resolve(opts).then((opts) => ({ method, path, ...opts })));\n    }\n    getAPIList(path, Page, opts) {\n        return this.requestAPIList(Page, { method: 'get', path, ...opts });\n    }\n    calculateContentLength(body) {\n        if (typeof body === 'string') {\n            if (typeof Buffer !== 'undefined') {\n                return Buffer.byteLength(body, 'utf8').toString();\n            }\n            if (typeof TextEncoder !== 'undefined') {\n                const encoder = new TextEncoder();\n                const encoded = encoder.encode(body);\n                return encoded.length.toString();\n            }\n        }\n        return null;\n    }\n    buildRequest(options) {\n        const { method, path, query, headers: headers = {} } = options;\n        const body = (0, uploads_1.isMultipartBody)(options.body) ? options.body.body\n            : options.body ? JSON.stringify(options.body, null, 2)\n                : null;\n        const contentLength = this.calculateContentLength(body);\n        const url = this.buildURL(path, query);\n        if ('timeout' in options)\n            validatePositiveInteger('timeout', options.timeout);\n        const timeout = options.timeout ?? this.timeout;\n        const httpAgent = options.httpAgent ?? this.httpAgent ?? (0, index_1.getDefaultAgent)(url);\n        const minAgentTimeout = timeout + 1000;\n        if (typeof httpAgent?.options?.timeout === 'number' &&\n            minAgentTimeout > (httpAgent.options.timeout ?? 
0)) {\n            // Allow any given request to bump our agent active socket timeout.\n            // This may seem strange, but leaking active sockets should be rare and not particularly problematic,\n            // and without mutating agent we would need to create more of them.\n            // This tradeoff optimizes for performance.\n            httpAgent.options.timeout = minAgentTimeout;\n        }\n        if (this.idempotencyHeader && method !== 'get') {\n            if (!options.idempotencyKey)\n                options.idempotencyKey = this.defaultIdempotencyKey();\n            headers[this.idempotencyHeader] = options.idempotencyKey;\n        }\n        const reqHeaders = {\n            ...(contentLength && { 'Content-Length': contentLength }),\n            ...this.defaultHeaders(options),\n            ...headers,\n        };\n        // let builtin fetch set the Content-Type for multipart bodies\n        if ((0, uploads_1.isMultipartBody)(options.body) && index_1.kind !== 'node') {\n            delete reqHeaders['Content-Type'];\n        }\n        // Strip any headers being explicitly omitted with null\n        Object.keys(reqHeaders).forEach((key) => reqHeaders[key] === null && delete reqHeaders[key]);\n        const req = {\n            method,\n            ...(body && { body: body }),\n            headers: reqHeaders,\n            ...(httpAgent && { agent: httpAgent }),\n            // @ts-ignore node-fetch uses a custom AbortSignal type that is\n            // not compatible with standard web types\n            signal: options.signal ?? null,\n        };\n        this.validateHeaders(reqHeaders, headers);\n        return { req, url, timeout };\n    }\n    /**\n     * Used as a callback for mutating the given `RequestInit` object.\n     *\n     * This is useful for cases where you want to add certain headers based off of\n     * the request properties, e.g. `method` or `url`.\n     */\n    async prepareRequest(request, { url, options }) { }\n    parseHeaders(headers) {\n        return (!headers ? {}\n            : Symbol.iterator in headers ?\n                Object.fromEntries(Array.from(headers).map((header) => [...header]))\n                : { ...headers });\n    }\n    makeStatusError(status, error, message, headers) {\n        return error_1.APIError.generate(status, error, message, headers);\n    }\n    request(options, remainingRetries = null) {\n        return new APIPromise(this.makeRequest(options, remainingRetries));\n    }\n    async makeRequest(optionsInput, retriesRemaining) {\n        const options = await optionsInput;\n        if (retriesRemaining == null) {\n            retriesRemaining = options.maxRetries ?? 
this.maxRetries;\n        }\n        const { req, url, timeout } = this.buildRequest(options);\n        await this.prepareRequest(req, { url, options });\n        debug('request', url, options, req.headers);\n        if (options.signal?.aborted) {\n            throw new error_1.APIUserAbortError();\n        }\n        const controller = new AbortController();\n        const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(exports.castToError);\n        if (response instanceof Error) {\n            if (options.signal?.aborted) {\n                throw new error_1.APIUserAbortError();\n            }\n            if (retriesRemaining) {\n                return this.retryRequest(options, retriesRemaining);\n            }\n            if (response.name === 'AbortError') {\n                throw new error_1.APIConnectionTimeoutError();\n            }\n            throw new error_1.APIConnectionError({ cause: response });\n        }\n        const responseHeaders = (0, exports.createResponseHeaders)(response.headers);\n        if (!response.ok) {\n            if (retriesRemaining && this.shouldRetry(response)) {\n                return this.retryRequest(options, retriesRemaining, responseHeaders);\n            }\n            const errText = await response.text().catch((e) => (0, exports.castToError)(e).message);\n            const errJSON = (0, exports.safeJSON)(errText);\n            const errMessage = errJSON ? undefined : errText;\n            debug('response', response.status, url, responseHeaders, errMessage);\n            const err = this.makeStatusError(response.status, errJSON, errMessage, responseHeaders);\n            throw err;\n        }\n        return { response, options, controller };\n    }\n    requestAPIList(Page, options) {\n        const request = this.makeRequest(options, null);\n        return new PagePromise(this, request, Page);\n    }\n    buildURL(path, query) {\n        const url = isAbsoluteURL(path) ?\n            new URL(path)\n            : new URL(this.baseURL + (this.baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path));\n        const defaultQuery = this.defaultQuery();\n        if (!isEmptyObj(defaultQuery)) {\n            query = { ...defaultQuery, ...query };\n        }\n        if (query) {\n            url.search = this.stringifyQuery(query);\n        }\n        return url.toString();\n    }\n    stringifyQuery(query) {\n        return Object.entries(query)\n            .filter(([_, value]) => typeof value !== 'undefined')\n            .map(([key, value]) => {\n            if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {\n                return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`;\n            }\n            if (value === null) {\n                return `${encodeURIComponent(key)}=`;\n            }\n            throw new error_1.OpenAIError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. 
{ query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`);\n        })\n            .join('&');\n    }\n    async fetchWithTimeout(url, init, ms, controller) {\n        const { signal, ...options } = init || {};\n        if (signal)\n            signal.addEventListener('abort', () => controller.abort());\n        const timeout = setTimeout(() => controller.abort(), ms);\n        return (this.getRequestClient()\n            // use undefined this binding; fetch errors if bound to something else in browser/cloudflare\n            .fetch.call(undefined, url, { signal: controller.signal, ...options })\n            .finally(() => {\n            clearTimeout(timeout);\n        }));\n    }\n    getRequestClient() {\n        return { fetch: this.fetch };\n    }\n    shouldRetry(response) {\n        // Note this is not a standard header.\n        const shouldRetryHeader = response.headers.get('x-should-retry');\n        // If the server explicitly says whether or not to retry, obey.\n        if (shouldRetryHeader === 'true')\n            return true;\n        if (shouldRetryHeader === 'false')\n            return false;\n        // Retry on request timeouts.\n        if (response.status === 408)\n            return true;\n        // Retry on lock timeouts.\n        if (response.status === 409)\n            return true;\n        // Retry on rate limits.\n        if (response.status === 429)\n            return true;\n        // Retry internal errors.\n        if (response.status >= 500)\n            return true;\n        return false;\n    }\n    async retryRequest(options, retriesRemaining, responseHeaders) {\n        // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After\n        let timeoutMillis;\n        const retryAfterHeader = responseHeaders?.['retry-after'];\n        if (retryAfterHeader) {\n            const timeoutSeconds = parseInt(retryAfterHeader);\n            if (!Number.isNaN(timeoutSeconds)) {\n                timeoutMillis = timeoutSeconds * 1000;\n            }\n            else {\n                timeoutMillis = Date.parse(retryAfterHeader) - Date.now();\n            }\n        }\n        // If the API asks us to wait a certain amount of time (and it's a reasonable amount),\n        // just do what it says, but otherwise calculate a default\n        if (!timeoutMillis ||\n            !Number.isInteger(timeoutMillis) ||\n            timeoutMillis <= 0 ||\n            timeoutMillis > 60 * 1000) {\n            const maxRetries = options.maxRetries ?? 
this.maxRetries;\n            timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries);\n        }\n        await (0, exports.sleep)(timeoutMillis);\n        return this.makeRequest(options, retriesRemaining - 1);\n    }\n    calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) {\n        const initialRetryDelay = 0.5;\n        const maxRetryDelay = 8.0;\n        const numRetries = maxRetries - retriesRemaining;\n        // Apply exponential backoff, but not more than the max.\n        const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay);\n        // Apply some jitter, take up to at most 25 percent of the retry time.\n        const jitter = 1 - Math.random() * 0.25;\n        return sleepSeconds * jitter * 1000;\n    }\n    getUserAgent() {\n        return `${this.constructor.name}/JS ${version_1.VERSION}`;\n    }\n}\nexports.APIClient = APIClient;\nclass AbstractPage {\n    constructor(client, response, body, options) {\n        _AbstractPage_client.set(this, void 0);\n        __classPrivateFieldSet(this, _AbstractPage_client, client, \"f\");\n        this.options = options;\n        this.response = response;\n        this.body = body;\n    }\n    hasNextPage() {\n        const items = this.getPaginatedItems();\n        if (!items.length)\n            return false;\n        return this.nextPageInfo() != null;\n    }\n    async getNextPage() {\n        const nextInfo = this.nextPageInfo();\n        if (!nextInfo) {\n            throw new error_1.OpenAIError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.');\n        }\n        const nextOptions = { ...this.options };\n        if ('params' in nextInfo) {\n            nextOptions.query = { ...nextOptions.query, ...nextInfo.params };\n        }\n        else if ('url' in nextInfo) {\n            const params = [...Object.entries(nextOptions.query || {}), ...nextInfo.url.searchParams.entries()];\n            for (const [key, value] of params) {\n                nextInfo.url.searchParams.set(key, value);\n            }\n            nextOptions.query = undefined;\n            nextOptions.path = nextInfo.url.toString();\n        }\n        return await __classPrivateFieldGet(this, _AbstractPage_client, \"f\").requestAPIList(this.constructor, nextOptions);\n    }\n    async *iterPages() {\n        // eslint-disable-next-line @typescript-eslint/no-this-alias\n        let page = this;\n        yield page;\n        while (page.hasNextPage()) {\n            page = await page.getNextPage();\n            yield page;\n        }\n    }\n    async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() {\n        for await (const page of this.iterPages()) {\n            for (const item of page.getPaginatedItems()) {\n                yield item;\n            }\n        }\n    }\n}\nexports.AbstractPage = AbstractPage;\n/**\n * This subclass of Promise will resolve to an instantiated Page once the request completes.\n *\n * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg:\n *\n *    for await (const item of client.items.list()) {\n *      console.log(item)\n *    }\n */\nclass PagePromise extends APIPromise {\n    constructor(client, request, Page) {\n        super(request, async (props) => new Page(client, props.response, await defaultParseResponse(props), props.options));\n    }\n    /**\n     * Allow auto-paginating iteration on an unawaited list call, eg:\n     *\n     *    for 
await (const item of client.items.list()) {\n     *      console.log(item)\n     *    }\n     */\n    async *[Symbol.asyncIterator]() {\n        const page = await this;\n        for await (const item of page) {\n            yield item;\n        }\n    }\n}\nexports.PagePromise = PagePromise;\nconst createResponseHeaders = (headers) => {\n    return new Proxy(Object.fromEntries(\n    // @ts-ignore\n    headers.entries()), {\n        get(target, name) {\n            const key = name.toString();\n            return target[key.toLowerCase()] || target[key];\n        },\n    });\n};\nexports.createResponseHeaders = createResponseHeaders;\n// This is required so that we can determine if a given object matches the RequestOptions\n// type at runtime. While this requires duplication, it is enforced by the TypeScript\n// compiler such that any missing / extraneous keys will cause an error.\nconst requestOptionsKeys = {\n    method: true,\n    path: true,\n    query: true,\n    body: true,\n    headers: true,\n    maxRetries: true,\n    stream: true,\n    timeout: true,\n    httpAgent: true,\n    signal: true,\n    idempotencyKey: true,\n    __binaryResponse: true,\n};\nconst isRequestOptions = (obj) => {\n    return (typeof obj === 'object' &&\n        obj !== null &&\n        !isEmptyObj(obj) &&\n        Object.keys(obj).every((k) => hasOwn(requestOptionsKeys, k)));\n};\nexports.isRequestOptions = isRequestOptions;\nconst getPlatformProperties = () => {\n    if (typeof Deno !== 'undefined' && Deno.build != null) {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': normalizePlatform(Deno.build.os),\n            'X-Stainless-Arch': normalizeArch(Deno.build.arch),\n            'X-Stainless-Runtime': 'deno',\n            'X-Stainless-Runtime-Version': Deno.version,\n        };\n    }\n    if (typeof EdgeRuntime !== 'undefined') {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': 'Unknown',\n            'X-Stainless-Arch': `other:${EdgeRuntime}`,\n            'X-Stainless-Runtime': 'edge',\n            'X-Stainless-Runtime-Version': process.version,\n        };\n    }\n    // Check if Node.js\n    if (Object.prototype.toString.call(typeof process !== 'undefined' ? 
process : 0) === '[object process]') {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': normalizePlatform(process.platform),\n            'X-Stainless-Arch': normalizeArch(process.arch),\n            'X-Stainless-Runtime': 'node',\n            'X-Stainless-Runtime-Version': process.version,\n        };\n    }\n    const browserInfo = getBrowserInfo();\n    if (browserInfo) {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': 'Unknown',\n            'X-Stainless-Arch': 'unknown',\n            'X-Stainless-Runtime': `browser:${browserInfo.browser}`,\n            'X-Stainless-Runtime-Version': browserInfo.version,\n        };\n    }\n    // TODO add support for Cloudflare workers, etc.\n    return {\n        'X-Stainless-Lang': 'js',\n        'X-Stainless-Package-Version': version_1.VERSION,\n        'X-Stainless-OS': 'Unknown',\n        'X-Stainless-Arch': 'unknown',\n        'X-Stainless-Runtime': 'unknown',\n        'X-Stainless-Runtime-Version': 'unknown',\n    };\n};\n// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts\nfunction getBrowserInfo() {\n    if (typeof navigator === 'undefined' || !navigator) {\n        return null;\n    }\n    // NOTE: The order matters here!\n    const browserPatterns = [\n        { key: 'edge', pattern: /Edge(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'ie', pattern: /MSIE(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'ie', pattern: /Trident(?:.*rv\\:(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'chrome', pattern: /Chrome(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'firefox', pattern: /Firefox(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'safari', pattern: /(?:Version\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?(?:\\W+Mobile\\S*)?\\W+Safari/ },\n    ];\n    // Find the FIRST matching browser\n    for (const { key, pattern } of browserPatterns) {\n        const match = pattern.exec(navigator.userAgent);\n        if (match) {\n            const major = match[1] || 0;\n            const minor = match[2] || 0;\n            const patch = match[3] || 0;\n            return { browser: key, version: `${major}.${minor}.${patch}` };\n        }\n    }\n    return null;\n}\nconst normalizeArch = (arch) => {\n    // Node docs:\n    // - https://nodejs.org/api/process.html#processarch\n    // Deno docs:\n    // - https://doc.deno.land/deno/stable/~/Deno.build\n    if (arch === 'x32')\n        return 'x32';\n    if (arch === 'x86_64' || arch === 'x64')\n        return 'x64';\n    if (arch === 'arm')\n        return 'arm';\n    if (arch === 'aarch64' || arch === 'arm64')\n        return 'arm64';\n    if (arch)\n        return `other:${arch}`;\n    return 'unknown';\n};\nconst normalizePlatform = (platform) => {\n    // Node platforms:\n    // - https://nodejs.org/api/process.html#processplatform\n    // Deno platforms:\n    // - https://doc.deno.land/deno/stable/~/Deno.build\n    // - https://github.com/denoland/deno/issues/14799\n    platform = platform.toLowerCase();\n    // NOTE: this iOS check is untested and may not work\n    // Node does not work natively on IOS, there is a fork at\n    // https://github.com/nodejs-mobile/nodejs-mobile\n    // however it is unknown at the time of writing how to detect if it is running\n    if 
(platform.includes('ios'))\n        return 'iOS';\n    if (platform === 'android')\n        return 'Android';\n    if (platform === 'darwin')\n        return 'MacOS';\n    if (platform === 'win32')\n        return 'Windows';\n    if (platform === 'freebsd')\n        return 'FreeBSD';\n    if (platform === 'openbsd')\n        return 'OpenBSD';\n    if (platform === 'linux')\n        return 'Linux';\n    if (platform)\n        return `Other:${platform}`;\n    return 'Unknown';\n};\nlet _platformHeaders;\nconst getPlatformHeaders = () => {\n    return (_platformHeaders ?? (_platformHeaders = getPlatformProperties()));\n};\nconst safeJSON = (text) => {\n    try {\n        return JSON.parse(text);\n    }\n    catch (err) {\n        return undefined;\n    }\n};\nexports.safeJSON = safeJSON;\n// https://stackoverflow.com/a/19709846\nconst startsWithSchemeRegexp = new RegExp('^(?:[a-z]+:)?//', 'i');\nconst isAbsoluteURL = (url) => {\n    return startsWithSchemeRegexp.test(url);\n};\nconst sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));\nexports.sleep = sleep;\nconst validatePositiveInteger = (name, n) => {\n    if (typeof n !== 'number' || !Number.isInteger(n)) {\n        throw new error_1.OpenAIError(`${name} must be an integer`);\n    }\n    if (n < 0) {\n        throw new error_1.OpenAIError(`${name} must be a positive integer`);\n    }\n    return n;\n};\nconst castToError = (err) => {\n    if (err instanceof Error)\n        return err;\n    return new Error(err);\n};\nexports.castToError = castToError;\nconst ensurePresent = (value) => {\n    if (value == null)\n        throw new error_1.OpenAIError(`Expected a value to be given but received ${value} instead.`);\n    return value;\n};\nexports.ensurePresent = ensurePresent;\n/**\n * Read an environment variable.\n *\n * Will return undefined if the environment variable doesn't exist or cannot be accessed.\n */\nconst readEnv = (env) => {\n    if (typeof process !== 'undefined') {\n        return process.env?.[env] ?? 
undefined;\n    }\n    if (typeof Deno !== 'undefined') {\n        return Deno.env?.get?.(env);\n    }\n    return undefined;\n};\nexports.readEnv = readEnv;\nconst coerceInteger = (value) => {\n    if (typeof value === 'number')\n        return Math.round(value);\n    if (typeof value === 'string')\n        return parseInt(value, 10);\n    throw new error_1.OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`);\n};\nexports.coerceInteger = coerceInteger;\nconst coerceFloat = (value) => {\n    if (typeof value === 'number')\n        return value;\n    if (typeof value === 'string')\n        return parseFloat(value);\n    throw new error_1.OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`);\n};\nexports.coerceFloat = coerceFloat;\nconst coerceBoolean = (value) => {\n    if (typeof value === 'boolean')\n        return value;\n    if (typeof value === 'string')\n        return value === 'true';\n    return Boolean(value);\n};\nexports.coerceBoolean = coerceBoolean;\nconst maybeCoerceInteger = (value) => {\n    if (value === undefined) {\n        return undefined;\n    }\n    return (0, exports.coerceInteger)(value);\n};\nexports.maybeCoerceInteger = maybeCoerceInteger;\nconst maybeCoerceFloat = (value) => {\n    if (value === undefined) {\n        return undefined;\n    }\n    return (0, exports.coerceFloat)(value);\n};\nexports.maybeCoerceFloat = maybeCoerceFloat;\nconst maybeCoerceBoolean = (value) => {\n    if (value === undefined) {\n        return undefined;\n    }\n    return (0, exports.coerceBoolean)(value);\n};\nexports.maybeCoerceBoolean = maybeCoerceBoolean;\n// https://stackoverflow.com/a/34491287\nfunction isEmptyObj(obj) {\n    if (!obj)\n        return true;\n    for (const _k in obj)\n        return false;\n    return true;\n}\nexports.isEmptyObj = isEmptyObj;\n// https://eslint.org/docs/latest/rules/no-prototype-builtins\nfunction hasOwn(obj, key) {\n    return Object.prototype.hasOwnProperty.call(obj, key);\n}\nexports.hasOwn = hasOwn;\nfunction debug(action, ...args) {\n    if (typeof process !== 'undefined' && process.env['DEBUG'] === 'true') {\n        console.log(`OpenAI:DEBUG:${action}`, ...args);\n    }\n}\nexports.debug = debug;\n/**\n * https://stackoverflow.com/a/2117523\n */\nconst uuid4 = () => {\n    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {\n        const r = (Math.random() * 16) | 0;\n        const v = c === 'x' ? 
r : (r & 0x3) | 0x8;\n        return v.toString(16);\n    });\n};\nconst isRunningInBrowser = () => {\n    return (\n    // @ts-ignore\n    typeof window !== 'undefined' &&\n        // @ts-ignore\n        typeof window.document !== 'undefined' &&\n        // @ts-ignore\n        typeof navigator !== 'undefined');\n};\nexports.isRunningInBrowser = isRunningInBrowser;\nconst isHeadersProtocol = (headers) => {\n    return typeof headers?.get === 'function';\n};\nexports.isHeadersProtocol = isHeadersProtocol;\nconst getRequiredHeader = (headers, header) => {\n    const lowerCasedHeader = header.toLowerCase();\n    if ((0, exports.isHeadersProtocol)(headers)) {\n        // to deal with the case where the header looks like Stainless-Event-Id\n        const intercapsHeader = header[0]?.toUpperCase() +\n            header.substring(1).replace(/([^\\w])(\\w)/g, (_m, g1, g2) => g1 + g2.toUpperCase());\n        for (const key of [header, lowerCasedHeader, header.toUpperCase(), intercapsHeader]) {\n            const value = headers.get(key);\n            if (value) {\n                return value;\n            }\n        }\n    }\n    for (const [key, value] of Object.entries(headers)) {\n        if (key.toLowerCase() === lowerCasedHeader) {\n            if (Array.isArray(value)) {\n                if (value.length <= 1)\n                    return value[0];\n                console.warn(`Received ${value.length} entries for the ${header} header, using the first entry.`);\n                return value[0];\n            }\n            return value;\n        }\n    }\n    throw new Error(`Could not find ${header} header`);\n};\nexports.getRequiredHeader = getRequiredHeader;\n/**\n * Encodes a string to Base64 format.\n */\nconst toBase64 = (str) => {\n    if (!str)\n        return '';\n    if (typeof Buffer !== 'undefined') {\n        return Buffer.from(str).toString('base64');\n    }\n    if (typeof btoa !== 'undefined') {\n        return btoa(str);\n    }\n    throw new error_1.OpenAIError('Cannot generate b64 string; Expected `Buffer` or `btoa` to be defined');\n};\nexports.toBase64 = toBase64;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.InternalServerError = exports.RateLimitError = exports.UnprocessableEntityError = exports.ConflictError = exports.NotFoundError = exports.PermissionDeniedError = exports.AuthenticationError = exports.BadRequestError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIUserAbortError = exports.APIError = exports.OpenAIError = void 0;\nconst core_1 = require(\"./core.js\");\nclass OpenAIError extends Error {\n}\nexports.OpenAIError = OpenAIError;\nclass APIError extends OpenAIError {\n    constructor(status, error, message, headers) {\n        super(`${APIError.makeMessage(status, error, message)}`);\n        this.status = status;\n        this.headers = headers;\n        const data = error;\n        this.error = data;\n        this.code = data?.['code'];\n        this.param = data?.['param'];\n        this.type = data?.['type'];\n    }\n    static makeMessage(status, error, message) {\n        const msg = error?.message ?\n            typeof error.message === 'string' ? error.message\n                : JSON.stringify(error.message)\n            : error ? 
JSON.stringify(error)\n                : message;\n        if (status && msg) {\n            return `${status} ${msg}`;\n        }\n        if (status) {\n            return `${status} status code (no body)`;\n        }\n        if (msg) {\n            return msg;\n        }\n        return '(no status code or body)';\n    }\n    static generate(status, errorResponse, message, headers) {\n        if (!status) {\n            return new APIConnectionError({ cause: (0, core_1.castToError)(errorResponse) });\n        }\n        const error = errorResponse?.['error'];\n        if (status === 400) {\n            return new BadRequestError(status, error, message, headers);\n        }\n        if (status === 401) {\n            return new AuthenticationError(status, error, message, headers);\n        }\n        if (status === 403) {\n            return new PermissionDeniedError(status, error, message, headers);\n        }\n        if (status === 404) {\n            return new NotFoundError(status, error, message, headers);\n        }\n        if (status === 409) {\n            return new ConflictError(status, error, message, headers);\n        }\n        if (status === 422) {\n            return new UnprocessableEntityError(status, error, message, headers);\n        }\n        if (status === 429) {\n            return new RateLimitError(status, error, message, headers);\n        }\n        if (status >= 500) {\n            return new InternalServerError(status, error, message, headers);\n        }\n        return new APIError(status, error, message, headers);\n    }\n}\nexports.APIError = APIError;\nclass APIUserAbortError extends APIError {\n    constructor({ message } = {}) {\n        super(undefined, undefined, message || 'Request was aborted.', undefined);\n        this.status = undefined;\n    }\n}\nexports.APIUserAbortError = APIUserAbortError;\nclass APIConnectionError extends APIError {\n    constructor({ message, cause }) {\n        super(undefined, undefined, message || 'Connection error.', undefined);\n        this.status = undefined;\n        // in some environments the 'cause' property is already declared\n        // @ts-ignore\n        if (cause)\n            this.cause = cause;\n    }\n}\nexports.APIConnectionError = APIConnectionError;\nclass APIConnectionTimeoutError extends APIConnectionError {\n    constructor({ message } = {}) {\n        super({ message: message ?? 'Request timed out.' 
});\n    }\n}\nexports.APIConnectionTimeoutError = APIConnectionTimeoutError;\nclass BadRequestError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 400;\n    }\n}\nexports.BadRequestError = BadRequestError;\nclass AuthenticationError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 401;\n    }\n}\nexports.AuthenticationError = AuthenticationError;\nclass PermissionDeniedError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 403;\n    }\n}\nexports.PermissionDeniedError = PermissionDeniedError;\nclass NotFoundError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 404;\n    }\n}\nexports.NotFoundError = NotFoundError;\nclass ConflictError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 409;\n    }\n}\nexports.ConflictError = ConflictError;\nclass UnprocessableEntityError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 422;\n    }\n}\nexports.UnprocessableEntityError = UnprocessableEntityError;\nclass RateLimitError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 429;\n    }\n}\nexports.RateLimitError = RateLimitError;\nclass InternalServerError extends APIError {\n}\nexports.InternalServerError = InternalServerError;\n//# sourceMappingURL=error.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fileFromPath = exports.toFile = exports.UnprocessableEntityError = exports.PermissionDeniedError = exports.InternalServerError = exports.AuthenticationError = exports.BadRequestError = exports.RateLimitError = exports.ConflictError = exports.NotFoundError = exports.APIUserAbortError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIError = exports.OpenAIError = exports.OpenAI = void 0;\nconst Core = __importStar(require(\"./core.js\"));\nconst Pagination = __importStar(require(\"./pagination.js\"));\nconst Errors = __importStar(require(\"./error.js\"));\nconst Uploads = __importStar(require(\"./uploads.js\"));\nconst API = __importStar(require(\"openai/resources/index\"));\n/** API Client for interfacing with the OpenAI API. 
*/\nclass OpenAI extends Core.APIClient {\n    /**\n     * API Client for interfacing with the OpenAI API.\n     *\n     * @param {string} [opts.apiKey==process.env['OPENAI_API_KEY'] ?? undefined]\n     * @param {string | null} [opts.organization==process.env['OPENAI_ORG_ID'] ?? null]\n     * @param {string} [opts.baseURL] - Override the default base URL for the API.\n     * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.\n     * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections.\n     * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.\n     * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.\n     * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API.\n     * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API.\n     * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.\n     */\n    constructor({ apiKey = Core.readEnv('OPENAI_API_KEY'), organization = Core.readEnv('OPENAI_ORG_ID') ?? null, ...opts } = {}) {\n        if (apiKey === undefined) {\n            throw new Errors.OpenAIError(\"The OPENAI_API_KEY environment variable is missing or empty; either provide it, or instantiate the OpenAI client with an apiKey option, like new OpenAI({ apiKey: 'My API Key' }).\");\n        }\n        const options = {\n            apiKey,\n            organization,\n            ...opts,\n            baseURL: opts.baseURL ?? `https://api.openai.com/v1`,\n        };\n        if (!options.dangerouslyAllowBrowser && Core.isRunningInBrowser()) {\n            throw new Errors.OpenAIError(\"It looks like you're running in a browser-like environment.\\n\\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\\nIf you understand the risks and have appropriate mitigations in place,\\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\\n\\nnew OpenAI({ apiKey, dangerouslyAllowBrowser: true });\\n\\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\\n\");\n        }\n        super({\n            baseURL: options.baseURL,\n            timeout: options.timeout ?? 
600000 /* 10 minutes */,\n            httpAgent: options.httpAgent,\n            maxRetries: options.maxRetries,\n            fetch: options.fetch,\n        });\n        this.completions = new API.Completions(this);\n        this.chat = new API.Chat(this);\n        this.edits = new API.Edits(this);\n        this.embeddings = new API.Embeddings(this);\n        this.files = new API.Files(this);\n        this.images = new API.Images(this);\n        this.audio = new API.Audio(this);\n        this.moderations = new API.Moderations(this);\n        this.models = new API.Models(this);\n        this.fineTuning = new API.FineTuning(this);\n        this.fineTunes = new API.FineTunes(this);\n        this.beta = new API.Beta(this);\n        this._options = options;\n        this.apiKey = apiKey;\n        this.organization = organization;\n    }\n    defaultQuery() {\n        return this._options.defaultQuery;\n    }\n    defaultHeaders(opts) {\n        return {\n            ...super.defaultHeaders(opts),\n            'OpenAI-Organization': this.organization,\n            ...this._options.defaultHeaders,\n        };\n    }\n    authHeaders(opts) {\n        return { Authorization: `Bearer ${this.apiKey}` };\n    }\n}\nexports.OpenAI = OpenAI;\n_a = OpenAI;\nOpenAI.OpenAI = _a;\nOpenAI.OpenAIError = Errors.OpenAIError;\nOpenAI.APIError = Errors.APIError;\nOpenAI.APIConnectionError = Errors.APIConnectionError;\nOpenAI.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError;\nOpenAI.APIUserAbortError = Errors.APIUserAbortError;\nOpenAI.NotFoundError = Errors.NotFoundError;\nOpenAI.ConflictError = Errors.ConflictError;\nOpenAI.RateLimitError = Errors.RateLimitError;\nOpenAI.BadRequestError = Errors.BadRequestError;\nOpenAI.AuthenticationError = Errors.AuthenticationError;\nOpenAI.InternalServerError = Errors.InternalServerError;\nOpenAI.PermissionDeniedError = Errors.PermissionDeniedError;\nOpenAI.UnprocessableEntityError = Errors.UnprocessableEntityError;\nexports.OpenAIError = Errors.OpenAIError, exports.APIError = Errors.APIError, exports.APIConnectionError = Errors.APIConnectionError, exports.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError, exports.APIUserAbortError = Errors.APIUserAbortError, exports.NotFoundError = Errors.NotFoundError, exports.ConflictError = Errors.ConflictError, exports.RateLimitError = Errors.RateLimitError, exports.BadRequestError = Errors.BadRequestError, exports.AuthenticationError = Errors.AuthenticationError, exports.InternalServerError = Errors.InternalServerError, exports.PermissionDeniedError = Errors.PermissionDeniedError, exports.UnprocessableEntityError = Errors.UnprocessableEntityError;\nexports.toFile = Uploads.toFile;\nexports.fileFromPath = Uploads.fileFromPath;\n(function (OpenAI) {\n    // Helper functions\n    OpenAI.toFile = Uploads.toFile;\n    OpenAI.fileFromPath = Uploads.fileFromPath;\n    OpenAI.Page = Pagination.Page;\n    OpenAI.CursorPage = Pagination.CursorPage;\n    OpenAI.Completions = API.Completions;\n    OpenAI.Chat = API.Chat;\n    OpenAI.Edits = API.Edits;\n    OpenAI.Embeddings = API.Embeddings;\n    OpenAI.Files = API.Files;\n    OpenAI.FileObjectsPage = API.FileObjectsPage;\n    OpenAI.Images = API.Images;\n    OpenAI.Audio = API.Audio;\n    OpenAI.Moderations = API.Moderations;\n    OpenAI.Models = API.Models;\n    OpenAI.ModelsPage = API.ModelsPage;\n    OpenAI.FineTuning = API.FineTuning;\n    OpenAI.FineTunes = API.FineTunes;\n    OpenAI.FineTunesPage = API.FineTunesPage;\n    OpenAI.Beta = API.Beta;\n})(OpenAI = 
exports.OpenAI || (exports.OpenAI = {}));\nexports = module.exports = OpenAI;\nexports.default = OpenAI;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n    if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n    return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n    return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _AbstractChatCompletionRunner_instances, _AbstractChatCompletionRunner_connectedPromise, _AbstractChatCompletionRunner_resolveConnectedPromise, _AbstractChatCompletionRunner_rejectConnectedPromise, _AbstractChatCompletionRunner_endPromise, _AbstractChatCompletionRunner_resolveEndPromise, _AbstractChatCompletionRunner_rejectEndPromise, _AbstractChatCompletionRunner_listeners, _AbstractChatCompletionRunner_ended, _AbstractChatCompletionRunner_errored, _AbstractChatCompletionRunner_aborted, _AbstractChatCompletionRunner_catchingPromiseCreated, _AbstractChatCompletionRunner_getFinalContent, _AbstractChatCompletionRunner_getFinalMessage, _AbstractChatCompletionRunner_getFinalFunctionCall, _AbstractChatCompletionRunner_getFinalFunctionCallResult, _AbstractChatCompletionRunner_calculateTotalUsage, _AbstractChatCompletionRunner_handleError, _AbstractChatCompletionRunner_validateParams, _AbstractChatCompletionRunner_stringifyFunctionCallResult;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AbstractChatCompletionRunner = void 0;\nconst error_1 = require(\"openai/error\");\nconst RunnableFunction_1 = require(\"./RunnableFunction.js\");\nconst chatCompletionUtils_1 = require(\"./chatCompletionUtils.js\");\nconst DEFAULT_MAX_CHAT_COMPLETIONS = 10;\nclass AbstractChatCompletionRunner {\n    constructor() {\n        _AbstractChatCompletionRunner_instances.add(this);\n        this.controller = new AbortController();\n        _AbstractChatCompletionRunner_connectedPromise.set(this, void 0);\n        _AbstractChatCompletionRunner_resolveConnectedPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_rejectConnectedPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_endPromise.set(this, void 0);\n        _AbstractChatCompletionRunner_resolveEndPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_rejectEndPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_listeners.set(this, {});\n        this._chatCompletions = [];\n        this.messages = [];\n        _AbstractChatCompletionRunner_ended.set(this, false);\n        _AbstractChatCompletionRunner_errored.set(this, false);\n        _AbstractChatCompletionRunner_aborted.set(this, false);\n        
_AbstractChatCompletionRunner_catchingPromiseCreated.set(this, false);\n        _AbstractChatCompletionRunner_handleError.set(this, (error) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_errored, true, \"f\");\n            if (error instanceof Error && error.name === 'AbortError') {\n                error = new error_1.APIUserAbortError();\n            }\n            if (error instanceof error_1.APIUserAbortError) {\n                __classPrivateFieldSet(this, _AbstractChatCompletionRunner_aborted, true, \"f\");\n                return this._emit('abort', error);\n            }\n            if (error instanceof error_1.OpenAIError) {\n                return this._emit('error', error);\n            }\n            if (error instanceof Error) {\n                const openAIError = new error_1.OpenAIError(error.message);\n                // @ts-ignore\n                openAIError.cause = error;\n                return this._emit('error', openAIError);\n            }\n            return this._emit('error', new error_1.OpenAIError(String(error)));\n        });\n        __classPrivateFieldSet(this, _AbstractChatCompletionRunner_connectedPromise, new Promise((resolve, reject) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_resolveConnectedPromise, resolve, \"f\");\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_rejectConnectedPromise, reject, \"f\");\n        }), \"f\");\n        __classPrivateFieldSet(this, _AbstractChatCompletionRunner_endPromise, new Promise((resolve, reject) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_resolveEndPromise, resolve, \"f\");\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_rejectEndPromise, reject, \"f\");\n        }), \"f\");\n        // Don't let these promises cause unhandled rejection errors.\n        // we will manually cause an unhandled rejection error later\n        // if the user hasn't registered any error listener or called\n        // any promise-returning method.\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_connectedPromise, \"f\").catch(() => { });\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_endPromise, \"f\").catch(() => { });\n    }\n    _run(executor) {\n        // Unfortunately if we call `executor()` immediately we get runtime errors about\n        // references to `this` before the `super()` constructor call returns.\n        setTimeout(() => {\n            executor().then(() => {\n                this._emitFinal();\n                this._emit('end');\n            }, __classPrivateFieldGet(this, _AbstractChatCompletionRunner_handleError, \"f\"));\n        }, 0);\n    }\n    _addChatCompletion(chatCompletion) {\n        this._chatCompletions.push(chatCompletion);\n        this._emit('chatCompletion', chatCompletion);\n        const message = chatCompletion.choices[0]?.message;\n        if (message)\n            this._addMessage(message);\n        return chatCompletion;\n    }\n    _addMessage(message, emit = true) {\n        this.messages.push(message);\n        if (emit) {\n            this._emit('message', message);\n            if (((0, chatCompletionUtils_1.isFunctionMessage)(message) || (0, chatCompletionUtils_1.isToolMessage)(message)) && message.content) {\n                // Note, this assumes that {role: 'tool', content: …} is always the result of a call of tool of type=function.\n                this._emit('functionCallResult', message.content);\n       
     }\n            else if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.function_call) {\n                this._emit('functionCall', message.function_call);\n            }\n            else if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.tool_calls) {\n                for (const tool_call of message.tool_calls) {\n                    if (tool_call.type === 'function') {\n                        this._emit('functionCall', tool_call.function);\n                    }\n                }\n            }\n        }\n    }\n    _connected() {\n        if (this.ended)\n            return;\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_resolveConnectedPromise, \"f\").call(this);\n        this._emit('connect');\n    }\n    get ended() {\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_ended, \"f\");\n    }\n    get errored() {\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_errored, \"f\");\n    }\n    get aborted() {\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_aborted, \"f\");\n    }\n    abort() {\n        this.controller.abort();\n    }\n    /**\n     * Adds the listener function to the end of the listeners array for the event.\n     * No checks are made to see if the listener has already been added. Multiple calls passing\n     * the same combination of event and listener will result in the listener being added, and\n     * called, multiple times.\n     * @returns this ChatCompletionStream, so that calls can be chained\n     */\n    on(event, listener) {\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] || (__classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] = []);\n        listeners.push({ listener });\n        return this;\n    }\n    /**\n     * Removes the specified listener from the listener array for the event.\n     * off() will remove, at most, one instance of a listener from the listener array. If any single\n     * listener has been added multiple times to the listener array for the specified event, then\n     * off() must be called multiple times to remove each instance.\n     * @returns this ChatCompletionStream, so that calls can be chained\n     */\n    off(event, listener) {\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event];\n        if (!listeners)\n            return this;\n        const index = listeners.findIndex((l) => l.listener === listener);\n        if (index >= 0)\n            listeners.splice(index, 1);\n        return this;\n    }\n    /**\n     * Adds a one-time listener function for the event. The next time the event is triggered,\n     * this listener is removed and then invoked.\n     * @returns this ChatCompletionStream, so that calls can be chained\n     */\n    once(event, listener) {\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] || (__classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] = []);\n        listeners.push({ listener, once: true });\n        return this;\n    }\n    /**\n     * This is similar to `.once()`, but returns a Promise that resolves the next time\n     * the event is triggered, instead of calling a listener callback.\n     * @returns a Promise that resolves the next time given event is triggered,\n     * or rejects if an error is emitted.  
(If you request the 'error' event,\n     * returns a promise that resolves with the error).\n     *\n     * Example:\n     *\n     *   const message = await stream.emitted('message') // rejects if the stream errors\n     */\n    emitted(event) {\n        return new Promise((resolve, reject) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, true, \"f\");\n            if (event !== 'error')\n                this.once('error', reject);\n            this.once(event, resolve);\n        });\n    }\n    async done() {\n        __classPrivateFieldSet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, true, \"f\");\n        await __classPrivateFieldGet(this, _AbstractChatCompletionRunner_endPromise, \"f\");\n    }\n    /**\n     * @returns a promise that resolves with the final ChatCompletion, or rejects\n     * if an error occurred or the stream ended prematurely without producing a ChatCompletion.\n     */\n    async finalChatCompletion() {\n        await this.done();\n        const completion = this._chatCompletions[this._chatCompletions.length - 1];\n        if (!completion)\n            throw new error_1.OpenAIError('stream ended without producing a ChatCompletion');\n        return completion;\n    }\n    /**\n     * @returns a promise that resolves with the content of the final ChatCompletionMessage, or rejects\n     * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.\n     */\n    async finalContent() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalContent).call(this);\n    }\n    /**\n     * @returns a promise that resolves with the the final assistant ChatCompletionMessage response,\n     * or rejects if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.\n     */\n    async finalMessage() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalMessage).call(this);\n    }\n    /**\n     * @returns a promise that resolves with the content of the final FunctionCall, or rejects\n     * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.\n     */\n    async finalFunctionCall() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this);\n    }\n    async finalFunctionCallResult() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this);\n    }\n    async totalUsage() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_calculateTotalUsage).call(this);\n    }\n    allChatCompletions() {\n        return [...this._chatCompletions];\n    }\n    _emit(event, ...args) {\n        // make sure we don't emit any events after end\n        if (__classPrivateFieldGet(this, _AbstractChatCompletionRunner_ended, \"f\"))\n            return;\n        if (event === 'end') {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_ended, true, \"f\");\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_resolveEndPromise, 
\"f\").call(this);\n        }\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event];\n        if (listeners) {\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] = listeners.filter((l) => !l.once);\n            listeners.forEach(({ listener }) => listener(...args));\n        }\n        if (event === 'abort') {\n            const error = args[0];\n            if (!__classPrivateFieldGet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, \"f\") && !listeners?.length) {\n                Promise.reject(error);\n            }\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectConnectedPromise, \"f\").call(this, error);\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectEndPromise, \"f\").call(this, error);\n            this._emit('end');\n            return;\n        }\n        if (event === 'error') {\n            // NOTE: _emit('error', error) should only be called from #handleError().\n            const error = args[0];\n            if (!__classPrivateFieldGet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, \"f\") && !listeners?.length) {\n                // Trigger an unhandled rejection if the user hasn't registered any error handlers.\n                // If you are seeing stack traces here, make sure to handle errors via either:\n                // - runner.on('error', () => ...)\n                // - await runner.done()\n                // - await runner.finalChatCompletion()\n                // - etc.\n                Promise.reject(error);\n            }\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectConnectedPromise, \"f\").call(this, error);\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectEndPromise, \"f\").call(this, error);\n            this._emit('end');\n        }\n    }\n    _emitFinal() {\n        const completion = this._chatCompletions[this._chatCompletions.length - 1];\n        if (completion)\n            this._emit('finalChatCompletion', completion);\n        const finalMessage = this.messages[this.messages.length - 1];\n        if (finalMessage)\n            this._emit('finalMessage', finalMessage);\n        const finalContent = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalContent).call(this);\n        if (finalContent)\n            this._emit('finalContent', finalContent);\n        const finalFunctionCall = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this);\n        if (finalFunctionCall)\n            this._emit('finalFunctionCall', finalFunctionCall);\n        const finalFunctionCallResult = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this);\n        if (finalFunctionCallResult != null)\n            this._emit('finalFunctionCallResult', finalFunctionCallResult);\n        if (this._chatCompletions.some((c) => c.usage)) {\n            this._emit('totalUsage', __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_calculateTotalUsage).call(this));\n        }\n    }\n    async _createChatCompletion(completions, params, options) {\n        const signal = options?.signal;\n        if (signal) {\n            if (signal.aborted)\n            
    this.controller.abort();\n            signal.addEventListener('abort', () => this.controller.abort());\n        }\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_validateParams).call(this, params);\n        const chatCompletion = await completions.create({ ...params, stream: false }, { ...options, signal: this.controller.signal });\n        this._connected();\n        return this._addChatCompletion(chatCompletion);\n    }\n    async _runChatCompletion(completions, params, options) {\n        for (const message of params.messages) {\n            this._addMessage(message, false);\n        }\n        return await this._createChatCompletion(completions, params, options);\n    }\n    async _runFunctions(completions, params, options) {\n        const role = 'function';\n        const { function_call = 'auto', stream, ...restParams } = params;\n        const singleFunctionToCall = typeof function_call !== 'string' && function_call?.name;\n        const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {};\n        const functionsByName = {};\n        for (const f of params.functions) {\n            functionsByName[f.name || f.function.name] = f;\n        }\n        const functions = params.functions.map((f) => ({\n            name: f.name || f.function.name,\n            parameters: f.parameters,\n            description: f.description,\n        }));\n        for (const message of params.messages) {\n            this._addMessage(message, false);\n        }\n        for (let i = 0; i < maxChatCompletions; ++i) {\n            const chatCompletion = await this._createChatCompletion(completions, {\n                ...restParams,\n                function_call,\n                functions,\n                messages: [...this.messages],\n            }, options);\n            const message = chatCompletion.choices[0]?.message;\n            if (!message) {\n                throw new error_1.OpenAIError(`missing message in ChatCompletion response`);\n            }\n            if (!message.function_call)\n                return;\n            const { name, arguments: args } = message.function_call;\n            const fn = functionsByName[name];\n            if (!fn) {\n                const content = `Invalid function_call: ${JSON.stringify(name)}. Available options are: ${functions\n                    .map((f) => JSON.stringify(f.name))\n                    .join(', ')}. Please try again`;\n                this._addMessage({ role, name, content });\n                continue;\n            }\n            else if (singleFunctionToCall && singleFunctionToCall !== name) {\n                const content = `Invalid function_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`;\n                this._addMessage({ role, name, content });\n                continue;\n            }\n            let parsed;\n            try {\n                parsed = (0, RunnableFunction_1.isRunnableFunctionWithParse)(fn) ? await fn.parse(args) : args;\n            }\n            catch (error) {\n                this._addMessage({\n                    role,\n                    name,\n                    content: error instanceof Error ? 
error.message : String(error),\n                });\n                continue;\n            }\n            // @ts-expect-error it can't rule out `never` type.\n            const rawContent = await fn.function(parsed, this);\n            const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent);\n            this._addMessage({ role, name, content });\n            if (singleFunctionToCall)\n                return;\n        }\n    }\n    async _runTools(completions, params, options) {\n        const role = 'tool';\n        const { tool_choice = 'auto', stream, ...restParams } = params;\n        const singleFunctionToCall = typeof tool_choice !== 'string' && tool_choice?.function?.name;\n        const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {};\n        const functionsByName = {};\n        for (const f of params.tools) {\n            if (f.type === 'function') {\n                functionsByName[f.function.name || f.function.function.name] = f.function;\n            }\n        }\n        const tools = 'tools' in params ?\n            params.tools.map((t) => t.type === 'function' ?\n                {\n                    type: 'function',\n                    function: {\n                        name: t.function.name || t.function.function.name,\n                        parameters: t.function.parameters,\n                        description: t.function.description,\n                    },\n                }\n                : t)\n            : undefined;\n        for (const message of params.messages) {\n            this._addMessage(message, false);\n        }\n        for (let i = 0; i < maxChatCompletions; ++i) {\n            const chatCompletion = await this._createChatCompletion(completions, {\n                ...restParams,\n                tool_choice,\n                tools,\n                messages: [...this.messages],\n            }, options);\n            const message = chatCompletion.choices[0]?.message;\n            if (!message) {\n                throw new error_1.OpenAIError(`missing message in ChatCompletion response`);\n            }\n            if (!message.tool_calls)\n                return;\n            for (const tool_call of message.tool_calls) {\n                if (tool_call.type !== 'function')\n                    continue;\n                const tool_call_id = tool_call.id;\n                const { name, arguments: args } = tool_call.function;\n                const fn = functionsByName[name];\n                if (!fn) {\n                    const content = `Invalid tool_call: ${JSON.stringify(name)}. Available options are: ${tools\n                        .map((f) => JSON.stringify(f.function.name))\n                        .join(', ')}. Please try again`;\n                    this._addMessage({ role, tool_call_id, content });\n                    continue;\n                }\n                else if (singleFunctionToCall && singleFunctionToCall !== name) {\n                    const content = `Invalid tool_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`;\n                    this._addMessage({ role, tool_call_id, content });\n                    continue;\n                }\n                let parsed;\n                try {\n                    parsed = (0, RunnableFunction_1.isRunnableFunctionWithParse)(fn) ? 
await fn.parse(args) : args;\n                }\n                catch (error) {\n                    const content = error instanceof Error ? error.message : String(error);\n                    this._addMessage({ role, tool_call_id, content });\n                    continue;\n                }\n                // @ts-expect-error it can't rule out `never` type.\n                const rawContent = await fn.function(parsed, this);\n                const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent);\n                this._addMessage({ role, tool_call_id, content });\n                if (singleFunctionToCall)\n                    return;\n            }\n        }\n    }\n}\nexports.AbstractChatCompletionRunner = AbstractChatCompletionRunner;\n_AbstractChatCompletionRunner_connectedPromise = new WeakMap(), _AbstractChatCompletionRunner_resolveConnectedPromise = new WeakMap(), _AbstractChatCompletionRunner_rejectConnectedPromise = new WeakMap(), _AbstractChatCompletionRunner_endPromise = new WeakMap(), _AbstractChatCompletionRunner_resolveEndPromise = new WeakMap(), _AbstractChatCompletionRunner_rejectEndPromise = new WeakMap(), _AbstractChatCompletionRunner_listeners = new WeakMap(), _AbstractChatCompletionRunner_ended = new WeakMap(), _AbstractChatCompletionRunner_errored = new WeakMap(), _AbstractChatCompletionRunner_aborted = new WeakMap(), _AbstractChatCompletionRunner_catchingPromiseCreated = new WeakMap(), _AbstractChatCompletionRunner_handleError = new WeakMap(), _AbstractChatCompletionRunner_instances = new WeakSet(), _AbstractChatCompletionRunner_getFinalContent = function _AbstractChatCompletionRunner_getFinalContent() {\n    return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalMessage).call(this).content;\n}, _AbstractChatCompletionRunner_getFinalMessage = function _AbstractChatCompletionRunner_getFinalMessage() {\n    let i = this.messages.length;\n    while (i-- > 0) {\n        const message = this.messages[i];\n        if ((0, chatCompletionUtils_1.isAssistantMessage)(message)) {\n            return message;\n        }\n    }\n    throw new error_1.OpenAIError('stream ended without producing a ChatCompletionMessage with role=assistant');\n}, _AbstractChatCompletionRunner_getFinalFunctionCall = function _AbstractChatCompletionRunner_getFinalFunctionCall() {\n    for (let i = this.messages.length - 1; i >= 0; i--) {\n        const message = this.messages[i];\n        if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message?.function_call) {\n            return message.function_call;\n        }\n    }\n    return;\n}, _AbstractChatCompletionRunner_getFinalFunctionCallResult = function _AbstractChatCompletionRunner_getFinalFunctionCallResult() {\n    for (let i = this.messages.length - 1; i >= 0; i--) {\n        const message = this.messages[i];\n        if ((0, chatCompletionUtils_1.isFunctionMessage)(message) && message.content != null) {\n            return message.content;\n        }\n    }\n    return;\n}, _AbstractChatCompletionRunner_calculateTotalUsage = function _AbstractChatCompletionRunner_calculateTotalUsage() {\n    const total = {\n        completion_tokens: 0,\n        prompt_tokens: 0,\n        total_tokens: 0,\n    };\n    for (const { usage } of this._chatCompletions) {\n        if (usage) {\n            total.completion_tokens += usage.completion_tokens;\n        
    total.prompt_tokens += usage.prompt_tokens;\n            total.total_tokens += usage.total_tokens;\n        }\n    }\n    return total;\n}, _AbstractChatCompletionRunner_validateParams = function _AbstractChatCompletionRunner_validateParams(params) {\n    if (params.n != null && params.n > 1) {\n        throw new error_1.OpenAIError('ChatCompletion convenience helpers only support n=1 at this time. To use n>1, please use chat.completions.create() directly.');\n    }\n}, _AbstractChatCompletionRunner_stringifyFunctionCallResult = function _AbstractChatCompletionRunner_stringifyFunctionCallResult(rawContent) {\n    return (typeof rawContent === 'string' ? rawContent\n        : rawContent === undefined ? 'undefined'\n            : JSON.stringify(rawContent));\n};\n//# sourceMappingURL=AbstractChatCompletionRunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChatCompletionRunner = void 0;\nconst AbstractChatCompletionRunner_1 = require(\"./AbstractChatCompletionRunner.js\");\nconst chatCompletionUtils_1 = require(\"./chatCompletionUtils.js\");\nclass ChatCompletionRunner extends AbstractChatCompletionRunner_1.AbstractChatCompletionRunner {\n    static runFunctions(completions, params, options) {\n        const runner = new ChatCompletionRunner();\n        runner._run(() => runner._runFunctions(completions, params, options));\n        return runner;\n    }\n    static runTools(completions, params, options) {\n        const runner = new ChatCompletionRunner();\n        runner._run(() => runner._runTools(completions, params, options));\n        return runner;\n    }\n    _addMessage(message) {\n        super._addMessage(message);\n        if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.content) {\n            this._emit('content', message.content);\n        }\n    }\n}\nexports.ChatCompletionRunner = ChatCompletionRunner;\n//# sourceMappingURL=ChatCompletionRunner.js.map","\"use strict\";\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n    return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n    if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n    return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar _ChatCompletionStream_instances, _ChatCompletionStream_currentChatCompletionSnapshot, _ChatCompletionStream_beginRequest, _ChatCompletionStream_addChunk, _ChatCompletionStream_endRequest, _ChatCompletionStream_accumulateChatCompletion;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChatCompletionStream = void 0;\nconst error_1 = require(\"openai/error\");\nconst AbstractChatCompletionRunner_1 = require(\"./AbstractChatCompletionRunner.js\");\nconst streaming_1 = require(\"openai/streaming\");\nclass ChatCompletionStream extends AbstractChatCompletionRunner_1.AbstractChatCompletionRunner {\n    constructor() {\n        super(...arguments);\n        _ChatCompletionStream_instances.add(this);\n        _ChatCompletionStream_currentChatCompletionSnapshot.set(this, void 0);\n    }\n    get currentChatCompletionSnapshot() {\n        return __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, \"f\");\n    }\n    /**\n     * Intended for use on the frontend, consuming a stream produced with\n     * `.toReadableStream()` on the backend.\n     *\n     * Note that messages sent to the model do not appear in `.on('message')`\n     * in this context.\n     */\n    static fromReadableStream(stream) {\n        const runner = new ChatCompletionStream();\n        runner._run(() => runner._fromReadableStream(stream));\n        return runner;\n    }\n    static createChatCompletion(completions, params, options) {\n        const runner = new ChatCompletionStream();\n        runner._run(() => runner._runChatCompletion(completions, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } }));\n        return runner;\n    }\n    async _createChatCompletion(completions, params, options) {\n        const signal = options?.signal;\n        if (signal) {\n            if (signal.aborted)\n                this.controller.abort();\n            signal.addEventListener('abort', () => this.controller.abort());\n        }\n        __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_beginRequest).call(this);\n        const stream = await completions.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });\n        this._connected();\n        for await (const chunk of stream) {\n            __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_addChunk).call(this, chunk);\n        }\n        if (stream.controller.signal?.aborted) {\n            throw new error_1.APIUserAbortError();\n        }\n        return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_endRequest).call(this));\n    }\n    async _fromReadableStream(readableStream, options) {\n        const signal = options?.signal;\n        if (signal) {\n            if (signal.aborted)\n                this.controller.abort();\n            signal.addEventListener('abort', () => this.controller.abort());\n        }\n        __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_beginRequest).call(this);\n        this._connected();\n        const stream = streaming_1.Stream.fromReadableStream(readableStream, this.controller);\n        let chatId;\n        for await (const chunk of stream) {\n            if (chatId && chatId !== chunk.id) {\n                // A new request has been made.\n        
        this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_endRequest).call(this));\n            }\n            __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_addChunk).call(this, chunk);\n            chatId = chunk.id;\n        }\n        if (stream.controller.signal?.aborted) {\n            throw new error_1.APIUserAbortError();\n        }\n        return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_endRequest).call(this));\n    }\n    [(_ChatCompletionStream_currentChatCompletionSnapshot = new WeakMap(), _ChatCompletionStream_instances = new WeakSet(), _ChatCompletionStream_beginRequest = function _ChatCompletionStream_beginRequest() {\n        if (this.ended)\n            return;\n        __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, \"f\");\n    }, _ChatCompletionStream_addChunk = function _ChatCompletionStream_addChunk(chunk) {\n        if (this.ended)\n            return;\n        const completion = __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_accumulateChatCompletion).call(this, chunk);\n        this._emit('chunk', chunk, completion);\n        const delta = chunk.choices[0]?.delta?.content;\n        const snapshot = completion.choices[0]?.message;\n        if (delta != null && snapshot?.role === 'assistant' && snapshot?.content) {\n            this._emit('content', delta, snapshot.content);\n        }\n    }, _ChatCompletionStream_endRequest = function _ChatCompletionStream_endRequest() {\n        if (this.ended) {\n            throw new error_1.OpenAIError(`stream has ended, this shouldn't happen`);\n        }\n        const snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, \"f\");\n        if (!snapshot) {\n            throw new error_1.OpenAIError(`request ended without sending any chunks`);\n        }\n        __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, \"f\");\n        return finalizeChatCompletion(snapshot);\n    }, _ChatCompletionStream_accumulateChatCompletion = function _ChatCompletionStream_accumulateChatCompletion(chunk) {\n        var _a, _b;\n        let snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, \"f\");\n        const { choices, ...rest } = chunk;\n        if (!snapshot) {\n            snapshot = __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, {\n                ...rest,\n                choices: [],\n            }, \"f\");\n        }\n        else {\n            Object.assign(snapshot, rest);\n        }\n        for (const { delta, finish_reason, index, ...other } of chunk.choices) {\n            let choice = snapshot.choices[index];\n            if (!choice) {\n                snapshot.choices[index] = { finish_reason, index, message: delta, ...other };\n                continue;\n            }\n            if (finish_reason)\n                choice.finish_reason = finish_reason;\n            Object.assign(choice, other);\n            if (!delta)\n                continue; // Shouldn't happen; just in case.\n            const { content, function_call, role, tool_calls } = delta;\n            if (content)\n                choice.message.content = (choice.message.content || '') + content;\n            if (role)\n                
choice.message.role = role;\n            if (function_call) {\n                if (!choice.message.function_call) {\n                    choice.message.function_call = function_call;\n                }\n                else {\n                    if (function_call.name)\n                        choice.message.function_call.name = function_call.name;\n                    if (function_call.arguments) {\n                        (_a = choice.message.function_call).arguments ?? (_a.arguments = '');\n                        choice.message.function_call.arguments += function_call.arguments;\n                    }\n                }\n            }\n            if (tool_calls) {\n                if (!choice.message.tool_calls)\n                    choice.message.tool_calls = [];\n                for (const { index, id, type, function: fn } of tool_calls) {\n                    const tool_call = ((_b = choice.message.tool_calls)[index] ?? (_b[index] = {}));\n                    if (id)\n                        tool_call.id = id;\n                    if (type)\n                        tool_call.type = type;\n                    if (fn)\n                        tool_call.function ?? (tool_call.function = { arguments: '' });\n                    if (fn?.name)\n                        tool_call.function.name = fn.name;\n                    if (fn?.arguments)\n                        tool_call.function.arguments += fn.arguments;\n                }\n            }\n        }\n        return snapshot;\n    }, Symbol.asyncIterator)]() {\n        const pushQueue = [];\n        const readQueue = [];\n        let done = false;\n        this.on('chunk', (chunk) => {\n            const reader = readQueue.shift();\n            if (reader) {\n                reader(chunk);\n            }\n            else {\n                pushQueue.push(chunk);\n            }\n        });\n        this.on('end', () => {\n            done = true;\n            for (const reader of readQueue) {\n                reader(undefined);\n            }\n            readQueue.length = 0;\n        });\n        return {\n            next: async () => {\n                if (!pushQueue.length) {\n                    if (done) {\n                        return { value: undefined, done: true };\n                    }\n                    return new Promise((resolve) => readQueue.push(resolve)).then((chunk) => (chunk ? 
{ value: chunk, done: false } : { value: undefined, done: true }));\n                }\n                const chunk = pushQueue.shift();\n                return { value: chunk, done: false };\n            },\n        };\n    }\n    toReadableStream() {\n        const stream = new streaming_1.Stream(this[Symbol.asyncIterator].bind(this), this.controller);\n        return stream.toReadableStream();\n    }\n}\nexports.ChatCompletionStream = ChatCompletionStream;\nfunction finalizeChatCompletion(snapshot) {\n    const { id, choices, created, model } = snapshot;\n    return {\n        id,\n        choices: choices.map(({ message, finish_reason, index }) => {\n            if (!finish_reason)\n                throw new error_1.OpenAIError(`missing finish_reason for choice ${index}`);\n            const { content = null, function_call, tool_calls } = message;\n            const role = message.role; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine.\n            if (!role)\n                throw new error_1.OpenAIError(`missing role for choice ${index}`);\n            if (function_call) {\n                const { arguments: args, name } = function_call;\n                if (args == null)\n                    throw new error_1.OpenAIError(`missing function_call.arguments for choice ${index}`);\n                if (!name)\n                    throw new error_1.OpenAIError(`missing function_call.name for choice ${index}`);\n                return { message: { content, function_call: { arguments: args, name }, role }, finish_reason, index };\n            }\n            if (tool_calls) {\n                return {\n                    index,\n                    finish_reason,\n                    message: {\n                        role,\n                        content,\n                        tool_calls: tool_calls.map((tool_call, i) => {\n                            const { function: fn, type, id } = tool_call;\n                            const { arguments: args, name } = fn || {};\n                            if (id == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].id\\n${str(snapshot)}`);\n                            if (type == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].type\\n${str(snapshot)}`);\n                            if (name == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].function.name\\n${str(snapshot)}`);\n                            if (args == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].function.arguments\\n${str(snapshot)}`);\n                            return { id, type, function: { name, arguments: args } };\n                        }),\n                    },\n                };\n            }\n            return { message: { content: content, role }, finish_reason, index };\n        }),\n        created,\n        model,\n        object: 'chat.completion',\n    };\n}\nfunction str(x) {\n    return JSON.stringify(x);\n}\n//# sourceMappingURL=ChatCompletionStream.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChatCompletionStreamingRunner = void 0;\nconst ChatCompletionStream_1 = require(\"./ChatCompletionStream.js\");\nclass ChatCompletionStreamingRunner extends 
ChatCompletionStream_1.ChatCompletionStream {\n    static fromReadableStream(stream) {\n        const runner = new ChatCompletionStreamingRunner();\n        runner._run(() => runner._fromReadableStream(stream));\n        return runner;\n    }\n    static runFunctions(completions, params, options) {\n        const runner = new ChatCompletionStreamingRunner();\n        runner._run(() => runner._runFunctions(completions, params, {\n            ...options,\n            headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' },\n        }));\n        return runner;\n    }\n    static runTools(completions, params, options) {\n        const runner = new ChatCompletionStreamingRunner();\n        runner._run(() => runner._runTools(completions, params, {\n            ...options,\n            headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' },\n        }));\n        return runner;\n    }\n}\nexports.ChatCompletionStreamingRunner = ChatCompletionStreamingRunner;\n//# sourceMappingURL=ChatCompletionStreamingRunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ParsingFunction = exports.isRunnableFunctionWithParse = void 0;\nfunction isRunnableFunctionWithParse(fn) {\n    return typeof fn.parse === 'function';\n}\nexports.isRunnableFunctionWithParse = isRunnableFunctionWithParse;\n/**\n * This is helper class for passing a `function` and `parse` where the `function`\n * argument type matches the `parse` return type.\n */\nclass ParsingFunction {\n    constructor(input) {\n        this.function = input.function;\n        this.parse = input.parse;\n        this.parameters = input.parameters;\n        this.description = input.description;\n        this.name = input.name;\n    }\n}\nexports.ParsingFunction = ParsingFunction;\n//# sourceMappingURL=RunnableFunction.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isPresent = exports.isToolMessage = exports.isFunctionMessage = exports.isAssistantMessage = void 0;\nconst isAssistantMessage = (message) => {\n    return message?.role === 'assistant';\n};\nexports.isAssistantMessage = isAssistantMessage;\nconst isFunctionMessage = (message) => {\n    return message?.role === 'function';\n};\nexports.isFunctionMessage = isFunctionMessage;\nconst isToolMessage = (message) => {\n    return message?.role === 'tool';\n};\nexports.isToolMessage = isToolMessage;\nfunction isPresent(obj) {\n    return obj != null;\n}\nexports.isPresent = isPresent;\n//# sourceMappingURL=chatCompletionUtils.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CursorPage = exports.Page = void 0;\nconst core_1 = require(\"./core.js\");\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass Page extends core_1.AbstractPage {\n    constructor(client, response, body, options) {\n        super(client, response, body, options);\n        this.data = body.data;\n        this.object = body.object;\n    }\n    getPaginatedItems() {\n        return this.data;\n    }\n    // @deprecated Please use `nextPageInfo()` instead\n    /**\n     * This page represents a response that isn't actually paginated at the API level\n     * so there will never be any next page params.\n     */\n    nextPageParams() {\n        return null;\n    }\n    nextPageInfo() {\n        return null;\n    }\n}\nexports.Page = Page;\nclass 
CursorPage extends core_1.AbstractPage {\n    constructor(client, response, body, options) {\n        super(client, response, body, options);\n        this.data = body.data;\n    }\n    getPaginatedItems() {\n        return this.data;\n    }\n    // @deprecated Please use `nextPageInfo()` instead\n    nextPageParams() {\n        const info = this.nextPageInfo();\n        if (!info)\n            return null;\n        if ('params' in info)\n            return info.params;\n        const params = Object.fromEntries(info.url.searchParams);\n        if (!Object.keys(params).length)\n            return null;\n        return params;\n    }\n    nextPageInfo() {\n        if (!this.data?.length) {\n            return null;\n        }\n        const next = this.data[this.data.length - 1]?.id;\n        if (!next)\n            return null;\n        return { params: { after: next } };\n    }\n}\nexports.CursorPage = CursorPage;\n//# sourceMappingURL=pagination.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.APIResource = void 0;\nclass APIResource {\n    constructor(client) {\n        this._client = client;\n    }\n}\nexports.APIResource = APIResource;\n//# sourceMappingURL=resource.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Audio = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst SpeechAPI = __importStar(require(\"openai/resources/audio/speech\"));\nconst TranscriptionsAPI = __importStar(require(\"openai/resources/audio/transcriptions\"));\nconst TranslationsAPI = __importStar(require(\"openai/resources/audio/translations\"));\nclass Audio extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.transcriptions = new TranscriptionsAPI.Transcriptions(this._client);\n        this.translations = new TranslationsAPI.Translations(this._client);\n        this.speech = new SpeechAPI.Speech(this._client);\n    }\n}\nexports.Audio = Audio;\n(function (Audio) {\n    Audio.Transcriptions = TranscriptionsAPI.Transcriptions;\n    Audio.Translations = TranslationsAPI.Translations;\n    Audio.Speech = SpeechAPI.Speech;\n})(Audio = exports.Audio || (exports.Audio = {}));\n//# sourceMappingURL=audio.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Speech = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Speech extends resource_1.APIResource {\n    /**\n     * Generates audio from the input text.\n     */\n    create(body, options) {\n        return this._client.post('/audio/speech', { body, ...options, __binaryResponse: true });\n    }\n}\nexports.Speech = Speech;\n(function (Speech) {\n})(Speech = exports.Speech || (exports.Speech = {}));\n//# sourceMappingURL=speech.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Transcriptions = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nclass Transcriptions extends resource_1.APIResource {\n    /**\n     * Transcribes audio into the input language.\n     */\n    create(body, options) {\n        return this._client.post('/audio/transcriptions', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n}\nexports.Transcriptions = Transcriptions;\n(function (Transcriptions) {\n})(Transcriptions = exports.Transcriptions || (exports.Transcriptions = {}));\n//# sourceMappingURL=transcriptions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Translations = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nclass Translations extends resource_1.APIResource {\n    /**\n     * Translates audio into English.\n     */\n    create(body, options) {\n        return this._client.post('/audio/translations', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n}\nexports.Translations = Translations;\n(function (Translations) {\n})(Translations = exports.Translations || (exports.Translations = {}));\n//# 
sourceMappingURL=translations.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AssistantsPage = exports.Assistants = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst AssistantsAPI = __importStar(require(\"openai/resources/beta/assistants/assistants\"));\nconst FilesAPI = __importStar(require(\"openai/resources/beta/assistants/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Assistants extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.files = new FilesAPI.Files(this._client);\n    }\n    /**\n     * Create an assistant with a model and instructions.\n     */\n    create(body, options) {\n        return this._client.post('/assistants', {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves an assistant.\n     */\n    retrieve(assistantId, options) {\n        return this._client.get(`/assistants/${assistantId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies an assistant.\n     */\n    update(assistantId, body, options) {\n        return this._client.post(`/assistants/${assistantId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list({}, query);\n        }\n        return this._client.getAPIList('/assistants', AssistantsPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Delete an assistant.\n     */\n    del(assistantId, options) {\n        return this._client.delete(`/assistants/${assistantId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Assistants = Assistants;\nclass AssistantsPage extends pagination_1.CursorPage {\n}\nexports.AssistantsPage = AssistantsPage;\n(function (Assistants) {\n    Assistants.AssistantsPage = AssistantsAPI.AssistantsPage;\n    Assistants.Files = FilesAPI.Files;\n   
 Assistants.AssistantFilesPage = FilesAPI.AssistantFilesPage;\n})(Assistants = exports.Assistants || (exports.Assistants = {}));\n//# sourceMappingURL=assistants.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AssistantFilesPage = exports.Files = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst FilesAPI = __importStar(require(\"openai/resources/beta/assistants/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Files extends resource_1.APIResource {\n    /**\n     * Create an assistant file by attaching a\n     * [File](https://platform.openai.com/docs/api-reference/files) to an\n     * [assistant](https://platform.openai.com/docs/api-reference/assistants).\n     */\n    create(assistantId, body, options) {\n        return this._client.post(`/assistants/${assistantId}/files`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves an AssistantFile.\n     */\n    retrieve(assistantId, fileId, options) {\n        return this._client.get(`/assistants/${assistantId}/files/${fileId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(assistantId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(assistantId, {}, query);\n        }\n        return this._client.getAPIList(`/assistants/${assistantId}/files`, AssistantFilesPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Delete an assistant file.\n     */\n    del(assistantId, fileId, options) {\n        return this._client.delete(`/assistants/${assistantId}/files/${fileId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Files = Files;\nclass AssistantFilesPage extends pagination_1.CursorPage {\n}\nexports.AssistantFilesPage = AssistantFilesPage;\n(function (Files) {\n    Files.AssistantFilesPage = FilesAPI.AssistantFilesPage;\n})(Files = exports.Files || (exports.Files = {}));\n//# sourceMappingURL=files.js.map","\"use strict\";\n// File generated from our OpenAPI spec 
by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Beta = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst AssistantsAPI = __importStar(require(\"openai/resources/beta/assistants/assistants\"));\nconst ChatAPI = __importStar(require(\"openai/resources/beta/chat/chat\"));\nconst ThreadsAPI = __importStar(require(\"openai/resources/beta/threads/threads\"));\nclass Beta extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.chat = new ChatAPI.Chat(this._client);\n        this.assistants = new AssistantsAPI.Assistants(this._client);\n        this.threads = new ThreadsAPI.Threads(this._client);\n    }\n}\nexports.Beta = Beta;\n(function (Beta) {\n    Beta.Chat = ChatAPI.Chat;\n    Beta.Assistants = AssistantsAPI.Assistants;\n    Beta.AssistantsPage = AssistantsAPI.AssistantsPage;\n    Beta.Threads = ThreadsAPI.Threads;\n})(Beta = exports.Beta || (exports.Beta = {}));\n//# sourceMappingURL=beta.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Chat = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst CompletionsAPI = __importStar(require(\"openai/resources/beta/chat/completions\"));\nclass Chat extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.completions = new CompletionsAPI.Completions(this._client);\n    }\n}\nexports.Chat = Chat;\n(function (Chat) {\n    Chat.Completions = CompletionsAPI.Completions;\n})(Chat = exports.Chat || (exports.Chat = {}));\n//# sourceMappingURL=chat.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = exports.ChatCompletionStream = exports.ParsingFunction = exports.ChatCompletionStreamingRunner = exports.ChatCompletionRunner = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst ChatCompletionRunner_1 = require(\"openai/lib/ChatCompletionRunner\");\nvar ChatCompletionRunner_2 = require(\"openai/lib/ChatCompletionRunner\");\nObject.defineProperty(exports, \"ChatCompletionRunner\", { enumerable: true, get: function () { return ChatCompletionRunner_2.ChatCompletionRunner; } });\nconst ChatCompletionStreamingRunner_1 = require(\"openai/lib/ChatCompletionStreamingRunner\");\nvar ChatCompletionStreamingRunner_2 = require(\"openai/lib/ChatCompletionStreamingRunner\");\nObject.defineProperty(exports, \"ChatCompletionStreamingRunner\", { enumerable: true, get: function () { return ChatCompletionStreamingRunner_2.ChatCompletionStreamingRunner; } });\nvar RunnableFunction_1 = require(\"openai/lib/RunnableFunction\");\nObject.defineProperty(exports, \"ParsingFunction\", { enumerable: true, get: function () { return RunnableFunction_1.ParsingFunction; } });\nconst ChatCompletionStream_1 = require(\"openai/lib/ChatCompletionStream\");\nvar ChatCompletionStream_2 = require(\"openai/lib/ChatCompletionStream\");\nObject.defineProperty(exports, \"ChatCompletionStream\", { enumerable: true, get: function () { return ChatCompletionStream_2.ChatCompletionStream; } });\nclass Completions extends resource_1.APIResource {\n    runFunctions(body, options) {\n        if (body.stream) {\n            return ChatCompletionStreamingRunner_1.ChatCompletionStreamingRunner.runFunctions(this._client.chat.completions, body, options);\n        }\n        return ChatCompletionRunner_1.ChatCompletionRunner.runFunctions(this._client.chat.completions, body, options);\n    }\n    runTools(body, options) {\n        if (body.stream) {\n            return ChatCompletionStreamingRunner_1.ChatCompletionStreamingRunner.runTools(this._client.chat.completions, body, options);\n        }\n        return ChatCompletionRunner_1.ChatCompletionRunner.runTools(this._client.chat.completions, body, options);\n    }\n    /**\n     * Creates a chat completion stream\n     */\n    stream(body, options) {\n        return ChatCompletionStream_1.ChatCompletionStream.createChatCompletion(this._client.chat.completions, body, 
options);\n    }\n}\nexports.Completions = Completions;\n//# sourceMappingURL=completions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MessageFilesPage = exports.Files = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst FilesAPI = __importStar(require(\"openai/resources/beta/threads/messages/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Files extends resource_1.APIResource {\n    /**\n     * Retrieves a message file.\n     */\n    retrieve(threadId, messageId, fileId, options) {\n        return this._client.get(`/threads/${threadId}/messages/${messageId}/files/${fileId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, messageId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, messageId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/messages/${messageId}/files`, MessageFilesPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Files = Files;\nclass MessageFilesPage extends pagination_1.CursorPage {\n}\nexports.MessageFilesPage = MessageFilesPage;\n(function (Files) {\n    Files.MessageFilesPage = FilesAPI.MessageFilesPage;\n})(Files = exports.Files || (exports.Files = {}));\n//# sourceMappingURL=files.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ThreadMessagesPage = exports.Messages = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst MessagesAPI = __importStar(require(\"openai/resources/beta/threads/messages/messages\"));\nconst FilesAPI = __importStar(require(\"openai/resources/beta/threads/messages/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Messages extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.files = new FilesAPI.Files(this._client);\n    }\n    /**\n     * Create a message.\n     */\n    create(threadId, body, options) {\n        return this._client.post(`/threads/${threadId}/messages`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieve a message.\n     */\n    retrieve(threadId, messageId, options) {\n        return this._client.get(`/threads/${threadId}/messages/${messageId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies a message.\n     */\n    update(threadId, messageId, body, options) {\n        return this._client.post(`/threads/${threadId}/messages/${messageId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/messages`, ThreadMessagesPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Messages = Messages;\nclass ThreadMessagesPage extends pagination_1.CursorPage {\n}\nexports.ThreadMessagesPage = ThreadMessagesPage;\n(function (Messages) {\n    Messages.ThreadMessagesPage = MessagesAPI.ThreadMessagesPage;\n    Messages.Files = FilesAPI.Files;\n    Messages.MessageFilesPage = FilesAPI.MessageFilesPage;\n})(Messages = exports.Messages || (exports.Messages = {}));\n//# sourceMappingURL=messages.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.RunsPage = exports.Runs = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst RunsAPI = __importStar(require(\"openai/resources/beta/threads/runs/runs\"));\nconst StepsAPI = __importStar(require(\"openai/resources/beta/threads/runs/steps\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Runs extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.steps = new StepsAPI.Steps(this._client);\n    }\n    /**\n     * Create a run.\n     */\n    create(threadId, body, options) {\n        return this._client.post(`/threads/${threadId}/runs`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves a run.\n     */\n    retrieve(threadId, runId, options) {\n        return this._client.get(`/threads/${threadId}/runs/${runId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies a run.\n     */\n    update(threadId, runId, body, options) {\n        return this._client.post(`/threads/${threadId}/runs/${runId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/runs`, RunsPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Cancels a run that is `in_progress`.\n     */\n    cancel(threadId, runId, options) {\n        return this._client.post(`/threads/${threadId}/runs/${runId}/cancel`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * When a run has the `status: \"requires_action\"` and `required_action.type` is\n     * `submit_tool_outputs`, this endpoint can be used to submit the outputs from the\n     * tool calls once they're all completed. 
All outputs must be submitted in a single\n     * request.\n     */\n    submitToolOutputs(threadId, runId, body, options) {\n        return this._client.post(`/threads/${threadId}/runs/${runId}/submit_tool_outputs`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Runs = Runs;\nclass RunsPage extends pagination_1.CursorPage {\n}\nexports.RunsPage = RunsPage;\n(function (Runs) {\n    Runs.RunsPage = RunsAPI.RunsPage;\n    Runs.Steps = StepsAPI.Steps;\n    Runs.RunStepsPage = StepsAPI.RunStepsPage;\n})(Runs = exports.Runs || (exports.Runs = {}));\n//# sourceMappingURL=runs.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.RunStepsPage = exports.Steps = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst StepsAPI = __importStar(require(\"openai/resources/beta/threads/runs/steps\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Steps extends resource_1.APIResource {\n    /**\n     * Retrieves a run step.\n     */\n    retrieve(threadId, runId, stepId, options) {\n        return this._client.get(`/threads/${threadId}/runs/${runId}/steps/${stepId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, runId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, runId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/runs/${runId}/steps`, RunStepsPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Steps = Steps;\nclass RunStepsPage extends pagination_1.CursorPage {\n}\nexports.RunStepsPage = RunStepsPage;\n(function (Steps) {\n    Steps.RunStepsPage = StepsAPI.RunStepsPage;\n})(Steps = exports.Steps || (exports.Steps = {}));\n//# sourceMappingURL=steps.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Threads = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst MessagesAPI = __importStar(require(\"openai/resources/beta/threads/messages/messages\"));\nconst RunsAPI = __importStar(require(\"openai/resources/beta/threads/runs/runs\"));\nclass Threads extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.runs = new RunsAPI.Runs(this._client);\n        this.messages = new MessagesAPI.Messages(this._client);\n    }\n    create(body = {}, options) {\n        if ((0, core_1.isRequestOptions)(body)) {\n            return this.create({}, body);\n        }\n        return this._client.post('/threads', {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves a thread.\n     */\n    retrieve(threadId, options) {\n        return this._client.get(`/threads/${threadId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies a thread.\n     */\n    update(threadId, body, options) {\n        return this._client.post(`/threads/${threadId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Delete a thread.\n     */\n    del(threadId, options) {\n        return this._client.delete(`/threads/${threadId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Create a thread and run it in one request.\n     */\n    createAndRun(body, options) {\n        return this._client.post('/threads/runs', {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Threads = Threads;\n(function (Threads) {\n    Threads.Runs = RunsAPI.Runs;\n    Threads.RunsPage = RunsAPI.RunsPage;\n    Threads.Messages = MessagesAPI.Messages;\n    Threads.ThreadMessagesPage = MessagesAPI.ThreadMessagesPage;\n})(Threads = exports.Threads || (exports.Threads = {}));\n//# sourceMappingURL=threads.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Chat = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst CompletionsAPI = __importStar(require(\"openai/resources/chat/completions\"));\nclass Chat extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.completions = new CompletionsAPI.Completions(this._client);\n    }\n}\nexports.Chat = Chat;\n(function (Chat) {\n    Chat.Completions = CompletionsAPI.Completions;\n})(Chat = exports.Chat || (exports.Chat = {}));\n//# sourceMappingURL=chat.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Completions extends resource_1.APIResource {\n    create(body, options) {\n        return this._client.post('/chat/completions', { body, ...options, stream: body.stream ?? false });\n    }\n}\nexports.Completions = Completions;\n(function (Completions) {\n})(Completions = exports.Completions || (exports.Completions = {}));\n//# sourceMappingURL=completions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = exports.Chat = void 0;\nvar chat_1 = require(\"./chat.js\");\nObject.defineProperty(exports, \"Chat\", { enumerable: true, get: function () { return chat_1.Chat; } });\nvar completions_1 = require(\"./completions.js\");\nObject.defineProperty(exports, \"Completions\", { enumerable: true, get: function () { return completions_1.Completions; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Completions extends resource_1.APIResource {\n    create(body, options) {\n        return this._client.post('/completions', { body, ...options, stream: body.stream ?? 
false });\n    }\n}\nexports.Completions = Completions;\n(function (Completions) {\n})(Completions = exports.Completions || (exports.Completions = {}));\n//# sourceMappingURL=completions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Edits = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Edits extends resource_1.APIResource {\n    /**\n     * Creates a new edit for the provided input, instruction, and parameters.\n     *\n     * @deprecated The Edits API is deprecated; please use Chat Completions instead.\n     *\n     * https://openai.com/blog/gpt-4-api-general-availability#deprecation-of-the-edits-api\n     */\n    create(body, options) {\n        return this._client.post('/edits', { body, ...options });\n    }\n}\nexports.Edits = Edits;\n(function (Edits) {\n})(Edits = exports.Edits || (exports.Edits = {}));\n//# sourceMappingURL=edits.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Embeddings = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Embeddings extends resource_1.APIResource {\n    /**\n     * Creates an embedding vector representing the input text.\n     */\n    create(body, options) {\n        return this._client.post('/embeddings', { body, ...options });\n    }\n}\nexports.Embeddings = Embeddings;\n(function (Embeddings) {\n})(Embeddings = exports.Embeddings || (exports.Embeddings = {}));\n//# sourceMappingURL=embeddings.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FileObjectsPage = exports.Files = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst core_2 = require(\"openai/core\");\nconst error_1 = require(\"openai/error\");\nconst FilesAPI = __importStar(require(\"openai/resources/files\"));\nconst core_3 = require(\"openai/core\");\nconst pagination_1 = require(\"openai/pagination\");\nclass Files extends resource_1.APIResource {\n    /**\n     * Upload a file that can be used across various endpoints/features. The size of\n     * all the files uploaded by one organization can be up to 100 GB.\n     *\n     * The size of individual files for can be a maximum of 512MB. 
See the\n     * [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) to\n     * learn more about the types of files supported. The Fine-tuning API only supports\n     * `.jsonl` files.\n     *\n     * Please [contact us](https://help.openai.com/) if you need to increase these\n     * storage limits.\n     */\n    create(body, options) {\n        return this._client.post('/files', (0, core_3.multipartFormRequestOptions)({ body, ...options }));\n    }\n    /**\n     * Returns information about a specific file.\n     */\n    retrieve(fileId, options) {\n        return this._client.get(`/files/${fileId}`, options);\n    }\n    list(query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list({}, query);\n        }\n        return this._client.getAPIList('/files', FileObjectsPage, { query, ...options });\n    }\n    /**\n     * Delete a file.\n     */\n    del(fileId, options) {\n        return this._client.delete(`/files/${fileId}`, options);\n    }\n    /**\n     * Returns the contents of the specified file.\n     */\n    content(fileId, options) {\n        return this._client.get(`/files/${fileId}/content`, { ...options, __binaryResponse: true });\n    }\n    /**\n     * Returns the contents of the specified file.\n     *\n     * @deprecated The `.content()` method should be used instead\n     */\n    retrieveContent(fileId, options) {\n        return this._client.get(`/files/${fileId}/content`, {\n            ...options,\n            headers: { Accept: 'application/json', ...options?.headers },\n        });\n    }\n    /**\n     * Waits for the given file to be processed, default timeout is 30 mins.\n     */\n    async waitForProcessing(id, { pollInterval = 5000, maxWait = 30 * 60 * 1000 } = {}) {\n        const TERMINAL_STATES = new Set(['processed', 'error', 'deleted']);\n        const start = Date.now();\n        let file = await this.retrieve(id);\n        while (!file.status || !TERMINAL_STATES.has(file.status)) {\n            await (0, core_2.sleep)(pollInterval);\n            file = await this.retrieve(id);\n            if (Date.now() - start > maxWait) {\n                throw new error_1.APIConnectionTimeoutError({\n                    message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`,\n                });\n            }\n        }\n        return file;\n    }\n}\nexports.Files = Files;\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass FileObjectsPage extends pagination_1.Page {\n}\nexports.FileObjectsPage = FileObjectsPage;\n(function (Files) {\n    Files.FileObjectsPage = FilesAPI.FileObjectsPage;\n})(Files = exports.Files || (exports.Files = {}));\n//# sourceMappingURL=files.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FineTunesPage = exports.FineTunes = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst FineTunesAPI = __importStar(require(\"openai/resources/fine-tunes\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass FineTunes extends resource_1.APIResource {\n    /**\n     * Creates a job that fine-tunes a specified model from a given dataset.\n     *\n     * Response includes details of the enqueued job including job status and the name\n     * of the fine-tuned models once complete.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/legacy-fine-tuning)\n     */\n    create(body, options) {\n        return this._client.post('/fine-tunes', { body, ...options });\n    }\n    /**\n     * Gets info about the fine-tune job.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/legacy-fine-tuning)\n     */\n    retrieve(fineTuneId, options) {\n        return this._client.get(`/fine-tunes/${fineTuneId}`, options);\n    }\n    /**\n     * List your organization's fine-tuning jobs\n     */\n    list(options) {\n        return this._client.getAPIList('/fine-tunes', FineTunesPage, options);\n    }\n    /**\n     * Immediately cancel a fine-tune job.\n     */\n    cancel(fineTuneId, options) {\n        return this._client.post(`/fine-tunes/${fineTuneId}/cancel`, options);\n    }\n    listEvents(fineTuneId, query, options) {\n        return this._client.get(`/fine-tunes/${fineTuneId}/events`, {\n            query,\n            timeout: 86400000,\n            ...options,\n            stream: query?.stream ?? false,\n        });\n    }\n}\nexports.FineTunes = FineTunes;\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass FineTunesPage extends pagination_1.Page {\n}\nexports.FineTunesPage = FineTunesPage;\n(function (FineTunes) {\n    FineTunes.FineTunesPage = FineTunesAPI.FineTunesPage;\n})(FineTunes = exports.FineTunes || (exports.FineTunes = {}));\n//# sourceMappingURL=fine-tunes.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FineTuning = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst JobsAPI = __importStar(require(\"openai/resources/fine-tuning/jobs\"));\nclass FineTuning extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.jobs = new JobsAPI.Jobs(this._client);\n    }\n}\nexports.FineTuning = FineTuning;\n(function (FineTuning) {\n    FineTuning.Jobs = JobsAPI.Jobs;\n    FineTuning.FineTuningJobsPage = JobsAPI.FineTuningJobsPage;\n    FineTuning.FineTuningJobEventsPage = JobsAPI.FineTuningJobEventsPage;\n})(FineTuning = exports.FineTuning || (exports.FineTuning = {}));\n//# sourceMappingURL=fine-tuning.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FineTuningJobEventsPage = exports.FineTuningJobsPage = exports.Jobs = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst JobsAPI = __importStar(require(\"openai/resources/fine-tuning/jobs\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Jobs extends resource_1.APIResource {\n    /**\n     * Creates a job that fine-tunes a specified model from a given dataset.\n     *\n     * Response includes details of the enqueued job including job status and the name\n     * of the fine-tuned models once complete.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)\n     */\n    create(body, options) {\n        return this._client.post('/fine_tuning/jobs', { body, ...options });\n    }\n    /**\n     * Get info about a fine-tuning job.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)\n     */\n    retrieve(fineTuningJobId, options) {\n        return this._client.get(`/fine_tuning/jobs/${fineTuningJobId}`, options);\n    }\n    list(query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list({}, query);\n        }\n        return this._client.getAPIList('/fine_tuning/jobs', FineTuningJobsPage, { query, ...options });\n    }\n    /**\n     * Immediately cancel a fine-tune job.\n     */\n    cancel(fineTuningJobId, options) {\n        return this._client.post(`/fine_tuning/jobs/${fineTuningJobId}/cancel`, options);\n    }\n    listEvents(fineTuningJobId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.listEvents(fineTuningJobId, {}, query);\n        }\n        return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/events`, FineTuningJobEventsPage, {\n            query,\n            ...options,\n        });\n    }\n}\nexports.Jobs = Jobs;\nclass FineTuningJobsPage extends pagination_1.CursorPage {\n}\nexports.FineTuningJobsPage = FineTuningJobsPage;\nclass FineTuningJobEventsPage extends pagination_1.CursorPage {\n}\nexports.FineTuningJobEventsPage = FineTuningJobEventsPage;\n(function (Jobs) {\n    Jobs.FineTuningJobsPage = JobsAPI.FineTuningJobsPage;\n    Jobs.FineTuningJobEventsPage = JobsAPI.FineTuningJobEventsPage;\n})(Jobs = exports.Jobs || (exports.Jobs = {}));\n//# sourceMappingURL=jobs.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Images = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nclass Images extends resource_1.APIResource {\n    /**\n     * Creates a variation of a given image.\n     */\n    createVariation(body, options) {\n        return this._client.post('/images/variations', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n    /**\n     * Creates an edited or extended image 
given an original image and a prompt.\n     */\n    edit(body, options) {\n        return this._client.post('/images/edits', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n    /**\n     * Creates an image given a prompt.\n     */\n    generate(body, options) {\n        return this._client.post('/images/generations', { body, ...options });\n    }\n}\nexports.Images = Images;\n(function (Images) {\n})(Images = exports.Images || (exports.Images = {}));\n//# sourceMappingURL=images.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n    for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Moderations = exports.Models = exports.ModelsPage = exports.Images = exports.FineTuning = exports.FineTunes = exports.FineTunesPage = exports.Files = exports.FileObjectsPage = exports.Edits = exports.Embeddings = exports.Completions = exports.Beta = exports.Audio = void 0;\n__exportStar(require(\"./chat/index.js\"), exports);\n__exportStar(require(\"./shared.js\"), exports);\nvar audio_1 = require(\"./audio/audio.js\");\nObject.defineProperty(exports, \"Audio\", { enumerable: true, get: function () { return audio_1.Audio; } });\nvar beta_1 = require(\"./beta/beta.js\");\nObject.defineProperty(exports, \"Beta\", { enumerable: true, get: function () { return beta_1.Beta; } });\nvar completions_1 = require(\"./completions.js\");\nObject.defineProperty(exports, \"Completions\", { enumerable: true, get: function () { return completions_1.Completions; } });\nvar embeddings_1 = require(\"./embeddings.js\");\nObject.defineProperty(exports, \"Embeddings\", { enumerable: true, get: function () { return embeddings_1.Embeddings; } });\nvar edits_1 = require(\"./edits.js\");\nObject.defineProperty(exports, \"Edits\", { enumerable: true, get: function () { return edits_1.Edits; } });\nvar files_1 = require(\"./files.js\");\nObject.defineProperty(exports, \"FileObjectsPage\", { enumerable: true, get: function () { return files_1.FileObjectsPage; } });\nObject.defineProperty(exports, \"Files\", { enumerable: true, get: function () { return files_1.Files; } });\nvar fine_tunes_1 = require(\"./fine-tunes.js\");\nObject.defineProperty(exports, \"FineTunesPage\", { enumerable: true, get: function () { return fine_tunes_1.FineTunesPage; } });\nObject.defineProperty(exports, \"FineTunes\", { enumerable: true, get: function () { return fine_tunes_1.FineTunes; } });\nvar fine_tuning_1 = require(\"./fine-tuning/fine-tuning.js\");\nObject.defineProperty(exports, \"FineTuning\", { enumerable: true, get: function () { return fine_tuning_1.FineTuning; } });\nvar images_1 = require(\"./images.js\");\nObject.defineProperty(exports, \"Images\", { enumerable: true, get: function () { return images_1.Images; } });\nvar models_1 = require(\"./models.js\");\nObject.defineProperty(exports, 
\"ModelsPage\", { enumerable: true, get: function () { return models_1.ModelsPage; } });\nObject.defineProperty(exports, \"Models\", { enumerable: true, get: function () { return models_1.Models; } });\nvar moderations_1 = require(\"./moderations.js\");\nObject.defineProperty(exports, \"Moderations\", { enumerable: true, get: function () { return moderations_1.Moderations; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ModelsPage = exports.Models = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst ModelsAPI = __importStar(require(\"openai/resources/models\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Models extends resource_1.APIResource {\n    /**\n     * Retrieves a model instance, providing basic information about the model such as\n     * the owner and permissioning.\n     */\n    retrieve(model, options) {\n        return this._client.get(`/models/${model}`, options);\n    }\n    /**\n     * Lists the currently available models, and provides basic information about each\n     * one such as the owner and availability.\n     */\n    list(options) {\n        return this._client.getAPIList('/models', ModelsPage, options);\n    }\n    /**\n     * Delete a fine-tuned model. 
You must have the Owner role in your organization to\n     * delete a model.\n     */\n    del(model, options) {\n        return this._client.delete(`/models/${model}`, options);\n    }\n}\nexports.Models = Models;\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass ModelsPage extends pagination_1.Page {\n}\nexports.ModelsPage = ModelsPage;\n(function (Models) {\n    Models.ModelsPage = ModelsAPI.ModelsPage;\n})(Models = exports.Models || (exports.Models = {}));\n//# sourceMappingURL=models.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Moderations = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Moderations extends resource_1.APIResource {\n    /**\n     * Classifies if text violates OpenAI's Content Policy\n     */\n    create(body, options) {\n        return this._client.post('/moderations', { body, ...options });\n    }\n}\nexports.Moderations = Moderations;\n(function (Moderations) {\n})(Moderations = exports.Moderations || (exports.Moderations = {}));\n//# sourceMappingURL=moderations.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=shared.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Stream = void 0;\nconst index_1 = require(\"./_shims/index.js\");\nconst error_1 = require(\"./error.js\");\nconst error_2 = require(\"openai/error\");\nclass Stream {\n    constructor(iterator, controller) {\n        this.iterator = iterator;\n        this.controller = controller;\n    }\n    static fromSSEResponse(response, controller) {\n        let consumed = false;\n        const decoder = new SSEDecoder();\n        async function* iterMessages() {\n            if (!response.body) {\n                controller.abort();\n                throw new error_1.OpenAIError(`Attempted to iterate over a response with no body`);\n            }\n            const lineDecoder = new LineDecoder();\n            const iter = readableStreamAsyncIterable(response.body);\n            for await (const chunk of iter) {\n                for (const line of lineDecoder.decode(chunk)) {\n                    const sse = decoder.decode(line);\n                    if (sse)\n                        yield sse;\n                }\n            }\n            for (const line of lineDecoder.flush()) {\n                const sse = decoder.decode(line);\n                if (sse)\n                    yield sse;\n            }\n        }\n        async function* iterator() {\n            if (consumed) {\n                throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');\n            }\n            consumed = true;\n            let done = false;\n            try {\n                for await (const sse of iterMessages()) {\n                    if (done)\n                        continue;\n                    if (sse.data.startsWith('[DONE]')) {\n                        done = true;\n                        continue;\n                    }\n                    if (sse.event === null) {\n                        let data;\n                        try {\n                            data = JSON.parse(sse.data);\n                        }\n                        catch (e) {\n                            console.error(`Could not parse message into JSON:`, 
sse.data);\n                            console.error(`From chunk:`, sse.raw);\n                            throw e;\n                        }\n                        if (data && data.error) {\n                            throw new error_2.APIError(undefined, data.error, undefined, undefined);\n                        }\n                        yield data;\n                    }\n                }\n                done = true;\n            }\n            catch (e) {\n                // If the user calls `stream.controller.abort()`, we should exit without throwing.\n                if (e instanceof Error && e.name === 'AbortError')\n                    return;\n                throw e;\n            }\n            finally {\n                // If the user `break`s, abort the ongoing request.\n                if (!done)\n                    controller.abort();\n            }\n        }\n        return new Stream(iterator, controller);\n    }\n    /**\n     * Generates a Stream from a newline-separated ReadableStream\n     * where each item is a JSON value.\n     */\n    static fromReadableStream(readableStream, controller) {\n        let consumed = false;\n        async function* iterLines() {\n            const lineDecoder = new LineDecoder();\n            const iter = readableStreamAsyncIterable(readableStream);\n            for await (const chunk of iter) {\n                for (const line of lineDecoder.decode(chunk)) {\n                    yield line;\n                }\n            }\n            for (const line of lineDecoder.flush()) {\n                yield line;\n            }\n        }\n        async function* iterator() {\n            if (consumed) {\n                throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');\n            }\n            consumed = true;\n            let done = false;\n            try {\n                for await (const line of iterLines()) {\n                    if (done)\n                        continue;\n                    if (line)\n                        yield JSON.parse(line);\n                }\n                done = true;\n            }\n            catch (e) {\n                // If the user calls `stream.controller.abort()`, we should exit without throwing.\n                if (e instanceof Error && e.name === 'AbortError')\n                    return;\n                throw e;\n            }\n            finally {\n                // If the user `break`s, abort the ongoing request.\n                if (!done)\n                    controller.abort();\n            }\n        }\n        return new Stream(iterator, controller);\n    }\n    [Symbol.asyncIterator]() {\n        return this.iterator();\n    }\n    /**\n     * Splits the stream into two streams which can be\n     * independently read from at different speeds.\n     */\n    tee() {\n        const left = [];\n        const right = [];\n        const iterator = this.iterator();\n        const teeIterator = (queue) => {\n            return {\n                next: () => {\n                    if (queue.length === 0) {\n                        const result = iterator.next();\n                        left.push(result);\n                        right.push(result);\n                    }\n                    return queue.shift();\n                },\n            };\n        };\n        return [\n            new Stream(() => teeIterator(left), this.controller),\n            new Stream(() => teeIterator(right), this.controller),\n        ];\n    }\n  
  /**\n     * Converts this stream to a newline-separated ReadableStream of\n     * JSON stringified values in the stream\n     * which can be turned back into a Stream with `Stream.fromReadableStream()`.\n     */\n    toReadableStream() {\n        const self = this;\n        let iter;\n        const encoder = new TextEncoder();\n        return new index_1.ReadableStream({\n            async start() {\n                iter = self[Symbol.asyncIterator]();\n            },\n            async pull(ctrl) {\n                try {\n                    const { value, done } = await iter.next();\n                    if (done)\n                        return ctrl.close();\n                    const bytes = encoder.encode(JSON.stringify(value) + '\\n');\n                    ctrl.enqueue(bytes);\n                }\n                catch (err) {\n                    ctrl.error(err);\n                }\n            },\n            async cancel() {\n                await iter.return?.();\n            },\n        });\n    }\n}\nexports.Stream = Stream;\nclass SSEDecoder {\n    constructor() {\n        this.event = null;\n        this.data = [];\n        this.chunks = [];\n    }\n    decode(line) {\n        if (line.endsWith('\\r')) {\n            line = line.substring(0, line.length - 1);\n        }\n        if (!line) {\n            // empty line and we didn't previously encounter any messages\n            if (!this.event && !this.data.length)\n                return null;\n            const sse = {\n                event: this.event,\n                data: this.data.join('\\n'),\n                raw: this.chunks,\n            };\n            this.event = null;\n            this.data = [];\n            this.chunks = [];\n            return sse;\n        }\n        this.chunks.push(line);\n        if (line.startsWith(':')) {\n            return null;\n        }\n        let [fieldname, _, value] = partition(line, ':');\n        if (value.startsWith(' ')) {\n            value = value.substring(1);\n        }\n        if (fieldname === 'event') {\n            this.event = value;\n        }\n        else if (fieldname === 'data') {\n            this.data.push(value);\n        }\n        return null;\n    }\n}\n/**\n * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally\n * reading lines from text.\n *\n * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258\n */\nclass LineDecoder {\n    constructor() {\n        this.buffer = [];\n        this.trailingCR = false;\n    }\n    decode(chunk) {\n        let text = this.decodeText(chunk);\n        if (this.trailingCR) {\n            text = '\\r' + text;\n            this.trailingCR = false;\n        }\n        if (text.endsWith('\\r')) {\n            this.trailingCR = true;\n            text = text.slice(0, -1);\n        }\n        if (!text) {\n            return [];\n        }\n        const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || '');\n        let lines = text.split(LineDecoder.NEWLINE_REGEXP);\n        if (lines.length === 1 && !trailingNewline) {\n            this.buffer.push(lines[0]);\n            return [];\n        }\n        if (this.buffer.length > 0) {\n            lines = [this.buffer.join('') + lines[0], ...lines.slice(1)];\n            this.buffer = [];\n        }\n        if (!trailingNewline) {\n            this.buffer = [lines.pop() || ''];\n        }\n        return lines;\n    }\n    decodeText(bytes) {\n        if (bytes == null)\n  
          return '';\n        if (typeof bytes === 'string')\n            return bytes;\n        // Node:\n        if (typeof Buffer !== 'undefined') {\n            if (bytes instanceof Buffer) {\n                return bytes.toString();\n            }\n            if (bytes instanceof Uint8Array) {\n                return Buffer.from(bytes).toString();\n            }\n            throw new error_1.OpenAIError(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global \"Buffer\" defined, which this library assumes to be Node. Please report this error.`);\n        }\n        // Browser\n        if (typeof TextDecoder !== 'undefined') {\n            if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) {\n                this.textDecoder ?? (this.textDecoder = new TextDecoder('utf8'));\n                return this.textDecoder.decode(bytes);\n            }\n            throw new error_1.OpenAIError(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.`);\n        }\n        throw new error_1.OpenAIError(`Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.`);\n    }\n    flush() {\n        if (!this.buffer.length && !this.trailingCR) {\n            return [];\n        }\n        const lines = [this.buffer.join('')];\n        this.buffer = [];\n        this.trailingCR = false;\n        return lines;\n    }\n}\n// prettier-ignore\nLineDecoder.NEWLINE_CHARS = new Set(['\\n', '\\r', '\\x0b', '\\x0c', '\\x1c', '\\x1d', '\\x1e', '\\x85', '\\u2028', '\\u2029']);\nLineDecoder.NEWLINE_REGEXP = /\\r\\n|[\\n\\r\\x0b\\x0c\\x1c\\x1d\\x1e\\x85\\u2028\\u2029]/g;\nfunction partition(str, delimiter) {\n    const index = str.indexOf(delimiter);\n    if (index !== -1) {\n        return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)];\n    }\n    return [str, '', ''];\n}\n/**\n * Most browsers don't yet have async iterable support for ReadableStream,\n * and Node has a very different way of reading bytes from its \"ReadableStream\".\n *\n * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490\n */\nfunction readableStreamAsyncIterable(stream) {\n    if (stream[Symbol.asyncIterator])\n        return stream;\n    const reader = stream.getReader();\n    return {\n        async next() {\n            try {\n                const result = await reader.read();\n                if (result?.done)\n                    reader.releaseLock(); // release lock when stream becomes closed\n                return result;\n            }\n            catch (e) {\n                reader.releaseLock(); // release lock when stream becomes errored\n                throw e;\n            }\n        },\n        async return() {\n            const cancelPromise = reader.cancel();\n            reader.releaseLock();\n            await cancelPromise;\n            return { done: true, value: undefined };\n        },\n        [Symbol.asyncIterator]() {\n            return this;\n        },\n    };\n}\n//# sourceMappingURL=streaming.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = exports.isMultipartBody = exports.toFile = exports.isUploadable = exports.isBlobLike = exports.isFileLike = exports.isResponseLike = exports.fileFromPath = void 
0;\nconst index_1 = require(\"./_shims/index.js\");\nvar index_2 = require(\"./_shims/index.js\");\nObject.defineProperty(exports, \"fileFromPath\", { enumerable: true, get: function () { return index_2.fileFromPath; } });\nconst isResponseLike = (value) => value != null &&\n    typeof value === 'object' &&\n    typeof value.url === 'string' &&\n    typeof value.blob === 'function';\nexports.isResponseLike = isResponseLike;\nconst isFileLike = (value) => value != null &&\n    typeof value === 'object' &&\n    typeof value.name === 'string' &&\n    typeof value.lastModified === 'number' &&\n    (0, exports.isBlobLike)(value);\nexports.isFileLike = isFileLike;\n/**\n * The BlobLike type omits arrayBuffer() because @types/node-fetch@^2.6.4 lacks it; but this check\n * adds the arrayBuffer() method type because it is available and used at runtime\n */\nconst isBlobLike = (value) => value != null &&\n    typeof value === 'object' &&\n    typeof value.size === 'number' &&\n    typeof value.type === 'string' &&\n    typeof value.text === 'function' &&\n    typeof value.slice === 'function' &&\n    typeof value.arrayBuffer === 'function';\nexports.isBlobLike = isBlobLike;\nconst isUploadable = (value) => {\n    return (0, exports.isFileLike)(value) || (0, exports.isResponseLike)(value) || (0, index_1.isFsReadStream)(value);\n};\nexports.isUploadable = isUploadable;\n/**\n * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats\n * @param value the raw content of the file.  Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s\n * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible\n * @param {Object=} options additional properties\n * @param {string=} options.type the MIME type of the content\n * @param {number=} options.lastModified the last modified timestamp\n * @returns a {@link File} with the given properties\n */\nasync function toFile(value, name, options = {}) {\n    // If it's a promise, resolve it.\n    value = await value;\n    if ((0, exports.isResponseLike)(value)) {\n        const blob = await value.blob();\n        name || (name = new URL(value.url).pathname.split(/[\\\\/]/).pop() ?? 'unknown_file');\n        return new index_1.File([blob], name, options);\n    }\n    const bits = await getBytes(value);\n    name || (name = getName(value) ?? 
'unknown_file');\n    if (!options.type) {\n        const type = bits[0]?.type;\n        if (typeof type === 'string') {\n            options = { ...options, type };\n        }\n    }\n    return new index_1.File(bits, name, options);\n}\nexports.toFile = toFile;\nasync function getBytes(value) {\n    let parts = [];\n    if (typeof value === 'string' ||\n        ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc.\n        value instanceof ArrayBuffer) {\n        parts.push(value);\n    }\n    else if ((0, exports.isBlobLike)(value)) {\n        parts.push(await value.arrayBuffer());\n    }\n    else if (isAsyncIterableIterator(value) // includes Readable, ReadableStream, etc.\n    ) {\n        for await (const chunk of value) {\n            parts.push(chunk); // TODO, consider validating?\n        }\n    }\n    else {\n        throw new Error(`Unexpected data type: ${typeof value}; constructor: ${value?.constructor?.name}; props: ${propsForError(value)}`);\n    }\n    return parts;\n}\nfunction propsForError(value) {\n    const props = Object.getOwnPropertyNames(value);\n    return `[${props.map((p) => `\"${p}\"`).join(', ')}]`;\n}\nfunction getName(value) {\n    return (getStringFromMaybeBuffer(value.name) ||\n        getStringFromMaybeBuffer(value.filename) ||\n        // For fs.ReadStream\n        getStringFromMaybeBuffer(value.path)?.split(/[\\\\/]/).pop());\n}\nconst getStringFromMaybeBuffer = (x) => {\n    if (typeof x === 'string')\n        return x;\n    if (typeof Buffer !== 'undefined' && x instanceof Buffer)\n        return String(x);\n    return undefined;\n};\nconst isAsyncIterableIterator = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function';\nconst isMultipartBody = (body) => body && typeof body === 'object' && body.body && body[Symbol.toStringTag] === 'MultipartBody';\nexports.isMultipartBody = isMultipartBody;\n/**\n * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value.\n * Otherwise returns the request as is.\n */\nconst maybeMultipartFormRequestOptions = async (opts) => {\n    if (!hasUploadableValue(opts.body))\n        return opts;\n    const form = await (0, exports.createForm)(opts.body);\n    return (0, index_1.getMultipartRequestOptions)(form, opts);\n};\nexports.maybeMultipartFormRequestOptions = maybeMultipartFormRequestOptions;\nconst multipartFormRequestOptions = async (opts) => {\n    const form = await (0, exports.createForm)(opts.body);\n    return (0, index_1.getMultipartRequestOptions)(form, opts);\n};\nexports.multipartFormRequestOptions = multipartFormRequestOptions;\nconst createForm = async (body) => {\n    const form = new index_1.FormData();\n    await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value)));\n    return form;\n};\nexports.createForm = createForm;\nconst hasUploadableValue = (value) => {\n    if ((0, exports.isUploadable)(value))\n        return true;\n    if (Array.isArray(value))\n        return value.some(hasUploadableValue);\n    if (value && typeof value === 'object') {\n        for (const k in value) {\n            if (hasUploadableValue(value[k]))\n                return true;\n        }\n    }\n    return false;\n};\nconst addFormValue = async (form, key, value) => {\n    if (value === undefined)\n        return;\n    if (value == null) {\n        throw new TypeError(`Received null for \"${key}\"; to pass null in FormData, you must use the string 'null'`);\n    }\n    
// TODO: make nested formats configurable\n    if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {\n        form.append(key, String(value));\n    }\n    else if ((0, exports.isUploadable)(value)) {\n        const file = await toFile(value);\n        form.append(key, file);\n    }\n    else if (Array.isArray(value)) {\n        await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry)));\n    }\n    else if (typeof value === 'object') {\n        await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop)));\n    }\n    else {\n        throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`);\n    }\n};\n//# sourceMappingURL=uploads.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.VERSION = void 0;\nexports.VERSION = '4.20.1'; // x-release-please-version\n//# sourceMappingURL=version.js.map","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""}
\ No newline at end of file
+{"version":3,"file":"index.js","mappings":";;;;;;;AAAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/UA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1RA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5lBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3VA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpkCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3JA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9HA;AACA;AACA;AACA;AACA;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1MA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACt2BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1vDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChMA;;;;;;;;;ACAA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACPA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AAEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpnIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5LA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChCA;;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;;;;;;;;ACAA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7HA;AACA;;;;;;;;;ACDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACXA;AACA;
AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;;;;;;;;;ACHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/EA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACLA;AACA;AACA;AACA;AACA;;;;;;;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;;;;;;;;ACvxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACZA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5zBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9IA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;
AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtQA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACnBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACrCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC5GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC3DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACRA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACbA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACpFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC7CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AChBA;AACA;AACA;AACA;;;;;;;;ACHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;AC1VA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;ACjKA;AACA;AACA;AACA;AACA;;;;;
;;;ACJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5NA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvMA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChOA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACrJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACdA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjLA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC7DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;;;;;;;;;ACvCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClHA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzNA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;A
ACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9sBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC77BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC1GA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AAC
A;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACfA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACpBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC9CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/DA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5BA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjDA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC3EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA
;AACA;AACA;AACA;AACA;;;;;;;;;ACnEA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC5EA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACjFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxCA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AC/FA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACxGA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACnIA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AChJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACtFA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACzBA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;ACvJA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AA
CA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;AClVA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;;;;;;;;;;;;;AC3OA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;AACA;;;AC7BA;AACA;;;;AEDA;AACA;AACA;AACA","sources":["../webpack://open-ai-reviewer/./lib/main.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/command.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/core.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/file-command.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/oidc-utils.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/path-utils.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/summary.js","../webpack://open-ai-reviewer/./node_modules/@actions/core/lib/utils.js","../webpack://open-ai-reviewer/./node_modules/@actions/http-client/lib/auth.js","../webpack://open-ai-reviewer/./node_modules/@actions/http-client/lib/index.js","../webpack://open-ai-reviewer/./node_modules/@actions/http-client/lib/proxy.js","../webpack://open-ai-reviewer/./node_modules/@octokit/auth-token/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/core/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/endpoint/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/graphql/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/plugin-paginate-rest/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/plugin-request-log/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/plugin-rest-endpoint-methods/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/request-error/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/request/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/@octokit/rest/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/abort-controller/dist/abort-controller.js","../webpack://open-ai-reviewer/./node_modules/
agentkeepalive/index.js","../webpack://open-ai-reviewer/./node_modules/agentkeepalive/lib/agent.js","../webpack://open-ai-reviewer/./node_modules/agentkeepalive/lib/constants.js","../webpack://open-ai-reviewer/./node_modules/agentkeepalive/lib/https_agent.js","../webpack://open-ai-reviewer/./node_modules/balanced-match/index.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/index.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/lib/add.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/lib/register.js","../webpack://open-ai-reviewer/./node_modules/before-after-hook/lib/remove.js","../webpack://open-ai-reviewer/./node_modules/brace-expansion/index.js","../webpack://open-ai-reviewer/./node_modules/deprecation/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/event-target-shim/dist/event-target-shim.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/node_modules/web-streams-polyfill/dist/ponyfill.js","../webpack://open-ai-reviewer/./node_modules/humanize-ms/index.js","../webpack://open-ai-reviewer/./node_modules/is-plain-object/dist/is-plain-object.js","../webpack://open-ai-reviewer/./node_modules/ms/index.js","../webpack://open-ai-reviewer/./node_modules/node-domexception/index.js","../webpack://open-ai-reviewer/./node_modules/node-fetch/lib/index.js","../webpack://open-ai-reviewer/./node_modules/once/once.js","../webpack://open-ai-reviewer/./node_modules/parse-diff/index.js","../webpack://open-ai-reviewer/./node_modules/tr46/index.js","../webpack://open-ai-reviewer/./node_modules/tunnel/index.js","../webpack://open-ai-reviewer/./node_modules/tunnel/lib/tunnel.js","../webpack://open-ai-reviewer/./node_modules/universal-user-agent/dist-node/index.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/index.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/md5.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/nil.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/parse.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/regex.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/rng.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/sha1.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/stringify.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v1.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v3.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v35.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v4.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/v5.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/validate.js","../webpack://open-ai-reviewer/./node_modules/uuid/dist/version.js","../webpack://open-ai-reviewer/./node_modules/web-streams-polyfill/dist/ponyfill.es2018.js","../webpack://open-ai-reviewer/./node_modules/webidl-conversions/lib/index.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/URL-impl.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/URL.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/public-api.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/url-state-machine.js","../webpack://open-ai-reviewer/./node_modules/whatwg-url/lib/utils.js","../webpack://open-ai-reviewer/./node_modules/wrappy/wrappy.js","../webpack://open-ai-reviewer/./node_modules/@vercel/ncc/dist/ncc/@@notfound.js","../webpack://open-ai-reviewer/external node-commonjs \"assert\"","../webpack://open-ai-reviewer/external node-commonjs 
\"crypto\"","../webpack://open-ai-reviewer/external node-commonjs \"events\"","../webpack://open-ai-reviewer/external node-commonjs \"fs\"","../webpack://open-ai-reviewer/external node-commonjs \"http\"","../webpack://open-ai-reviewer/external node-commonjs \"https\"","../webpack://open-ai-reviewer/external node-commonjs \"net\"","../webpack://open-ai-reviewer/external node-commonjs \"node:fs\"","../webpack://open-ai-reviewer/external node-commonjs \"node:stream\"","../webpack://open-ai-reviewer/external node-commonjs \"os\"","../webpack://open-ai-reviewer/external node-commonjs \"path\"","../webpack://open-ai-reviewer/external node-commonjs \"punycode\"","../webpack://open-ai-reviewer/external node-commonjs \"stream\"","../webpack://open-ai-reviewer/external node-commonjs \"tls\"","../webpack://open-ai-reviewer/external node-commonjs \"url\"","../webpack://open-ai-reviewer/external node-commonjs \"util\"","../webpack://open-ai-reviewer/external node-commonjs \"worker_threads\"","../webpack://open-ai-reviewer/external node-commonjs \"zlib\"","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/FileLike.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/FormDataEncoder.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/FormDataLike.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/index.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/createBoundary.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/escapeName.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isFileLike.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isFormData.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isFunction.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/isPlainObject.js","../webpack://open-ai-reviewer/./node_modules/form-data-encoder/lib/cjs/util/normalizeValue.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/Blob.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/File.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/FormData.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/blobHelpers.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/deprecateConstructorEntries.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/fileFromPath.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/index.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isBlob.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isFile.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isFunction.js","../webpack://open-ai-reviewer/./node_modules/formdata-node/lib/cjs/isPlainObject.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/brace-expressions.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/escape.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/index-cjs.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/index.js","../webpack://open-ai-reviewer/./node_modules/minimatch/dist/cjs/unescape.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/MultipartBody.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/auto/runtime-node.js","../webpack://open-ai-reviewer
/./node_modules/openai/_shims/index.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/node-runtime.js","../webpack://open-ai-reviewer/./node_modules/openai/_shims/registry.js","../webpack://open-ai-reviewer/./node_modules/openai/core.js","../webpack://open-ai-reviewer/./node_modules/openai/error.js","../webpack://open-ai-reviewer/./node_modules/openai/index.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/AbstractChatCompletionRunner.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/ChatCompletionRunner.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/ChatCompletionStream.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/ChatCompletionStreamingRunner.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/RunnableFunction.js","../webpack://open-ai-reviewer/./node_modules/openai/lib/chatCompletionUtils.js","../webpack://open-ai-reviewer/./node_modules/openai/pagination.js","../webpack://open-ai-reviewer/./node_modules/openai/resource.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/audio.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/speech.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/transcriptions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/audio/translations.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/assistants/assistants.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/assistants/files.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/beta.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/chat/chat.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/chat/completions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/messages/files.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/messages/messages.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/runs/runs.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/runs/steps.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/beta/threads/threads.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/chat/chat.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/chat/completions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/chat/index.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/completions.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/edits.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/embeddings.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/files.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/fine-tunes.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/fine-tuning/fine-tuning.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/fine-tuning/jobs.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/images.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/index.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/models.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/moderations.js","../webpack://open-ai-reviewer/./node_modules/openai/resources/shared.js","../webpack://open-ai-reviewer/./node_modules/openai/streaming.js","../webpack://open-ai-reviewer/./node_modules/openai/uploads.js","../webpack:
//open-ai-reviewer/./node_modules/openai/version.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/compose-collection.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/compose-doc.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/compose-node.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/compose-scalar.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/composer.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/resolve-block-map.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/resolve-block-scalar.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/resolve-block-seq.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/resolve-end.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/resolve-flow-collection.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/resolve-flow-scalar.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/resolve-props.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/util-contains-newline.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/util-empty-scalar-position.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/util-flow-indent-check.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/compose/util-map-includes.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/doc/Document.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/doc/anchors.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/doc/applyReviver.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/doc/createNode.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/doc/directives.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/errors.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/index.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/log.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/Alias.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/Collection.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/Node.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/Pair.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/Scalar.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/YAMLMap.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/YAMLSeq.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/addPairToJSMap.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/identity.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/nodes/toJS.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/parse/cst-scalar.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/parse/cst-stringify.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/parse/cst-visit.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/parse/cst.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/parse/lexer.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/parse/line-counter.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/parse/parser.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/public-api.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/Schema.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/common/map.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/common/n
ull.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/common/seq.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/common/string.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/core/bool.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/core/float.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/core/int.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/core/schema.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/json/schema.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/tags.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/binary.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/bool.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/float.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/int.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/merge.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/omap.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/pairs.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/schema.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/set.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/schema/yaml-1.1/timestamp.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/foldFlowLines.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/stringify.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/stringifyCollection.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/stringifyComment.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/stringifyDocument.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/stringifyNumber.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/stringifyPair.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/stringify/stringifyString.js","../webpack://open-ai-reviewer/./node_modules/yaml/dist/visit.js","../webpack://open-ai-reviewer/webpack/bootstrap","../webpack://open-ai-reviewer/webpack/runtime/compat","../webpack://open-ai-reviewer/webpack/before-startup","../webpack://open-ai-reviewer/webpack/startup","../webpack://open-ai-reviewer/webpack/after-startup"],"sourcesContent":["\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n    return new (P || (P = Promise))(function (resolve, reject) {\n        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n        function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n        step((generator = generator.apply(thisArg, _arguments || [])).next());\n    });\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n    return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getApplicableRules = exports.readRules = void 0;\nconst fs_1 = require(\"fs\");\nconst core = __importStar(require(\"@actions/core\"));\nconst openai_1 = __importDefault(require(\"openai\"));\nconst rest_1 = require(\"@octokit/rest\");\nconst parse_diff_1 = __importDefault(require(\"parse-diff\"));\nconst minimatch_1 = __importDefault(require(\"minimatch\"));\nconst yaml_1 = require(\"yaml\");\nconst GITHUB_TOKEN = core.getInput(\"GITHUB_TOKEN\");\nconst OPENAI_API_KEY = core.getInput(\"OPENAI_API_KEY\");\nconst OPENAI_API_MODEL = core.getInput(\"OPENAI_API_MODEL\");\nconst octokit = new rest_1.Octokit({ auth: GITHUB_TOKEN });\nconst openai = new openai_1.default({\n    apiKey: OPENAI_API_KEY,\n});\nfunction getPRDetails() {\n    var _a, _b;\n    return __awaiter(this, void 0, void 0, function* () {\n        const { repository, number } = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH || \"\", \"utf8\"));\n        const prResponse = yield octokit.pulls.get({\n            owner: repository.owner.login,\n            repo: repository.name,\n            pull_number: number,\n        });\n        return {\n            owner: repository.owner.login,\n            repo: repository.name,\n            pull_number: number,\n            title: (_a = prResponse.data.title) !== null && _a !== void 0 ? _a : \"\",\n            description: (_b = prResponse.data.body) !== null && _b !== void 0 ? 
_b : \"\",\n        };\n    });\n}\nfunction getDiff(owner, repo, pull_number) {\n    return __awaiter(this, void 0, void 0, function* () {\n        const response = yield octokit.pulls.get({\n            owner,\n            repo,\n            pull_number,\n            mediaType: { format: \"diff\" },\n        });\n        // @ts-expect-error - response.data is a string\n        return response.data;\n    });\n}\nfunction analyzeCode(parsedDiff, prDetails, rules) {\n    return __awaiter(this, void 0, void 0, function* () {\n        const comments = [];\n        for (const file of parsedDiff) {\n            if (file.to === \"/dev/null\")\n                continue; // Ignore deleted files\n            for (const chunk of file.chunks) {\n                const prompt = createPrompt(file, chunk, prDetails, rules);\n                const aiResponse = yield getAIResponse(prompt);\n                if (aiResponse) {\n                    const newComments = createComment(file, chunk, aiResponse);\n                    if (newComments) {\n                        comments.push(...newComments);\n                    }\n                }\n            }\n        }\n        return comments;\n    });\n}\nfunction getApplicableRules(rules, file) {\n    const { extensions, directories, global } = rules;\n    const extensionsKeys = Object.keys(extensions);\n    const applicableExtensionKeys = extensionsKeys.filter((key) => { var _a; return (_a = file.to) === null || _a === void 0 ? void 0 : _a.endsWith(key); });\n    const extensionRules = applicableExtensionKeys.flatMap((key) => extensions[key]);\n    const directoriesKeys = Object.keys(directories);\n    const applicableDirectoriesKeys = directoriesKeys.filter((key) => file.to ? (0, minimatch_1.default)(file.to, key) : false);\n    const directoryRules = applicableDirectoriesKeys.flatMap((key) => directories[key]);\n    return [...global, ...extensionRules, ...directoryRules];\n}\nexports.getApplicableRules = getApplicableRules;\nfunction createPrompt(file, chunk, prDetails, rules) {\n    var _a;\n    const applicableRules = getApplicableRules(rules, file);\n    return `Your task is to review pull requests. Instructions:\n- Provide the response in following JSON format:  {\"reviews\": [{\"lineNumber\":  , \"reviewComment\": \"\"}]}\n- Do not give positive comments or compliments.\n- Provide comments and suggestions ONLY if there is something to improve, otherwise \"reviews\" should be an empty array.\n- Write the comment in GitHub Markdown format.\n- Use the given description only for the overall context and only comment the code.\n- IMPORTANT: NEVER suggest adding comments to the code.\n\nReview the following code diff in the file \"${file.to}\" and take the pull request title and description into account when writing the response.\n\n${(_a = applicableRules.length) !== null && _a !== void 0 ? _a : (`Always check the following rules to write the review:\n  ${applicableRules.join(\"\\n\")}\n`)}    \n  \nPull request title: ${prDetails.title}\nPull request description:\n\n---\n${prDetails.description}\n---\n\nGit diff to review:\n\n\\`\\`\\`diff\n${chunk.content}\n${chunk.changes\n        // @ts-expect-error - ln and ln2 exists where needed\n        .map((c) => `${c.ln ? 
c.ln : c.ln2} ${c.content}`)\n        .join(\"\\n\")}\n\\`\\`\\`\n`;\n}\nfunction getAIResponse(prompt) {\n    var _a, _b;\n    return __awaiter(this, void 0, void 0, function* () {\n        const queryConfig = {\n            model: OPENAI_API_MODEL,\n            temperature: 0.2,\n            max_tokens: 700,\n            top_p: 1,\n            frequency_penalty: 0,\n            presence_penalty: 0,\n        };\n        try {\n            const response = yield openai.chat.completions.create(Object.assign(Object.assign({}, queryConfig), { response_format: { type: \"json_object\" }, messages: [\n                    {\n                        role: \"system\",\n                        content: prompt,\n                    },\n                ] }));\n            const res = ((_b = (_a = response.choices[0].message) === null || _a === void 0 ? void 0 : _a.content) === null || _b === void 0 ? void 0 : _b.trim()) || \"{}\";\n            return JSON.parse(res).reviews;\n        }\n        catch (error) {\n            console.error(\"Error:\", error);\n            return null;\n        }\n    });\n}\nfunction createComment(file, chunk, aiResponses) {\n    return aiResponses.flatMap((aiResponse) => {\n        if (!file.to) {\n            return [];\n        }\n        return {\n            body: aiResponse.reviewComment,\n            path: file.to,\n            line: Number(aiResponse.lineNumber),\n        };\n    });\n}\nfunction createReviewComment(owner, repo, pull_number, comments) {\n    return __awaiter(this, void 0, void 0, function* () {\n        yield octokit.pulls.createReview({\n            owner,\n            repo,\n            pull_number,\n            comments,\n            event: \"COMMENT\",\n        });\n    });\n}\nfunction main() {\n    var _a;\n    return __awaiter(this, void 0, void 0, function* () {\n        const prDetails = yield getPRDetails();\n        let diff;\n        const eventData = JSON.parse((0, fs_1.readFileSync)((_a = process.env.GITHUB_EVENT_PATH) !== null && _a !== void 0 ? _a : \"\", \"utf8\"));\n        if (eventData.action === \"opened\") {\n            diff = yield getDiff(prDetails.owner, prDetails.repo, prDetails.pull_number);\n        }\n        else if (eventData.action === \"synchronize\") {\n            const newBaseSha = eventData.before;\n            const newHeadSha = eventData.after;\n            const response = yield octokit.repos.compareCommits({\n                headers: {\n                    accept: \"application/vnd.github.v3.diff\",\n                },\n                owner: prDetails.owner,\n                repo: prDetails.repo,\n                base: newBaseSha,\n                head: newHeadSha,\n            });\n            diff = String(response.data);\n        }\n        else {\n            console.log(\"Unsupported event:\", process.env.GITHUB_EVENT_NAME);\n            return;\n        }\n        if (!diff) {\n            console.log(\"No diff found\");\n            return;\n        }\n        const parsedDiff = (0, parse_diff_1.default)(diff);\n        const rulesFiles = core\n            .getInput(\"rules_file_name\")\n            .trim();\n        const rules = readRules(rulesFiles);\n        const { ignore: allIgnorePatterns = [] } = rules;\n        const filteredDiff = parsedDiff.filter((file) => {\n            return !allIgnorePatterns.some((pattern) => { var _a; return (0, minimatch_1.default)((_a = file.to) !== null && _a !== void 0 ? 
_a : \"\", pattern); });\n        });\n        const comments = yield analyzeCode(filteredDiff, prDetails, rules);\n        if (comments.length > 0) {\n            yield createReviewComment(prDetails.owner, prDetails.repo, prDetails.pull_number, comments);\n        }\n    });\n}\nfunction readRules(fileName) {\n    const fileContents = (0, fs_1.readFileSync)(fileName, \"utf8\");\n    const parsed = (0, yaml_1.parse)(fileContents, { mapAsMap: true });\n    // Transform the parsed Map into a plain object\n    const rules = {\n        global: parsed.get('global') || [],\n        extensions: {},\n        directories: {},\n        ignore: parsed.get('ignore') || []\n    };\n    // Handle extensions map\n    const extensionsMap = parsed.get('extensions');\n    if (extensionsMap instanceof Map) {\n        for (const [key, value] of extensionsMap.entries()) {\n            // If key is an array, create an entry for each extension\n            if (Array.isArray(key)) {\n                key.forEach(ext => {\n                    rules.extensions[ext] = value;\n                });\n            }\n            else {\n                rules.extensions[key] = value;\n            }\n        }\n    }\n    // Handle directories map\n    const directoriesMap = parsed.get('directories');\n    if (directoriesMap instanceof Map) {\n        for (const [key, value] of directoriesMap.entries()) {\n            rules.directories[key] = value;\n        }\n    }\n    return rules;\n}\nexports.readRules = readRules;\nexports.default = main;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.issue = exports.issueCommand = void 0;\nconst os = __importStar(require(\"os\"));\nconst utils_1 = require(\"./utils\");\n/**\n * Commands\n *\n * Command Format:\n *   ::name key=value,key=value::message\n *\n * Examples:\n *   ::warning::This is the message\n *   ::set-env name=MY_VAR::some value\n */\nfunction issueCommand(command, properties, message) {\n    const cmd = new Command(command, properties, message);\n    process.stdout.write(cmd.toString() + os.EOL);\n}\nexports.issueCommand = issueCommand;\nfunction issue(name, message = '') {\n    issueCommand(name, {}, message);\n}\nexports.issue = issue;\nconst CMD_STRING = '::';\nclass Command {\n    constructor(command, properties, message) {\n        if (!command) {\n            command = 'missing.command';\n        }\n        this.command = command;\n        this.properties = properties;\n        this.message = message;\n    }\n    toString() {\n        let cmdStr = CMD_STRING + this.command;\n        if (this.properties && Object.keys(this.properties).length > 0) {\n            cmdStr += ' ';\n            let first = true;\n            for (const key in this.properties) {\n                if (this.properties.hasOwnProperty(key)) {\n                    const val = this.properties[key];\n                    if (val) {\n                        if (first) {\n                            first = false;\n                        }\n                        else {\n                            cmdStr += ',';\n                        }\n                        cmdStr += `${key}=${escapeProperty(val)}`;\n                    }\n                }\n            }\n        }\n        cmdStr += `${CMD_STRING}${escapeData(this.message)}`;\n        return cmdStr;\n    }\n}\nfunction escapeData(s) {\n    return utils_1.toCommandValue(s)\n        .replace(/%/g, '%25')\n        .replace(/\\r/g, '%0D')\n        .replace(/\\n/g, '%0A');\n}\nfunction escapeProperty(s) {\n    return utils_1.toCommandValue(s)\n        .replace(/%/g, '%25')\n        .replace(/\\r/g, '%0D')\n        .replace(/\\n/g, '%0A')\n        .replace(/:/g, '%3A')\n        .replace(/,/g, '%2C');\n}\n//# sourceMappingURL=command.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n    return new (P || (P = Promise))(function (resolve, reject) {\n        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n        function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n        step((generator = generator.apply(thisArg, _arguments || [])).next());\n    });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;\nconst command_1 = require(\"./command\");\nconst file_command_1 = require(\"./file-command\");\nconst utils_1 = require(\"./utils\");\nconst os = __importStar(require(\"os\"));\nconst path = __importStar(require(\"path\"));\nconst oidc_utils_1 = require(\"./oidc-utils\");\n/**\n * The code to exit an action\n */\nvar ExitCode;\n(function (ExitCode) {\n    /**\n     * A code indicating that the action was successful\n     */\n    ExitCode[ExitCode[\"Success\"] = 0] = \"Success\";\n    /**\n     * A code indicating that the action was a failure\n     */\n    ExitCode[ExitCode[\"Failure\"] = 1] = \"Failure\";\n})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));\n//-----------------------------------------------------------------------\n// Variables\n//-----------------------------------------------------------------------\n/**\n * Sets env variable for this action and future actions in the job\n * @param name the name of the variable to set\n * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction exportVariable(name, val) {\n    const convertedVal = utils_1.toCommandValue(val);\n    process.env[name] = convertedVal;\n    const filePath = process.env['GITHUB_ENV'] || '';\n    if (filePath) {\n        return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));\n    }\n    command_1.issueCommand('set-env', { name }, convertedVal);\n}\nexports.exportVariable = exportVariable;\n/**\n * Registers a secret which will get masked from logs\n * @param secret value of the secret\n */\nfunction setSecret(secret) {\n    command_1.issueCommand('add-mask', {}, secret);\n}\nexports.setSecret = setSecret;\n/**\n * Prepends inputPath to the PATH (for this action and future actions)\n * @param inputPath\n */\nfunction addPath(inputPath) {\n    const filePath = process.env['GITHUB_PATH'] || '';\n    if (filePath) {\n        file_command_1.issueFileCommand('PATH', inputPath);\n    }\n    else {\n        command_1.issueCommand('add-path', {}, inputPath);\n    }\n    process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;\n}\nexports.addPath = addPath;\n/**\n * Gets the value of an input.\n * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.\n * Returns an empty string if the value is not defined.\n *\n * @param     name     name of the input to get\n * @param     options  optional. See InputOptions.\n * @returns   string\n */\nfunction getInput(name, options) {\n    const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';\n    if (options && options.required && !val) {\n        throw new Error(`Input required and not supplied: ${name}`);\n    }\n    if (options && options.trimWhitespace === false) {\n        return val;\n    }\n    return val.trim();\n}\nexports.getInput = getInput;\n/**\n * Gets the values of an multiline input.  Each value is also trimmed.\n *\n * @param     name     name of the input to get\n * @param     options  optional. See InputOptions.\n * @returns   string[]\n *\n */\nfunction getMultilineInput(name, options) {\n    const inputs = getInput(name, options)\n        .split('\\n')\n        .filter(x => x !== '');\n    if (options && options.trimWhitespace === false) {\n        return inputs;\n    }\n    return inputs.map(input => input.trim());\n}\nexports.getMultilineInput = getMultilineInput;\n/**\n * Gets the input value of the boolean type in the YAML 1.2 \"core schema\" specification.\n * Support boolean input list: `true | True | TRUE | false | False | FALSE` .\n * The return value is also in boolean type.\n * ref: https://yaml.org/spec/1.2/spec.html#id2804923\n *\n * @param     name     name of the input to get\n * @param     options  optional. 
See InputOptions.\n * @returns   boolean\n */\nfunction getBooleanInput(name, options) {\n    const trueValue = ['true', 'True', 'TRUE'];\n    const falseValue = ['false', 'False', 'FALSE'];\n    const val = getInput(name, options);\n    if (trueValue.includes(val))\n        return true;\n    if (falseValue.includes(val))\n        return false;\n    throw new TypeError(`Input does not meet YAML 1.2 \"Core Schema\" specification: ${name}\\n` +\n        `Support boolean input list: \\`true | True | TRUE | false | False | FALSE\\``);\n}\nexports.getBooleanInput = getBooleanInput;\n/**\n * Sets the value of an output.\n *\n * @param     name     name of the output to set\n * @param     value    value to store. Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction setOutput(name, value) {\n    const filePath = process.env['GITHUB_OUTPUT'] || '';\n    if (filePath) {\n        return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));\n    }\n    process.stdout.write(os.EOL);\n    command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));\n}\nexports.setOutput = setOutput;\n/**\n * Enables or disables the echoing of commands into stdout for the rest of the step.\n * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.\n *\n */\nfunction setCommandEcho(enabled) {\n    command_1.issue('echo', enabled ? 'on' : 'off');\n}\nexports.setCommandEcho = setCommandEcho;\n//-----------------------------------------------------------------------\n// Results\n//-----------------------------------------------------------------------\n/**\n * Sets the action status to failed.\n * When the action exits it will be with an exit code of 1\n * @param message add error issue message\n */\nfunction setFailed(message) {\n    process.exitCode = ExitCode.Failure;\n    error(message);\n}\nexports.setFailed = setFailed;\n//-----------------------------------------------------------------------\n// Logging Commands\n//-----------------------------------------------------------------------\n/**\n * Gets whether Actions Step Debug is on or not\n */\nfunction isDebug() {\n    return process.env['RUNNER_DEBUG'] === '1';\n}\nexports.isDebug = isDebug;\n/**\n * Writes debug message to user log\n * @param message debug message\n */\nfunction debug(message) {\n    command_1.issueCommand('debug', {}, message);\n}\nexports.debug = debug;\n/**\n * Adds an error issue\n * @param message error issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction error(message, properties = {}) {\n    command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.error = error;\n/**\n * Adds a warning issue\n * @param message warning issue message. Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction warning(message, properties = {}) {\n    command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.warning = warning;\n/**\n * Adds a notice issue\n * @param message notice issue message. 
Errors will be converted to string via toString()\n * @param properties optional properties to add to the annotation.\n */\nfunction notice(message, properties = {}) {\n    command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);\n}\nexports.notice = notice;\n/**\n * Writes info to log with console.log.\n * @param message info message\n */\nfunction info(message) {\n    process.stdout.write(message + os.EOL);\n}\nexports.info = info;\n/**\n * Begin an output group.\n *\n * Output until the next `groupEnd` will be foldable in this group\n *\n * @param name The name of the output group\n */\nfunction startGroup(name) {\n    command_1.issue('group', name);\n}\nexports.startGroup = startGroup;\n/**\n * End an output group.\n */\nfunction endGroup() {\n    command_1.issue('endgroup');\n}\nexports.endGroup = endGroup;\n/**\n * Wrap an asynchronous function call in a group.\n *\n * Returns the same type as the function itself.\n *\n * @param name The name of the group\n * @param fn The function to wrap in the group\n */\nfunction group(name, fn) {\n    return __awaiter(this, void 0, void 0, function* () {\n        startGroup(name);\n        let result;\n        try {\n            result = yield fn();\n        }\n        finally {\n            endGroup();\n        }\n        return result;\n    });\n}\nexports.group = group;\n//-----------------------------------------------------------------------\n// Wrapper action state\n//-----------------------------------------------------------------------\n/**\n * Saves state for current action, the state can only be retrieved by this action's post job execution.\n *\n * @param     name     name of the state to store\n * @param     value    value to store. 
Non-string values will be converted to a string via JSON.stringify\n */\n// eslint-disable-next-line @typescript-eslint/no-explicit-any\nfunction saveState(name, value) {\n    const filePath = process.env['GITHUB_STATE'] || '';\n    if (filePath) {\n        return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));\n    }\n    command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));\n}\nexports.saveState = saveState;\n/**\n * Gets the value of an state set by this action's main execution.\n *\n * @param     name     name of the state to get\n * @returns   string\n */\nfunction getState(name) {\n    return process.env[`STATE_${name}`] || '';\n}\nexports.getState = getState;\nfunction getIDToken(aud) {\n    return __awaiter(this, void 0, void 0, function* () {\n        return yield oidc_utils_1.OidcClient.getIDToken(aud);\n    });\n}\nexports.getIDToken = getIDToken;\n/**\n * Summary exports\n */\nvar summary_1 = require(\"./summary\");\nObject.defineProperty(exports, \"summary\", { enumerable: true, get: function () { return summary_1.summary; } });\n/**\n * @deprecated use core.summary\n */\nvar summary_2 = require(\"./summary\");\nObject.defineProperty(exports, \"markdownSummary\", { enumerable: true, get: function () { return summary_2.markdownSummary; } });\n/**\n * Path exports\n */\nvar path_utils_1 = require(\"./path-utils\");\nObject.defineProperty(exports, \"toPosixPath\", { enumerable: true, get: function () { return path_utils_1.toPosixPath; } });\nObject.defineProperty(exports, \"toWin32Path\", { enumerable: true, get: function () { return path_utils_1.toWin32Path; } });\nObject.defineProperty(exports, \"toPlatformPath\", { enumerable: true, get: function () { return path_utils_1.toPlatformPath; } });\n//# sourceMappingURL=core.js.map","\"use strict\";\n// For internal use, subject to change.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.prepareKeyValueMessage = exports.issueFileCommand = void 0;\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nconst fs = __importStar(require(\"fs\"));\nconst os = __importStar(require(\"os\"));\nconst uuid_1 = require(\"uuid\");\nconst utils_1 = require(\"./utils\");\nfunction issueFileCommand(command, message) {\n    const filePath = process.env[`GITHUB_${command}`];\n    if (!filePath) {\n        throw new Error(`Unable to find environment variable for file command ${command}`);\n    }\n    if (!fs.existsSync(filePath)) {\n        throw new Error(`Missing file at path: ${filePath}`);\n    }\n    fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {\n        encoding: 'utf8'\n    });\n}\nexports.issueFileCommand = issueFileCommand;\nfunction prepareKeyValueMessage(key, value) {\n    const delimiter = `ghadelimiter_${uuid_1.v4()}`;\n    const convertedValue = utils_1.toCommandValue(value);\n    // These should realistically never happen, but just in case someone finds a\n    // way to exploit uuid generation let's not allow keys or values that contain\n    // the delimiter.\n    if (key.includes(delimiter)) {\n        throw new Error(`Unexpected input: name should not contain the delimiter \"${delimiter}\"`);\n    }\n    if (convertedValue.includes(delimiter)) {\n        throw new Error(`Unexpected input: value should not contain the delimiter \"${delimiter}\"`);\n    }\n    return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;\n}\nexports.prepareKeyValueMessage = prepareKeyValueMessage;\n//# sourceMappingURL=file-command.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n    return new (P || (P = Promise))(function (resolve, reject) {\n        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n        function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n        function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n        step((generator = generator.apply(thisArg, _arguments || [])).next());\n    });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.OidcClient = void 0;\nconst http_client_1 = require(\"@actions/http-client\");\nconst auth_1 = require(\"@actions/http-client/lib/auth\");\nconst core_1 = require(\"./core\");\nclass OidcClient {\n    static createHttpClient(allowRetry = true, maxRetry = 10) {\n        const requestOptions = {\n            allowRetries: allowRetry,\n            maxRetries: maxRetry\n        };\n        return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);\n    }\n    static getRequestToken() {\n        const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];\n        if (!token) {\n            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');\n        }\n        return token;\n    }\n    static getIDTokenUrl() {\n        const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];\n        if (!runtimeUrl) {\n            throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');\n        }\n        return runtimeUrl;\n    }\n    static getCall(id_token_url) {\n        var _a;\n        return __awaiter(this, void 0, void 0, function* () {\n            const httpclient = OidcClient.createHttpClient();\n            const res = yield httpclient\n                .getJson(id_token_url)\n                .catch(error => {\n                throw new Error(`Failed to get ID Token. \\n \n        Error Code : ${error.statusCode}\\n \n        Error Message: ${error.result.message}`);\n            });\n            const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;\n            if (!id_token) {\n                throw new Error('Response json body do not have ID Token field');\n            }\n            return id_token;\n        });\n    }\n    static getIDToken(audience) {\n        return __awaiter(this, void 0, void 0, function* () {\n            try {\n                // New ID Token is requested from action service\n                let id_token_url = OidcClient.getIDTokenUrl();\n                if (audience) {\n                    const encodedAudience = encodeURIComponent(audience);\n                    id_token_url = `${id_token_url}&audience=${encodedAudience}`;\n                }\n                core_1.debug(`ID token url is ${id_token_url}`);\n                const id_token = yield OidcClient.getCall(id_token_url);\n                core_1.setSecret(id_token);\n                return id_token;\n            }\n            catch (error) {\n                throw new Error(`Error message: ${error.message}`);\n            }\n        });\n    }\n}\nexports.OidcClient = OidcClient;\n//# sourceMappingURL=oidc-utils.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;\nconst path = __importStar(require(\"path\"));\n/**\n * toPosixPath converts the given path to the posix form. On Windows, \\\\ will be\n * replaced with /.\n *\n * @param pth. Path to transform.\n * @return string Posix path.\n */\nfunction toPosixPath(pth) {\n    return pth.replace(/[\\\\]/g, '/');\n}\nexports.toPosixPath = toPosixPath;\n/**\n * toWin32Path converts the given path to the win32 form. On Linux, / will be\n * replaced with \\\\.\n *\n * @param pth. Path to transform.\n * @return string Win32 path.\n */\nfunction toWin32Path(pth) {\n    return pth.replace(/[/]/g, '\\\\');\n}\nexports.toWin32Path = toWin32Path;\n/**\n * toPlatformPath converts the given path to a platform-specific path. It does\n * this by replacing instances of / and \\ with the platform-specific path\n * separator.\n *\n * @param pth The path to platformize.\n * @return string The platform-specific path.\n */\nfunction toPlatformPath(pth) {\n    return pth.replace(/[/\\\\]/g, path.sep);\n}\nexports.toPlatformPath = toPlatformPath;\n//# sourceMappingURL=path-utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n    return new (P || (P = Promise))(function (resolve, reject) {\n        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n        function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n        step((generator = generator.apply(thisArg, _arguments || [])).next());\n    });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;\nconst os_1 = require(\"os\");\nconst fs_1 = require(\"fs\");\nconst { access, appendFile, writeFile } = fs_1.promises;\nexports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';\nexports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';\nclass Summary {\n    constructor() {\n        this._buffer = '';\n    }\n    /**\n     * Finds the summary file path from the environment, rejects if env var is not found or file does not exist\n     * Also checks r/w permissions.\n     *\n     * @returns step summary file path\n     */\n    filePath() {\n        return __awaiter(this, void 0, void 0, function* () {\n            if (this._filePath) {\n                return this._filePath;\n            }\n            const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];\n            if (!pathFromEnv) {\n                throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. 
Check if your runtime environment supports job summaries.`);\n            }\n            try {\n                yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);\n            }\n            catch (_a) {\n                throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);\n            }\n            this._filePath = pathFromEnv;\n            return this._filePath;\n        });\n    }\n    /**\n     * Wraps content in an HTML tag, adding any HTML attributes\n     *\n     * @param {string} tag HTML tag to wrap\n     * @param {string | null} content content within the tag\n     * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add\n     *\n     * @returns {string} content wrapped in HTML element\n     */\n    wrap(tag, content, attrs = {}) {\n        const htmlAttrs = Object.entries(attrs)\n            .map(([key, value]) => ` ${key}=\"${value}\"`)\n            .join('');\n        if (!content) {\n            return `<${tag}${htmlAttrs}>`;\n        }\n        return `<${tag}${htmlAttrs}>${content}`;\n    }\n    /**\n     * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.\n     *\n     * @param {SummaryWriteOptions} [options] (optional) options for write operation\n     *\n     * @returns {Promise} summary instance\n     */\n    write(options) {\n        return __awaiter(this, void 0, void 0, function* () {\n            const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);\n            const filePath = yield this.filePath();\n            const writeFunc = overwrite ? writeFile : appendFile;\n            yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });\n            return this.emptyBuffer();\n        });\n    }\n    /**\n     * Clears the summary buffer and wipes the summary file\n     *\n     * @returns {Summary} summary instance\n     */\n    clear() {\n        return __awaiter(this, void 0, void 0, function* () {\n            return this.emptyBuffer().write({ overwrite: true });\n        });\n    }\n    /**\n     * Returns the current summary buffer as a string\n     *\n     * @returns {string} string of summary buffer\n     */\n    stringify() {\n        return this._buffer;\n    }\n    /**\n     * If the summary buffer is empty\n     *\n     * @returns {boolen} true if the buffer is empty\n     */\n    isEmptyBuffer() {\n        return this._buffer.length === 0;\n    }\n    /**\n     * Resets the summary buffer without writing to summary file\n     *\n     * @returns {Summary} summary instance\n     */\n    emptyBuffer() {\n        this._buffer = '';\n        return this;\n    }\n    /**\n     * Adds raw text to the summary buffer\n     *\n     * @param {string} text content to add\n     * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)\n     *\n     * @returns {Summary} summary instance\n     */\n    addRaw(text, addEOL = false) {\n        this._buffer += text;\n        return addEOL ? 
this.addEOL() : this;\n    }\n    /**\n     * Adds the operating system-specific end-of-line marker to the buffer\n     *\n     * @returns {Summary} summary instance\n     */\n    addEOL() {\n        return this.addRaw(os_1.EOL);\n    }\n    /**\n     * Adds an HTML codeblock to the summary buffer\n     *\n     * @param {string} code content to render within fenced code block\n     * @param {string} lang (optional) language to syntax highlight code\n     *\n     * @returns {Summary} summary instance\n     */\n    addCodeBlock(code, lang) {\n        const attrs = Object.assign({}, (lang && { lang }));\n        const element = this.wrap('pre', this.wrap('code', code), attrs);\n        return this.addRaw(element).addEOL();\n    }\n    /**\n     * Adds an HTML list to the summary buffer\n     *\n     * @param {string[]} items list of items to render\n     * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)\n     *\n     * @returns {Summary} summary instance\n     */\n    addList(items, ordered = false) {\n        const tag = ordered ? 'ol' : 'ul';\n        const listItems = items.map(item => this.wrap('li', item)).join('');\n        const element = this.wrap(tag, listItems);\n        return this.addRaw(element).addEOL();\n    }\n    /**\n     * Adds an HTML table to the summary buffer\n     *\n     * @param {SummaryTableCell[]} rows table rows\n     *\n     * @returns {Summary} summary instance\n     */\n    addTable(rows) {\n        const tableBody = rows\n            .map(row => {\n            const cells = row\n                .map(cell => {\n                if (typeof cell === 'string') {\n                    return this.wrap('td', cell);\n                }\n                const { header, data, colspan, rowspan } = cell;\n                const tag = header ? 
'th' : 'td';\n                const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));\n                return this.wrap(tag, data, attrs);\n            })\n                .join('');\n            return this.wrap('tr', cells);\n        })\n            .join('');\n        const element = this.wrap('table', tableBody);\n        return this.addRaw(element).addEOL();\n    }\n    /**\n     * Adds a collapsable HTML details element to the summary buffer\n     *\n     * @param {string} label text for the closed state\n     * @param {string} content collapsable content\n     *\n     * @returns {Summary} summary instance\n     */\n    addDetails(label, content) {\n        const element = this.wrap('details', this.wrap('summary', label) + content);\n        return this.addRaw(element).addEOL();\n    }\n    /**\n     * Adds an HTML image tag to the summary buffer\n     *\n     * @param {string} src path to the image you to embed\n     * @param {string} alt text description of the image\n     * @param {SummaryImageOptions} options (optional) addition image attributes\n     *\n     * @returns {Summary} summary instance\n     */\n    addImage(src, alt, options) {\n        const { width, height } = options || {};\n        const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));\n        const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));\n        return this.addRaw(element).addEOL();\n    }\n    /**\n     * Adds an HTML section heading element\n     *\n     * @param {string} text heading text\n     * @param {number | string} [level=1] (optional) the heading level, default: 1\n     *\n     * @returns {Summary} summary instance\n     */\n    addHeading(text, level) {\n        const tag = `h${level}`;\n        const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)\n            ? tag\n            : 'h1';\n        const element = this.wrap(allowedTag, text);\n        return this.addRaw(element).addEOL();\n    }\n    /**\n     * Adds an HTML thematic break (
<hr>) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addSeparator() {\n const element = this.wrap('hr', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML line break (<br>
) to the summary buffer\n *\n * @returns {Summary} summary instance\n */\n addBreak() {\n const element = this.wrap('br', null);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML blockquote to the summary buffer\n *\n * @param {string} text quote text\n * @param {string} cite (optional) citation url\n *\n * @returns {Summary} summary instance\n */\n addQuote(text, cite) {\n const attrs = Object.assign({}, (cite && { cite }));\n const element = this.wrap('blockquote', text, attrs);\n return this.addRaw(element).addEOL();\n }\n /**\n * Adds an HTML anchor tag to the summary buffer\n *\n * @param {string} text link text/content\n * @param {string} href hyperlink\n *\n * @returns {Summary} summary instance\n */\n addLink(text, href) {\n const element = this.wrap('a', text, { href });\n return this.addRaw(element).addEOL();\n }\n}\nconst _summary = new Summary();\n/**\n * @deprecated use `core.summary`\n */\nexports.markdownSummary = _summary;\nexports.summary = _summary;\n//# sourceMappingURL=summary.js.map","\"use strict\";\n// We use any as a valid input type\n/* eslint-disable @typescript-eslint/no-explicit-any */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toCommandProperties = exports.toCommandValue = void 0;\n/**\n * Sanitizes an input into a string so it can be passed into issueCommand safely\n * @param input input to sanitize into a string\n */\nfunction toCommandValue(input) {\n if (input === null || input === undefined) {\n return '';\n }\n else if (typeof input === 'string' || input instanceof String) {\n return input;\n }\n return JSON.stringify(input);\n}\nexports.toCommandValue = toCommandValue;\n/**\n *\n * @param annotationProperties\n * @returns The command properties to send with the actual annotation command\n * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646\n */\nfunction toCommandProperties(annotationProperties) {\n if (!Object.keys(annotationProperties).length) {\n return {};\n }\n return {\n title: annotationProperties.title,\n file: annotationProperties.file,\n line: annotationProperties.startLine,\n endLine: annotationProperties.endLine,\n col: annotationProperties.startColumn,\n endColumn: annotationProperties.endColumn\n };\n}\nexports.toCommandProperties = toCommandProperties;\n//# sourceMappingURL=utils.js.map","\"use strict\";\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;\nclass BasicCredentialHandler {\n constructor(username, password) {\n this.username = username;\n this.password = password;\n }\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BasicCredentialHandler = BasicCredentialHandler;\nclass BearerCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Bearer ${this.token}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.BearerCredentialHandler = BearerCredentialHandler;\nclass PersonalAccessTokenCredentialHandler {\n constructor(token) {\n this.token = token;\n }\n // currently implements pre-authorization\n // TODO: support preAuth = false where it hooks on 401\n prepareRequest(options) {\n if (!options.headers) {\n throw Error('The request has no headers');\n }\n options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;\n }\n // This handler cannot handle 401\n canHandleAuthentication() {\n return false;\n }\n handleAuthentication() {\n return __awaiter(this, void 0, void 0, function* () {\n throw new Error('not implemented');\n });\n }\n}\nexports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;\n//# sourceMappingURL=auth.js.map","\"use strict\";\n/* eslint-disable @typescript-eslint/no-explicit-any */\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n if (mod && mod.__esModule) return mod;\n var result = {};\n if (mod != null) for (var k in mod) if (k !== \"default\" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n __setModuleDefault(result, mod);\n return result;\n};\nvar __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {\n function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); }\n return new (P || (P = Promise))(function (resolve, reject) {\n function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }\n function rejected(value) { try { step(generator[\"throw\"](value)); } catch (e) { reject(e); } }\n function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }\n step((generator = generator.apply(thisArg, _arguments || [])).next());\n });\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;\nconst http = __importStar(require(\"http\"));\nconst https = __importStar(require(\"https\"));\nconst pm = __importStar(require(\"./proxy\"));\nconst tunnel = __importStar(require(\"tunnel\"));\nvar HttpCodes;\n(function (HttpCodes) {\n HttpCodes[HttpCodes[\"OK\"] = 200] = \"OK\";\n HttpCodes[HttpCodes[\"MultipleChoices\"] = 300] = \"MultipleChoices\";\n HttpCodes[HttpCodes[\"MovedPermanently\"] = 301] = \"MovedPermanently\";\n HttpCodes[HttpCodes[\"ResourceMoved\"] = 302] = \"ResourceMoved\";\n HttpCodes[HttpCodes[\"SeeOther\"] = 303] = \"SeeOther\";\n HttpCodes[HttpCodes[\"NotModified\"] = 304] = \"NotModified\";\n HttpCodes[HttpCodes[\"UseProxy\"] = 305] = \"UseProxy\";\n HttpCodes[HttpCodes[\"SwitchProxy\"] = 306] = \"SwitchProxy\";\n HttpCodes[HttpCodes[\"TemporaryRedirect\"] = 307] = \"TemporaryRedirect\";\n HttpCodes[HttpCodes[\"PermanentRedirect\"] = 308] = \"PermanentRedirect\";\n HttpCodes[HttpCodes[\"BadRequest\"] = 400] = \"BadRequest\";\n HttpCodes[HttpCodes[\"Unauthorized\"] = 401] = \"Unauthorized\";\n HttpCodes[HttpCodes[\"PaymentRequired\"] = 402] = \"PaymentRequired\";\n HttpCodes[HttpCodes[\"Forbidden\"] = 403] = \"Forbidden\";\n HttpCodes[HttpCodes[\"NotFound\"] = 404] = \"NotFound\";\n HttpCodes[HttpCodes[\"MethodNotAllowed\"] = 405] = \"MethodNotAllowed\";\n HttpCodes[HttpCodes[\"NotAcceptable\"] = 406] = \"NotAcceptable\";\n HttpCodes[HttpCodes[\"ProxyAuthenticationRequired\"] = 407] = \"ProxyAuthenticationRequired\";\n HttpCodes[HttpCodes[\"RequestTimeout\"] = 408] = \"RequestTimeout\";\n HttpCodes[HttpCodes[\"Conflict\"] = 409] = \"Conflict\";\n HttpCodes[HttpCodes[\"Gone\"] = 410] = \"Gone\";\n HttpCodes[HttpCodes[\"TooManyRequests\"] = 429] = \"TooManyRequests\";\n HttpCodes[HttpCodes[\"InternalServerError\"] = 500] = \"InternalServerError\";\n HttpCodes[HttpCodes[\"NotImplemented\"] = 501] = \"NotImplemented\";\n HttpCodes[HttpCodes[\"BadGateway\"] = 502] = \"BadGateway\";\n HttpCodes[HttpCodes[\"ServiceUnavailable\"] = 503] = \"ServiceUnavailable\";\n HttpCodes[HttpCodes[\"GatewayTimeout\"] = 504] = \"GatewayTimeout\";\n})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));\nvar Headers;\n(function (Headers) {\n Headers[\"Accept\"] = \"accept\";\n Headers[\"ContentType\"] = \"content-type\";\n})(Headers = exports.Headers || (exports.Headers = {}));\nvar MediaTypes;\n(function (MediaTypes) {\n MediaTypes[\"ApplicationJson\"] = \"application/json\";\n})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));\n/**\n * Returns the proxy URL, depending upon the supplied url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com\n */\nfunction getProxyUrl(serverUrl) {\n const proxyUrl = pm.getProxyUrl(new URL(serverUrl));\n return proxyUrl ? proxyUrl.href : '';\n}\nexports.getProxyUrl = getProxyUrl;\nconst HttpRedirectCodes = [\n HttpCodes.MovedPermanently,\n HttpCodes.ResourceMoved,\n HttpCodes.SeeOther,\n HttpCodes.TemporaryRedirect,\n HttpCodes.PermanentRedirect\n];\nconst HttpResponseRetryCodes = [\n HttpCodes.BadGateway,\n HttpCodes.ServiceUnavailable,\n HttpCodes.GatewayTimeout\n];\nconst RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];\nconst ExponentialBackoffCeiling = 10;\nconst ExponentialBackoffTimeSlice = 5;\nclass HttpClientError extends Error {\n constructor(message, statusCode) {\n super(message);\n this.name = 'HttpClientError';\n this.statusCode = statusCode;\n Object.setPrototypeOf(this, HttpClientError.prototype);\n }\n}\nexports.HttpClientError = HttpClientError;\nclass HttpClientResponse {\n constructor(message) {\n this.message = message;\n }\n readBody() {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {\n let output = Buffer.alloc(0);\n this.message.on('data', (chunk) => {\n output = Buffer.concat([output, chunk]);\n });\n this.message.on('end', () => {\n resolve(output.toString());\n });\n }));\n });\n }\n}\nexports.HttpClientResponse = HttpClientResponse;\nfunction isHttps(requestUrl) {\n const parsedUrl = new URL(requestUrl);\n return parsedUrl.protocol === 'https:';\n}\nexports.isHttps = isHttps;\nclass HttpClient {\n constructor(userAgent, handlers, requestOptions) {\n this._ignoreSslError = false;\n this._allowRedirects = true;\n this._allowRedirectDowngrade = false;\n this._maxRedirects = 50;\n this._allowRetries = false;\n this._maxRetries = 1;\n this._keepAlive = false;\n this._disposed = false;\n this.userAgent = userAgent;\n this.handlers = handlers || [];\n this.requestOptions = requestOptions;\n if (requestOptions) {\n if (requestOptions.ignoreSslError != null) {\n this._ignoreSslError = requestOptions.ignoreSslError;\n }\n this._socketTimeout = requestOptions.socketTimeout;\n if (requestOptions.allowRedirects != null) {\n this._allowRedirects = requestOptions.allowRedirects;\n }\n if (requestOptions.allowRedirectDowngrade != null) {\n this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;\n }\n if (requestOptions.maxRedirects != null) {\n this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);\n }\n if (requestOptions.keepAlive != null) {\n this._keepAlive = requestOptions.keepAlive;\n }\n if (requestOptions.allowRetries != null) {\n this._allowRetries = requestOptions.allowRetries;\n }\n if (requestOptions.maxRetries != null) {\n this._maxRetries = requestOptions.maxRetries;\n }\n }\n }\n options(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});\n });\n }\n get(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('GET', requestUrl, null, additionalHeaders || {});\n });\n }\n del(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('DELETE', requestUrl, null, additionalHeaders || {});\n });\n }\n post(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('POST', requestUrl, data, additionalHeaders || {});\n });\n }\n patch(requestUrl, 
data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PATCH', requestUrl, data, additionalHeaders || {});\n });\n }\n put(requestUrl, data, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('PUT', requestUrl, data, additionalHeaders || {});\n });\n }\n head(requestUrl, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request('HEAD', requestUrl, null, additionalHeaders || {});\n });\n }\n sendStream(verb, requestUrl, stream, additionalHeaders) {\n return __awaiter(this, void 0, void 0, function* () {\n return this.request(verb, requestUrl, stream, additionalHeaders);\n });\n }\n /**\n * Gets a typed object from an endpoint\n * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise\n */\n getJson(requestUrl, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n const res = yield this.get(requestUrl, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n postJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.post(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n putJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.put(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n patchJson(requestUrl, obj, additionalHeaders = {}) {\n return __awaiter(this, void 0, void 0, function* () {\n const data = JSON.stringify(obj, null, 2);\n additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);\n additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);\n const res = yield this.patch(requestUrl, data, additionalHeaders);\n return this._processResponse(res, this.requestOptions);\n });\n }\n /**\n * Makes a raw http request.\n * All other methods such as get, post, patch, and request ultimately call this.\n * Prefer get, del, post and patch\n */\n request(verb, requestUrl, data, headers) {\n return __awaiter(this, void 0, void 0, function* () {\n if (this._disposed) {\n throw new Error('Client has already been disposed.');\n }\n const parsedUrl = new URL(requestUrl);\n let info = this._prepareRequest(verb, parsedUrl, headers);\n // Only perform retries on reads since writes may not be idempotent.\n const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)\n ? 
this._maxRetries + 1\n : 1;\n let numTries = 0;\n let response;\n do {\n response = yield this.requestRaw(info, data);\n // Check if it's an authentication challenge\n if (response &&\n response.message &&\n response.message.statusCode === HttpCodes.Unauthorized) {\n let authenticationHandler;\n for (const handler of this.handlers) {\n if (handler.canHandleAuthentication(response)) {\n authenticationHandler = handler;\n break;\n }\n }\n if (authenticationHandler) {\n return authenticationHandler.handleAuthentication(this, info, data);\n }\n else {\n // We have received an unauthorized response but have no handlers to handle it.\n // Let the response return to the caller.\n return response;\n }\n }\n let redirectsRemaining = this._maxRedirects;\n while (response.message.statusCode &&\n HttpRedirectCodes.includes(response.message.statusCode) &&\n this._allowRedirects &&\n redirectsRemaining > 0) {\n const redirectUrl = response.message.headers['location'];\n if (!redirectUrl) {\n // if there's no location to redirect to, we won't\n break;\n }\n const parsedRedirectUrl = new URL(redirectUrl);\n if (parsedUrl.protocol === 'https:' &&\n parsedUrl.protocol !== parsedRedirectUrl.protocol &&\n !this._allowRedirectDowngrade) {\n throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');\n }\n // we need to finish reading the response before reassigning response\n // which will leak the open socket.\n yield response.readBody();\n // strip authorization header if redirected to a different hostname\n if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {\n for (const header in headers) {\n // header names are case insensitive\n if (header.toLowerCase() === 'authorization') {\n delete headers[header];\n }\n }\n }\n // let's make the request with the new redirectUrl\n info = this._prepareRequest(verb, parsedRedirectUrl, headers);\n response = yield this.requestRaw(info, data);\n redirectsRemaining--;\n }\n if (!response.message.statusCode ||\n !HttpResponseRetryCodes.includes(response.message.statusCode)) {\n // If not a retry code, return immediately instead of retrying\n return response;\n }\n numTries += 1;\n if (numTries < maxTries) {\n yield response.readBody();\n yield this._performExponentialBackoff(numTries);\n }\n } while (numTries < maxTries);\n return response;\n });\n }\n /**\n * Needs to be called if keepAlive is set to true in request options.\n */\n dispose() {\n if (this._agent) {\n this._agent.destroy();\n }\n this._disposed = true;\n }\n /**\n * Raw request.\n * @param info\n * @param data\n */\n requestRaw(info, data) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => {\n function callbackForResult(err, res) {\n if (err) {\n reject(err);\n }\n else if (!res) {\n // If `err` is not passed, then `res` must be passed.\n reject(new Error('Unknown error'));\n }\n else {\n resolve(res);\n }\n }\n this.requestRawWithCallback(info, data, callbackForResult);\n });\n });\n }\n /**\n * Raw request with callback.\n * @param info\n * @param data\n * @param onResult\n */\n requestRawWithCallback(info, data, onResult) {\n if (typeof data === 'string') {\n if (!info.options.headers) {\n info.options.headers = {};\n }\n info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');\n }\n let callbackCalled = false;\n function handleResult(err, res) {\n if (!callbackCalled) {\n callbackCalled = true;\n 
onResult(err, res);\n }\n }\n const req = info.httpModule.request(info.options, (msg) => {\n const res = new HttpClientResponse(msg);\n handleResult(undefined, res);\n });\n let socket;\n req.on('socket', sock => {\n socket = sock;\n });\n // If we ever get disconnected, we want the socket to timeout eventually\n req.setTimeout(this._socketTimeout || 3 * 60000, () => {\n if (socket) {\n socket.end();\n }\n handleResult(new Error(`Request timeout: ${info.options.path}`));\n });\n req.on('error', function (err) {\n // err has statusCode property\n // res should have headers\n handleResult(err);\n });\n if (data && typeof data === 'string') {\n req.write(data, 'utf8');\n }\n if (data && typeof data !== 'string') {\n data.on('close', function () {\n req.end();\n });\n data.pipe(req);\n }\n else {\n req.end();\n }\n }\n /**\n * Gets an http agent. This function is useful when you need an http agent that handles\n * routing through a proxy server - depending upon the url and proxy environment variables.\n * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com\n */\n getAgent(serverUrl) {\n const parsedUrl = new URL(serverUrl);\n return this._getAgent(parsedUrl);\n }\n _prepareRequest(method, requestUrl, headers) {\n const info = {};\n info.parsedUrl = requestUrl;\n const usingSsl = info.parsedUrl.protocol === 'https:';\n info.httpModule = usingSsl ? https : http;\n const defaultPort = usingSsl ? 443 : 80;\n info.options = {};\n info.options.host = info.parsedUrl.hostname;\n info.options.port = info.parsedUrl.port\n ? parseInt(info.parsedUrl.port)\n : defaultPort;\n info.options.path =\n (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');\n info.options.method = method;\n info.options.headers = this._mergeHeaders(headers);\n if (this.userAgent != null) {\n info.options.headers['user-agent'] = this.userAgent;\n }\n info.options.agent = this._getAgent(info.parsedUrl);\n // gives handlers an opportunity to participate\n if (this.handlers) {\n for (const handler of this.handlers) {\n handler.prepareRequest(info.options);\n }\n }\n return info;\n }\n _mergeHeaders(headers) {\n if (this.requestOptions && this.requestOptions.headers) {\n return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));\n }\n return lowercaseKeys(headers || {});\n }\n _getExistingOrDefaultHeader(additionalHeaders, header, _default) {\n let clientHeader;\n if (this.requestOptions && this.requestOptions.headers) {\n clientHeader = lowercaseKeys(this.requestOptions.headers)[header];\n }\n return additionalHeaders[header] || clientHeader || _default;\n }\n _getAgent(parsedUrl) {\n let agent;\n const proxyUrl = pm.getProxyUrl(parsedUrl);\n const useProxy = proxyUrl && proxyUrl.hostname;\n if (this._keepAlive && useProxy) {\n agent = this._proxyAgent;\n }\n if (this._keepAlive && !useProxy) {\n agent = this._agent;\n }\n // if agent is already assigned use that agent.\n if (agent) {\n return agent;\n }\n const usingSsl = parsedUrl.protocol === 'https:';\n let maxSockets = 100;\n if (this.requestOptions) {\n maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;\n }\n // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.\n if (proxyUrl && proxyUrl.hostname) {\n const agentOptions = {\n maxSockets,\n keepAlive: this._keepAlive,\n proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {\n proxyAuth: 
`${proxyUrl.username}:${proxyUrl.password}`\n })), { host: proxyUrl.hostname, port: proxyUrl.port })\n };\n let tunnelAgent;\n const overHttps = proxyUrl.protocol === 'https:';\n if (usingSsl) {\n tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;\n }\n else {\n tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;\n }\n agent = tunnelAgent(agentOptions);\n this._proxyAgent = agent;\n }\n // if reusing agent across request and tunneling agent isn't assigned create a new agent\n if (this._keepAlive && !agent) {\n const options = { keepAlive: this._keepAlive, maxSockets };\n agent = usingSsl ? new https.Agent(options) : new http.Agent(options);\n this._agent = agent;\n }\n // if not using private agent and tunnel agent isn't setup then use global agent\n if (!agent) {\n agent = usingSsl ? https.globalAgent : http.globalAgent;\n }\n if (usingSsl && this._ignoreSslError) {\n // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process\n // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options\n // we have to cast it to any and change it directly\n agent.options = Object.assign(agent.options || {}, {\n rejectUnauthorized: false\n });\n }\n return agent;\n }\n _performExponentialBackoff(retryNumber) {\n return __awaiter(this, void 0, void 0, function* () {\n retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);\n const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);\n return new Promise(resolve => setTimeout(() => resolve(), ms));\n });\n }\n _processResponse(res, options) {\n return __awaiter(this, void 0, void 0, function* () {\n return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {\n const statusCode = res.message.statusCode || 0;\n const response = {\n statusCode,\n result: null,\n headers: {}\n };\n // not found leads to null obj returned\n if (statusCode === HttpCodes.NotFound) {\n resolve(response);\n }\n // get the result from the body\n function dateTimeDeserializer(key, value) {\n if (typeof value === 'string') {\n const a = new Date(value);\n if (!isNaN(a.valueOf())) {\n return a;\n }\n }\n return value;\n }\n let obj;\n let contents;\n try {\n contents = yield res.readBody();\n if (contents && contents.length > 0) {\n if (options && options.deserializeDates) {\n obj = JSON.parse(contents, dateTimeDeserializer);\n }\n else {\n obj = JSON.parse(contents);\n }\n response.result = obj;\n }\n response.headers = res.message.headers;\n }\n catch (err) {\n // Invalid resource (contents not json); leaving result obj null\n }\n // note that 3xx redirects are handled by the http layer.\n if (statusCode > 299) {\n let msg;\n // if exception/error in body, attempt to get better error\n if (obj && obj.message) {\n msg = obj.message;\n }\n else if (contents && contents.length > 0) {\n // it may be the case that the exception is in the body message as string\n msg = contents;\n }\n else {\n msg = `Failed request: (${statusCode})`;\n }\n const err = new HttpClientError(msg, statusCode);\n err.result = response.result;\n reject(err);\n }\n else {\n resolve(response);\n }\n }));\n });\n }\n}\nexports.HttpClient = HttpClient;\nconst lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.checkBypass = exports.getProxyUrl = void 0;\nfunction getProxyUrl(reqUrl) {\n const 
usingSsl = reqUrl.protocol === 'https:';\n if (checkBypass(reqUrl)) {\n return undefined;\n }\n const proxyVar = (() => {\n if (usingSsl) {\n return process.env['https_proxy'] || process.env['HTTPS_PROXY'];\n }\n else {\n return process.env['http_proxy'] || process.env['HTTP_PROXY'];\n }\n })();\n if (proxyVar) {\n return new URL(proxyVar);\n }\n else {\n return undefined;\n }\n}\nexports.getProxyUrl = getProxyUrl;\nfunction checkBypass(reqUrl) {\n if (!reqUrl.hostname) {\n return false;\n }\n const reqHost = reqUrl.hostname;\n if (isLoopbackAddress(reqHost)) {\n return true;\n }\n const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';\n if (!noProxy) {\n return false;\n }\n // Determine the request port\n let reqPort;\n if (reqUrl.port) {\n reqPort = Number(reqUrl.port);\n }\n else if (reqUrl.protocol === 'http:') {\n reqPort = 80;\n }\n else if (reqUrl.protocol === 'https:') {\n reqPort = 443;\n }\n // Format the request hostname and hostname with port\n const upperReqHosts = [reqUrl.hostname.toUpperCase()];\n if (typeof reqPort === 'number') {\n upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);\n }\n // Compare request host against noproxy\n for (const upperNoProxyItem of noProxy\n .split(',')\n .map(x => x.trim().toUpperCase())\n .filter(x => x)) {\n if (upperNoProxyItem === '*' ||\n upperReqHosts.some(x => x === upperNoProxyItem ||\n x.endsWith(`.${upperNoProxyItem}`) ||\n (upperNoProxyItem.startsWith('.') &&\n x.endsWith(`${upperNoProxyItem}`)))) {\n return true;\n }\n }\n return false;\n}\nexports.checkBypass = checkBypass;\nfunction isLoopbackAddress(host) {\n const hostLower = host.toLowerCase();\n return (hostLower === 'localhost' ||\n hostLower.startsWith('127.') ||\n hostLower.startsWith('[::1]') ||\n hostLower.startsWith('[0:0:0:0:0:0:0:1]'));\n}\n//# sourceMappingURL=proxy.js.map","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst REGEX_IS_INSTALLATION_LEGACY = /^v1\\./;\nconst REGEX_IS_INSTALLATION = /^ghs_/;\nconst REGEX_IS_USER_TO_SERVER = /^ghu_/;\nasync function auth(token) {\n const isApp = token.split(/\\./).length === 3;\n const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);\n const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);\n const tokenType = isApp ? \"app\" : isInstallation ? \"installation\" : isUserToServer ? 
\"user-to-server\" : \"oauth\";\n return {\n type: \"token\",\n token: token,\n tokenType\n };\n}\n\n/**\n * Prefix token for usage in the Authorization header\n *\n * @param token OAuth token or JSON Web Token\n */\nfunction withAuthorizationPrefix(token) {\n if (token.split(/\\./).length === 3) {\n return `bearer ${token}`;\n }\n return `token ${token}`;\n}\n\nasync function hook(token, request, route, parameters) {\n const endpoint = request.endpoint.merge(route, parameters);\n endpoint.headers.authorization = withAuthorizationPrefix(token);\n return request(endpoint);\n}\n\nconst createTokenAuth = function createTokenAuth(token) {\n if (!token) {\n throw new Error(\"[@octokit/auth-token] No token passed to createTokenAuth\");\n }\n if (typeof token !== \"string\") {\n throw new Error(\"[@octokit/auth-token] Token passed to createTokenAuth is not a string\");\n }\n token = token.replace(/^(token|bearer) +/i, \"\");\n return Object.assign(auth.bind(null, token), {\n hook: hook.bind(null, token)\n });\n};\n\nexports.createTokenAuth = createTokenAuth;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar universalUserAgent = require('universal-user-agent');\nvar beforeAfterHook = require('before-after-hook');\nvar request = require('@octokit/request');\nvar graphql = require('@octokit/graphql');\nvar authToken = require('@octokit/auth-token');\n\nconst VERSION = \"4.2.0\";\n\nclass Octokit {\n constructor(options = {}) {\n const hook = new beforeAfterHook.Collection();\n const requestDefaults = {\n baseUrl: request.request.endpoint.DEFAULTS.baseUrl,\n headers: {},\n request: Object.assign({}, options.request, {\n // @ts-ignore internal usage only, no need to type\n hook: hook.bind(null, \"request\")\n }),\n mediaType: {\n previews: [],\n format: \"\"\n }\n }; // prepend default user agent with `options.userAgent` if set\n\n requestDefaults.headers[\"user-agent\"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(\" \");\n\n if (options.baseUrl) {\n requestDefaults.baseUrl = options.baseUrl;\n }\n\n if (options.previews) {\n requestDefaults.mediaType.previews = options.previews;\n }\n\n if (options.timeZone) {\n requestDefaults.headers[\"time-zone\"] = options.timeZone;\n }\n\n this.request = request.request.defaults(requestDefaults);\n this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);\n this.log = Object.assign({\n debug: () => {},\n info: () => {},\n warn: console.warn.bind(console),\n error: console.error.bind(console)\n }, options.log);\n this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance\n // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.\n // (2) If only `options.auth` is set, use the default token authentication strategy.\n // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. 
Always pass own request as many strategies accept a custom request instance.\n // TODO: type `options.auth` based on `options.authStrategy`.\n\n if (!options.authStrategy) {\n if (!options.auth) {\n // (1)\n this.auth = async () => ({\n type: \"unauthenticated\"\n });\n } else {\n // (2)\n const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n }\n } else {\n const {\n authStrategy,\n ...otherOptions\n } = options;\n const auth = authStrategy(Object.assign({\n request: this.request,\n log: this.log,\n // we pass the current octokit instance as well as its constructor options\n // to allow for authentication strategies that return a new octokit instance\n // that shares the same internal state as the current one. The original\n // requirement for this was the \"event-octokit\" authentication strategy\n // of https://github.com/probot/octokit-auth-probot.\n octokit: this,\n octokitOptions: otherOptions\n }, options.auth)); // @ts-ignore ¯\\_(ツ)_/¯\n\n hook.wrap(\"request\", auth.hook);\n this.auth = auth;\n } // apply plugins\n // https://stackoverflow.com/a/16345172\n\n\n const classConstructor = this.constructor;\n classConstructor.plugins.forEach(plugin => {\n Object.assign(this, plugin(this, options));\n });\n }\n\n static defaults(defaults) {\n const OctokitWithDefaults = class extends this {\n constructor(...args) {\n const options = args[0] || {};\n\n if (typeof defaults === \"function\") {\n super(defaults(options));\n return;\n }\n\n super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {\n userAgent: `${options.userAgent} ${defaults.userAgent}`\n } : null));\n }\n\n };\n return OctokitWithDefaults;\n }\n /**\n * Attach a plugin (or many) to your Octokit instance.\n *\n * @example\n * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)\n */\n\n\n static plugin(...newPlugins) {\n var _a;\n\n const currentPlugins = this.plugins;\n const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);\n return NewOctokit;\n }\n\n}\nOctokit.VERSION = VERSION;\nOctokit.plugins = [];\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar isPlainObject = require('is-plain-object');\nvar universalUserAgent = require('universal-user-agent');\n\nfunction lowercaseKeys(object) {\n if (!object) {\n return {};\n }\n return Object.keys(object).reduce((newObj, key) => {\n newObj[key.toLowerCase()] = object[key];\n return newObj;\n }, {});\n}\n\nfunction mergeDeep(defaults, options) {\n const result = Object.assign({}, defaults);\n Object.keys(options).forEach(key => {\n if (isPlainObject.isPlainObject(options[key])) {\n if (!(key in defaults)) Object.assign(result, {\n [key]: options[key]\n });else result[key] = mergeDeep(defaults[key], options[key]);\n } else {\n Object.assign(result, {\n [key]: options[key]\n });\n }\n });\n return result;\n}\n\nfunction removeUndefinedProperties(obj) {\n for (const key in obj) {\n if (obj[key] === undefined) {\n delete obj[key];\n }\n }\n return obj;\n}\n\nfunction merge(defaults, route, options) {\n if (typeof route === \"string\") {\n let [method, url] = route.split(\" \");\n options = Object.assign(url ? 
{\n method,\n url\n } : {\n url: method\n }, options);\n } else {\n options = Object.assign({}, route);\n }\n // lowercase header names before merging with defaults to avoid duplicates\n options.headers = lowercaseKeys(options.headers);\n // remove properties with undefined values before merging\n removeUndefinedProperties(options);\n removeUndefinedProperties(options.headers);\n const mergedOptions = mergeDeep(defaults || {}, options);\n // mediaType.previews arrays are merged, instead of overwritten\n if (defaults && defaults.mediaType.previews.length) {\n mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);\n }\n mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, \"\"));\n return mergedOptions;\n}\n\nfunction addQueryParameters(url, parameters) {\n const separator = /\\?/.test(url) ? \"&\" : \"?\";\n const names = Object.keys(parameters);\n if (names.length === 0) {\n return url;\n }\n return url + separator + names.map(name => {\n if (name === \"q\") {\n return \"q=\" + parameters.q.split(\"+\").map(encodeURIComponent).join(\"+\");\n }\n return `${name}=${encodeURIComponent(parameters[name])}`;\n }).join(\"&\");\n}\n\nconst urlVariableRegex = /\\{[^}]+\\}/g;\nfunction removeNonChars(variableName) {\n return variableName.replace(/^\\W+|\\W+$/g, \"\").split(/,/);\n}\nfunction extractUrlVariableNames(url) {\n const matches = url.match(urlVariableRegex);\n if (!matches) {\n return [];\n }\n return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);\n}\n\nfunction omit(object, keysToOmit) {\n return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {\n obj[key] = object[key];\n return obj;\n }, {});\n}\n\n// Based on https://github.com/bramstein/url-template, licensed under BSD\n// TODO: create separate package.\n//\n// Copyright (c) 2012-2014, Bram Stein\n// All rights reserved.\n// Redistribution and use in source and binary forms, with or without\n// modification, are permitted provided that the following conditions\n// are met:\n// 1. Redistributions of source code must retain the above copyright\n// notice, this list of conditions and the following disclaimer.\n// 2. Redistributions in binary form must reproduce the above copyright\n// notice, this list of conditions and the following disclaimer in the\n// documentation and/or other materials provided with the distribution.\n// 3. The name of the author may not be used to endorse or promote products\n// derived from this software without specific prior written permission.\n// THIS SOFTWARE IS PROVIDED BY THE AUTHOR \"AS IS\" AND ANY EXPRESS OR IMPLIED\n// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF\n// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO\n// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,\n// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,\n// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY\n// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING\n// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,\n// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n/* istanbul ignore file */\nfunction encodeReserved(str) {\n return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {\n if (!/%[0-9A-Fa-f]/.test(part)) {\n part = encodeURI(part).replace(/%5B/g, \"[\").replace(/%5D/g, \"]\");\n }\n return part;\n }).join(\"\");\n}\nfunction encodeUnreserved(str) {\n return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {\n return \"%\" + c.charCodeAt(0).toString(16).toUpperCase();\n });\n}\nfunction encodeValue(operator, value, key) {\n value = operator === \"+\" || operator === \"#\" ? encodeReserved(value) : encodeUnreserved(value);\n if (key) {\n return encodeUnreserved(key) + \"=\" + value;\n } else {\n return value;\n }\n}\nfunction isDefined(value) {\n return value !== undefined && value !== null;\n}\nfunction isKeyOperator(operator) {\n return operator === \";\" || operator === \"&\" || operator === \"?\";\n}\nfunction getValues(context, operator, key, modifier) {\n var value = context[key],\n result = [];\n if (isDefined(value) && value !== \"\") {\n if (typeof value === \"string\" || typeof value === \"number\" || typeof value === \"boolean\") {\n value = value.toString();\n if (modifier && modifier !== \"*\") {\n value = value.substring(0, parseInt(modifier, 10));\n }\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : \"\"));\n } else {\n if (modifier === \"*\") {\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : \"\"));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n result.push(encodeValue(operator, value[k], k));\n }\n });\n }\n } else {\n const tmp = [];\n if (Array.isArray(value)) {\n value.filter(isDefined).forEach(function (value) {\n tmp.push(encodeValue(operator, value));\n });\n } else {\n Object.keys(value).forEach(function (k) {\n if (isDefined(value[k])) {\n tmp.push(encodeUnreserved(k));\n tmp.push(encodeValue(operator, value[k].toString()));\n }\n });\n }\n if (isKeyOperator(operator)) {\n result.push(encodeUnreserved(key) + \"=\" + tmp.join(\",\"));\n } else if (tmp.length !== 0) {\n result.push(tmp.join(\",\"));\n }\n }\n }\n } else {\n if (operator === \";\") {\n if (isDefined(value)) {\n result.push(encodeUnreserved(key));\n }\n } else if (value === \"\" && (operator === \"&\" || operator === \"?\")) {\n result.push(encodeUnreserved(key) + \"=\");\n } else if (value === \"\") {\n result.push(\"\");\n }\n }\n return result;\n}\nfunction parseUrl(template) {\n return {\n expand: expand.bind(null, template)\n };\n}\nfunction expand(template, context) {\n var operators = [\"+\", \"#\", \".\", \"/\", \";\", \"?\", \"&\"];\n return template.replace(/\\{([^\\{\\}]+)\\}|([^\\{\\}]+)/g, function (_, expression, literal) {\n if (expression) {\n let operator = \"\";\n const values = [];\n if (operators.indexOf(expression.charAt(0)) !== -1) {\n operator = expression.charAt(0);\n expression = expression.substr(1);\n }\n expression.split(/,/g).forEach(function (variable) {\n var tmp = /([^:\\*]*)(?::(\\d+)|(\\*))?/.exec(variable);\n values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));\n });\n if (operator && operator !== \"+\") {\n var separator = \",\";\n if (operator === \"?\") {\n separator = \"&\";\n } else if (operator !== \"#\") {\n separator = operator;\n }\n return (values.length !== 0 ? operator : \"\") + values.join(separator);\n } else {\n return values.join(\",\");\n }\n } else {\n return encodeReserved(literal);\n }\n });\n}\n\nfunction parse(options) {\n // https://fetch.spec.whatwg.org/#methods\n let method = options.method.toUpperCase();\n // replace :varname with {varname} to make it RFC 6570 compatible\n let url = (options.url || \"/\").replace(/:([a-z]\\w+)/g, \"{$1}\");\n let headers = Object.assign({}, options.headers);\n let body;\n let parameters = omit(options, [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"mediaType\"]);\n // extract variable names from URL to calculate remaining variables later\n const urlVariableNames = extractUrlVariableNames(url);\n url = parseUrl(url).expand(parameters);\n if (!/^http/.test(url)) {\n url = options.baseUrl + url;\n }\n const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat(\"baseUrl\");\n const remainingParameters = omit(parameters, omittedParameters);\n const isBinaryRequest = /application\\/octet-stream/i.test(headers.accept);\n if (!isBinaryRequest) {\n if (options.mediaType.format) {\n // e.g. 
application/vnd.github.v3+json => application/vnd.github.v3.raw\n headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\\/vnd(\\.\\w+)(\\.v3)?(\\.\\w+)?(\\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(\",\");\n }\n if (options.mediaType.previews.length) {\n const previewsFromAcceptHeader = headers.accept.match(/[\\w-]+(?=-preview)/g) || [];\n headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {\n const format = options.mediaType.format ? `.${options.mediaType.format}` : \"+json\";\n return `application/vnd.github.${preview}-preview${format}`;\n }).join(\",\");\n }\n }\n // for GET/HEAD requests, set URL query parameters from remaining parameters\n // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters\n if ([\"GET\", \"HEAD\"].includes(method)) {\n url = addQueryParameters(url, remainingParameters);\n } else {\n if (\"data\" in remainingParameters) {\n body = remainingParameters.data;\n } else {\n if (Object.keys(remainingParameters).length) {\n body = remainingParameters;\n }\n }\n }\n // default content-type for JSON if body is set\n if (!headers[\"content-type\"] && typeof body !== \"undefined\") {\n headers[\"content-type\"] = \"application/json; charset=utf-8\";\n }\n // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.\n // fetch does not allow to set `content-length` header, but we can set body to an empty string\n if ([\"PATCH\", \"PUT\"].includes(method) && typeof body === \"undefined\") {\n body = \"\";\n }\n // Only return body/request keys if present\n return Object.assign({\n method,\n url,\n headers\n }, typeof body !== \"undefined\" ? {\n body\n } : null, options.request ? {\n request: options.request\n } : null);\n}\n\nfunction endpointWithDefaults(defaults, route, options) {\n return parse(merge(defaults, route, options));\n}\n\nfunction withDefaults(oldDefaults, newDefaults) {\n const DEFAULTS = merge(oldDefaults, newDefaults);\n const endpoint = endpointWithDefaults.bind(null, DEFAULTS);\n return Object.assign(endpoint, {\n DEFAULTS,\n defaults: withDefaults.bind(null, DEFAULTS),\n merge: merge.bind(null, DEFAULTS),\n parse\n });\n}\n\nconst VERSION = \"7.0.5\";\n\nconst userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`;\n// DEFAULTS has all properties set that EndpointOptions has, except url.\n// So we use RequestParameters and add method as additional required property.\nconst DEFAULTS = {\n method: \"GET\",\n baseUrl: \"https://api.github.com\",\n headers: {\n accept: \"application/vnd.github.v3+json\",\n \"user-agent\": userAgent\n },\n mediaType: {\n format: \"\",\n previews: []\n }\n};\n\nconst endpoint = withDefaults(null, DEFAULTS);\n\nexports.endpoint = endpoint;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar request = require('@octokit/request');\nvar universalUserAgent = require('universal-user-agent');\n\nconst VERSION = \"5.0.5\";\n\nfunction _buildMessageForResponseErrors(data) {\n return `Request failed due to following response errors:\\n` + data.errors.map(e => ` - ${e.message}`).join(\"\\n\");\n}\nclass GraphqlResponseError extends Error {\n constructor(request, headers, response) {\n super(_buildMessageForResponseErrors(response));\n this.request = request;\n this.headers = headers;\n this.response = response;\n this.name = \"GraphqlResponseError\";\n // Expose the errors and response data in 
their shorthand properties.\n this.errors = response.errors;\n this.data = response.data;\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n }\n}\n\nconst NON_VARIABLE_OPTIONS = [\"method\", \"baseUrl\", \"url\", \"headers\", \"request\", \"query\", \"mediaType\"];\nconst FORBIDDEN_VARIABLE_OPTIONS = [\"query\", \"method\", \"url\"];\nconst GHES_V3_SUFFIX_REGEX = /\\/api\\/v3\\/?$/;\nfunction graphql(request, query, options) {\n if (options) {\n if (typeof query === \"string\" && \"query\" in options) {\n return Promise.reject(new Error(`[@octokit/graphql] \"query\" cannot be used as variable name`));\n }\n for (const key in options) {\n if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;\n return Promise.reject(new Error(`[@octokit/graphql] \"${key}\" cannot be used as variable name`));\n }\n }\n const parsedOptions = typeof query === \"string\" ? Object.assign({\n query\n }, options) : query;\n const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {\n if (NON_VARIABLE_OPTIONS.includes(key)) {\n result[key] = parsedOptions[key];\n return result;\n }\n if (!result.variables) {\n result.variables = {};\n }\n result.variables[key] = parsedOptions[key];\n return result;\n }, {});\n // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix\n // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451\n const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;\n if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {\n requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, \"/api/graphql\");\n }\n return request(requestOptions).then(response => {\n if (response.data.errors) {\n const headers = {};\n for (const key of Object.keys(response.headers)) {\n headers[key] = response.headers[key];\n }\n throw new GraphqlResponseError(requestOptions, headers, response.data);\n }\n return response.data.data;\n });\n}\n\nfunction withDefaults(request, newDefaults) {\n const newRequest = request.defaults(newDefaults);\n const newApi = (query, options) => {\n return graphql(newRequest, query, options);\n };\n return Object.assign(newApi, {\n defaults: withDefaults.bind(null, newRequest),\n endpoint: newRequest.endpoint\n });\n}\n\nconst graphql$1 = withDefaults(request.request, {\n headers: {\n \"user-agent\": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n },\n method: \"POST\",\n url: \"/graphql\"\n});\nfunction withCustomRequest(customRequest) {\n return withDefaults(customRequest, {\n method: \"POST\",\n url: \"/graphql\"\n });\n}\n\nexports.GraphqlResponseError = GraphqlResponseError;\nexports.graphql = graphql$1;\nexports.withCustomRequest = withCustomRequest;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"6.0.0\";\n\n/**\n * Some “list” response that can be paginated have a different response structure\n *\n * They have a `total_count` key in the response (search also has `incomplete_results`,\n * /installation/repositories also has `repository_selection`), as well as a key with\n * the list of the items which name varies from endpoint to endpoint.\n *\n * Octokit normalizes these responses so that paginated results are always returned following\n * the same structure. 
One challenge is that if the list response has only one page, no Link\n * header is provided, so this header alone is not sufficient to check wether a response is\n * paginated or not.\n *\n * We check if a \"total_count\" key is present in the response data, but also make sure that\n * a \"url\" property is not, as the \"Get the combined status for a specific ref\" endpoint would\n * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref\n */\nfunction normalizePaginatedListResponse(response) {\n // endpoints can respond with 204 if repository is empty\n if (!response.data) {\n return {\n ...response,\n data: []\n };\n }\n const responseNeedsNormalization = \"total_count\" in response.data && !(\"url\" in response.data);\n if (!responseNeedsNormalization) return response;\n // keep the additional properties intact as there is currently no other way\n // to retrieve the same information.\n const incompleteResults = response.data.incomplete_results;\n const repositorySelection = response.data.repository_selection;\n const totalCount = response.data.total_count;\n delete response.data.incomplete_results;\n delete response.data.repository_selection;\n delete response.data.total_count;\n const namespaceKey = Object.keys(response.data)[0];\n const data = response.data[namespaceKey];\n response.data = data;\n if (typeof incompleteResults !== \"undefined\") {\n response.data.incomplete_results = incompleteResults;\n }\n if (typeof repositorySelection !== \"undefined\") {\n response.data.repository_selection = repositorySelection;\n }\n response.data.total_count = totalCount;\n return response;\n}\n\nfunction iterator(octokit, route, parameters) {\n const options = typeof route === \"function\" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);\n const requestMethod = typeof route === \"function\" ? route : octokit.request;\n const method = options.method;\n const headers = options.headers;\n let url = options.url;\n return {\n [Symbol.asyncIterator]: () => ({\n async next() {\n if (!url) return {\n done: true\n };\n try {\n const response = await requestMethod({\n method,\n url,\n headers\n });\n const normalizedResponse = normalizePaginatedListResponse(response);\n // `response.headers.link` format:\n // '; rel=\"next\", ; rel=\"last\"'\n // sets `url` to undefined if \"next\" URL is not present or `link` header is not set\n url = ((normalizedResponse.headers.link || \"\").match(/<([^>]+)>;\\s*rel=\"next\"/) || [])[1];\n return {\n value: normalizedResponse\n };\n } catch (error) {\n if (error.status !== 409) throw error;\n url = \"\";\n return {\n value: {\n status: 200,\n headers: {},\n data: []\n }\n };\n }\n }\n })\n };\n}\n\nfunction paginate(octokit, route, parameters, mapFn) {\n if (typeof parameters === \"function\") {\n mapFn = parameters;\n parameters = undefined;\n }\n return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);\n}\nfunction gather(octokit, results, iterator, mapFn) {\n return iterator.next().then(result => {\n if (result.done) {\n return results;\n }\n let earlyExit = false;\n function done() {\n earlyExit = true;\n }\n results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data);\n if (earlyExit) {\n return results;\n }\n return gather(octokit, results, iterator, mapFn);\n });\n}\n\nconst composePaginateRest = Object.assign(paginate, {\n iterator\n});\n\nconst paginatingEndpoints = [\"GET /app/hook/deliveries\", \"GET /app/installations\", \"GET /enterprises/{enterprise}/actions/runner-groups\", \"GET /enterprises/{enterprise}/dependabot/alerts\", \"GET /enterprises/{enterprise}/secret-scanning/alerts\", \"GET /events\", \"GET /gists\", \"GET /gists/public\", \"GET /gists/starred\", \"GET /gists/{gist_id}/comments\", \"GET /gists/{gist_id}/commits\", \"GET /gists/{gist_id}/forks\", \"GET /installation/repositories\", \"GET /issues\", \"GET /licenses\", \"GET /marketplace_listing/plans\", \"GET /marketplace_listing/plans/{plan_id}/accounts\", \"GET /marketplace_listing/stubbed/plans\", \"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\", \"GET /networks/{owner}/{repo}/events\", \"GET /notifications\", \"GET /organizations\", \"GET /orgs/{org}/actions/cache/usage-by-repository\", \"GET /orgs/{org}/actions/permissions/repositories\", \"GET /orgs/{org}/actions/required_workflows\", \"GET /orgs/{org}/actions/runner-groups\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories\", \"GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners\", \"GET /orgs/{org}/actions/runners\", \"GET /orgs/{org}/actions/secrets\", \"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/actions/variables\", \"GET /orgs/{org}/actions/variables/{name}/repositories\", \"GET /orgs/{org}/blocks\", \"GET /orgs/{org}/code-scanning/alerts\", \"GET /orgs/{org}/codespaces\", \"GET /orgs/{org}/codespaces/secrets\", \"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/dependabot/alerts\", \"GET /orgs/{org}/dependabot/secrets\", \"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\", \"GET /orgs/{org}/events\", \"GET /orgs/{org}/failed_invitations\", \"GET /orgs/{org}/hooks\", \"GET /orgs/{org}/hooks/{hook_id}/deliveries\", \"GET /orgs/{org}/installations\", \"GET /orgs/{org}/invitations\", \"GET /orgs/{org}/invitations/{invitation_id}/teams\", \"GET /orgs/{org}/issues\", \"GET /orgs/{org}/members\", \"GET /orgs/{org}/members/{username}/codespaces\", \"GET /orgs/{org}/migrations\", \"GET /orgs/{org}/migrations/{migration_id}/repositories\", \"GET /orgs/{org}/outside_collaborators\", \"GET /orgs/{org}/packages\", \"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", \"GET /orgs/{org}/projects\", \"GET /orgs/{org}/public_members\", \"GET /orgs/{org}/repos\", \"GET /orgs/{org}/secret-scanning/alerts\", \"GET /orgs/{org}/teams\", \"GET /orgs/{org}/teams/{team_slug}/discussions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\", \"GET /orgs/{org}/teams/{team_slug}/invitations\", \"GET /orgs/{org}/teams/{team_slug}/members\", \"GET /orgs/{org}/teams/{team_slug}/projects\", \"GET /orgs/{org}/teams/{team_slug}/repos\", \"GET /orgs/{org}/teams/{team_slug}/teams\", \"GET /projects/columns/{column_id}/cards\", \"GET /projects/{project_id}/collaborators\", \"GET /projects/{project_id}/columns\", \"GET /repos/{org}/{repo}/actions/required_workflows\", \"GET /repos/{owner}/{repo}/actions/artifacts\", \"GET 
/repos/{owner}/{repo}/actions/caches\", \"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\", \"GET /repos/{owner}/{repo}/actions/runners\", \"GET /repos/{owner}/{repo}/actions/runs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\", \"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\", \"GET /repos/{owner}/{repo}/actions/secrets\", \"GET /repos/{owner}/{repo}/actions/variables\", \"GET /repos/{owner}/{repo}/actions/workflows\", \"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\", \"GET /repos/{owner}/{repo}/assignees\", \"GET /repos/{owner}/{repo}/branches\", \"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\", \"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\", \"GET /repos/{owner}/{repo}/code-scanning/alerts\", \"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", \"GET /repos/{owner}/{repo}/code-scanning/analyses\", \"GET /repos/{owner}/{repo}/codespaces\", \"GET /repos/{owner}/{repo}/codespaces/devcontainers\", \"GET /repos/{owner}/{repo}/codespaces/secrets\", \"GET /repos/{owner}/{repo}/collaborators\", \"GET /repos/{owner}/{repo}/comments\", \"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/commits\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\", \"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\", \"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\", \"GET /repos/{owner}/{repo}/commits/{ref}/status\", \"GET /repos/{owner}/{repo}/commits/{ref}/statuses\", \"GET /repos/{owner}/{repo}/contributors\", \"GET /repos/{owner}/{repo}/dependabot/alerts\", \"GET /repos/{owner}/{repo}/dependabot/secrets\", \"GET /repos/{owner}/{repo}/deployments\", \"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\", \"GET /repos/{owner}/{repo}/environments\", \"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\", \"GET /repos/{owner}/{repo}/events\", \"GET /repos/{owner}/{repo}/forks\", \"GET /repos/{owner}/{repo}/hooks\", \"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\", \"GET /repos/{owner}/{repo}/invitations\", \"GET /repos/{owner}/{repo}/issues\", \"GET /repos/{owner}/{repo}/issues/comments\", \"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/issues/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/events\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\", \"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\", \"GET /repos/{owner}/{repo}/keys\", \"GET /repos/{owner}/{repo}/labels\", \"GET /repos/{owner}/{repo}/milestones\", \"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\", \"GET /repos/{owner}/{repo}/notifications\", \"GET /repos/{owner}/{repo}/pages/builds\", \"GET /repos/{owner}/{repo}/projects\", \"GET /repos/{owner}/{repo}/pulls\", \"GET /repos/{owner}/{repo}/pulls/comments\", \"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\", \"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\", \"GET 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\", \"GET /repos/{owner}/{repo}/releases\", \"GET /repos/{owner}/{repo}/releases/{release_id}/assets\", \"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts\", \"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\", \"GET /repos/{owner}/{repo}/stargazers\", \"GET /repos/{owner}/{repo}/subscribers\", \"GET /repos/{owner}/{repo}/tags\", \"GET /repos/{owner}/{repo}/teams\", \"GET /repos/{owner}/{repo}/topics\", \"GET /repositories\", \"GET /repositories/{repository_id}/environments/{environment_name}/secrets\", \"GET /repositories/{repository_id}/environments/{environment_name}/variables\", \"GET /search/code\", \"GET /search/commits\", \"GET /search/issues\", \"GET /search/labels\", \"GET /search/repositories\", \"GET /search/topics\", \"GET /search/users\", \"GET /teams/{team_id}/discussions\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments\", \"GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions\", \"GET /teams/{team_id}/discussions/{discussion_number}/reactions\", \"GET /teams/{team_id}/invitations\", \"GET /teams/{team_id}/members\", \"GET /teams/{team_id}/projects\", \"GET /teams/{team_id}/repos\", \"GET /teams/{team_id}/teams\", \"GET /user/blocks\", \"GET /user/codespaces\", \"GET /user/codespaces/secrets\", \"GET /user/emails\", \"GET /user/followers\", \"GET /user/following\", \"GET /user/gpg_keys\", \"GET /user/installations\", \"GET /user/installations/{installation_id}/repositories\", \"GET /user/issues\", \"GET /user/keys\", \"GET /user/marketplace_purchases\", \"GET /user/marketplace_purchases/stubbed\", \"GET /user/memberships/orgs\", \"GET /user/migrations\", \"GET /user/migrations/{migration_id}/repositories\", \"GET /user/orgs\", \"GET /user/packages\", \"GET /user/packages/{package_type}/{package_name}/versions\", \"GET /user/public_emails\", \"GET /user/repos\", \"GET /user/repository_invitations\", \"GET /user/ssh_signing_keys\", \"GET /user/starred\", \"GET /user/subscriptions\", \"GET /user/teams\", \"GET /users\", \"GET /users/{username}/events\", \"GET /users/{username}/events/orgs/{org}\", \"GET /users/{username}/events/public\", \"GET /users/{username}/followers\", \"GET /users/{username}/following\", \"GET /users/{username}/gists\", \"GET /users/{username}/gpg_keys\", \"GET /users/{username}/keys\", \"GET /users/{username}/orgs\", \"GET /users/{username}/packages\", \"GET /users/{username}/projects\", \"GET /users/{username}/received_events\", \"GET /users/{username}/received_events/public\", \"GET /users/{username}/repos\", \"GET /users/{username}/ssh_signing_keys\", \"GET /users/{username}/starred\", \"GET /users/{username}/subscriptions\"];\n\nfunction isPaginatingEndpoint(arg) {\n if (typeof arg === \"string\") {\n return paginatingEndpoints.includes(arg);\n } else {\n return false;\n }\n}\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\nfunction paginateRest(octokit) {\n return {\n paginate: Object.assign(paginate.bind(null, octokit), {\n iterator: iterator.bind(null, octokit)\n })\n };\n}\npaginateRest.VERSION = VERSION;\n\nexports.composePaginateRest = composePaginateRest;\nexports.isPaginatingEndpoint = isPaginatingEndpoint;\nexports.paginateRest = paginateRest;\nexports.paginatingEndpoints = paginatingEndpoints;\n//# sourceMappingURL=index.js.map\n","'use 
strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst VERSION = \"1.0.4\";\n\n/**\n * @param octokit Octokit instance\n * @param options Options passed to Octokit constructor\n */\n\nfunction requestLog(octokit) {\n octokit.hook.wrap(\"request\", (request, options) => {\n octokit.log.debug(\"request\", options);\n const start = Date.now();\n const requestOptions = octokit.request.endpoint.parse(options);\n const path = requestOptions.url.replace(options.baseUrl, \"\");\n return request(options).then(response => {\n octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`);\n return response;\n }).catch(error => {\n octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`);\n throw error;\n });\n });\n}\nrequestLog.VERSION = VERSION;\n\nexports.requestLog = requestLog;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nconst Endpoints = {\n actions: {\n addCustomLabelsToSelfHostedRunnerForOrg: [\"POST /orgs/{org}/actions/runners/{runner_id}/labels\"],\n addCustomLabelsToSelfHostedRunnerForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n addSelectedRepoToOrgVariable: [\"PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"],\n addSelectedRepoToRequiredWorkflow: [\"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"],\n approveWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve\"],\n cancelWorkflowRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel\"],\n createEnvironmentVariable: [\"POST /repositories/{repository_id}/environments/{environment_name}/variables\"],\n createOrUpdateEnvironmentSecret: [\"PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n createOrgVariable: [\"POST /orgs/{org}/actions/variables\"],\n createRegistrationTokenForOrg: [\"POST /orgs/{org}/actions/runners/registration-token\"],\n createRegistrationTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/registration-token\"],\n createRemoveTokenForOrg: [\"POST /orgs/{org}/actions/runners/remove-token\"],\n createRemoveTokenForRepo: [\"POST /repos/{owner}/{repo}/actions/runners/remove-token\"],\n createRepoVariable: [\"POST /repos/{owner}/{repo}/actions/variables\"],\n createRequiredWorkflow: [\"POST /orgs/{org}/actions/required_workflows\"],\n createWorkflowDispatch: [\"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches\"],\n deleteActionsCacheById: [\"DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}\"],\n deleteActionsCacheByKey: [\"DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}\"],\n deleteArtifact: [\"DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n deleteEnvironmentSecret: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n deleteEnvironmentVariable: [\"DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}\"],\n deleteOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}\"],\n 
deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n deleteRepoVariable: [\"DELETE /repos/{owner}/{repo}/actions/variables/{name}\"],\n deleteRequiredWorkflow: [\"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}\"],\n deleteSelfHostedRunnerFromOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}\"],\n deleteSelfHostedRunnerFromRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n deleteWorkflowRun: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n deleteWorkflowRunLogs: [\"DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n disableSelectedRepositoryGithubActionsOrganization: [\"DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n disableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable\"],\n downloadArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}\"],\n downloadJobLogsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs\"],\n downloadWorkflowRunAttemptLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs\"],\n downloadWorkflowRunLogs: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs\"],\n enableSelectedRepositoryGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories/{repository_id}\"],\n enableWorkflow: [\"PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable\"],\n getActionsCacheList: [\"GET /repos/{owner}/{repo}/actions/caches\"],\n getActionsCacheUsage: [\"GET /repos/{owner}/{repo}/actions/cache/usage\"],\n getActionsCacheUsageByRepoForOrg: [\"GET /orgs/{org}/actions/cache/usage-by-repository\"],\n getActionsCacheUsageForOrg: [\"GET /orgs/{org}/actions/cache/usage\"],\n getAllowedActionsOrganization: [\"GET /orgs/{org}/actions/permissions/selected-actions\"],\n getAllowedActionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n getArtifact: [\"GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}\"],\n getEnvironmentPublicKey: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key\"],\n getEnvironmentSecret: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}\"],\n getEnvironmentVariable: [\"GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"],\n getGithubActionsDefaultWorkflowPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions/workflow\"],\n getGithubActionsDefaultWorkflowPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/workflow\"],\n getGithubActionsPermissionsOrganization: [\"GET /orgs/{org}/actions/permissions\"],\n getGithubActionsPermissionsRepository: [\"GET /repos/{owner}/{repo}/actions/permissions\"],\n getJobForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/jobs/{job_id}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/actions/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}\"],\n getOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}\"],\n getPendingDeploymentsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n getRepoPermissions: [\"GET /repos/{owner}/{repo}/actions/permissions\", {}, {\n renamed: [\"actions\", \"getGithubActionsPermissionsRepository\"]\n }],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/actions/secrets/public-key\"],\n getRepoRequiredWorkflow: [\"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}\"],\n 
getRepoRequiredWorkflowUsage: [\"GET /repos/{org}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/timing\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/actions/secrets/{secret_name}\"],\n getRepoVariable: [\"GET /repos/{owner}/{repo}/actions/variables/{name}\"],\n getRequiredWorkflow: [\"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}\"],\n getReviewsForRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals\"],\n getSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}\"],\n getSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}\"],\n getWorkflow: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}\"],\n getWorkflowAccessToRepository: [\"GET /repos/{owner}/{repo}/actions/permissions/access\"],\n getWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}\"],\n getWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}\"],\n getWorkflowRunUsage: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing\"],\n getWorkflowUsage: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing\"],\n listArtifactsForRepo: [\"GET /repos/{owner}/{repo}/actions/artifacts\"],\n listEnvironmentSecrets: [\"GET /repositories/{repository_id}/environments/{environment_name}/secrets\"],\n listEnvironmentVariables: [\"GET /repositories/{repository_id}/environments/{environment_name}/variables\"],\n listJobsForWorkflowRun: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs\"],\n listJobsForWorkflowRunAttempt: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs\"],\n listLabelsForSelfHostedRunnerForOrg: [\"GET /orgs/{org}/actions/runners/{runner_id}/labels\"],\n listLabelsForSelfHostedRunnerForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n listOrgSecrets: [\"GET /orgs/{org}/actions/secrets\"],\n listOrgVariables: [\"GET /orgs/{org}/actions/variables\"],\n listRepoRequiredWorkflows: [\"GET /repos/{org}/{repo}/actions/required_workflows\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/actions/secrets\"],\n listRepoVariables: [\"GET /repos/{owner}/{repo}/actions/variables\"],\n listRepoWorkflows: [\"GET /repos/{owner}/{repo}/actions/workflows\"],\n listRequiredWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/required_workflows/{required_workflow_id_for_repo}/runs\"],\n listRequiredWorkflows: [\"GET /orgs/{org}/actions/required_workflows\"],\n listRunnerApplicationsForOrg: [\"GET /orgs/{org}/actions/runners/downloads\"],\n listRunnerApplicationsForRepo: [\"GET /repos/{owner}/{repo}/actions/runners/downloads\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n listSelectedReposForOrgVariable: [\"GET /orgs/{org}/actions/variables/{name}/repositories\"],\n listSelectedRepositoriesEnabledGithubActionsOrganization: [\"GET /orgs/{org}/actions/permissions/repositories\"],\n listSelectedRepositoriesRequiredWorkflow: [\"GET /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"],\n listSelfHostedRunnersForOrg: [\"GET /orgs/{org}/actions/runners\"],\n listSelfHostedRunnersForRepo: [\"GET /repos/{owner}/{repo}/actions/runners\"],\n listWorkflowRunArtifacts: [\"GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts\"],\n listWorkflowRuns: [\"GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs\"],\n listWorkflowRunsForRepo: [\"GET /repos/{owner}/{repo}/actions/runs\"],\n reRunJobForWorkflowRun: [\"POST 
/repos/{owner}/{repo}/actions/jobs/{job_id}/rerun\"],\n reRunWorkflow: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun\"],\n reRunWorkflowFailedJobs: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs\"],\n removeAllCustomLabelsFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels\"],\n removeAllCustomLabelsFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n removeCustomLabelFromSelfHostedRunnerForOrg: [\"DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}\"],\n removeCustomLabelFromSelfHostedRunnerForRepo: [\"DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}\"],\n removeSelectedRepoFromOrgVariable: [\"DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}\"],\n removeSelectedRepoFromRequiredWorkflow: [\"DELETE /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories/{repository_id}\"],\n reviewPendingDeploymentsForRun: [\"POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments\"],\n setAllowedActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/selected-actions\"],\n setAllowedActionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/selected-actions\"],\n setCustomLabelsForSelfHostedRunnerForOrg: [\"PUT /orgs/{org}/actions/runners/{runner_id}/labels\"],\n setCustomLabelsForSelfHostedRunnerForRepo: [\"PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels\"],\n setGithubActionsDefaultWorkflowPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions/workflow\"],\n setGithubActionsDefaultWorkflowPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/workflow\"],\n setGithubActionsPermissionsOrganization: [\"PUT /orgs/{org}/actions/permissions\"],\n setGithubActionsPermissionsRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/actions/secrets/{secret_name}/repositories\"],\n setSelectedReposForOrgVariable: [\"PUT /orgs/{org}/actions/variables/{name}/repositories\"],\n setSelectedReposToRequiredWorkflow: [\"PUT /orgs/{org}/actions/required_workflows/{required_workflow_id}/repositories\"],\n setSelectedRepositoriesEnabledGithubActionsOrganization: [\"PUT /orgs/{org}/actions/permissions/repositories\"],\n setWorkflowAccessToRepository: [\"PUT /repos/{owner}/{repo}/actions/permissions/access\"],\n updateEnvironmentVariable: [\"PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}\"],\n updateOrgVariable: [\"PATCH /orgs/{org}/actions/variables/{name}\"],\n updateRepoVariable: [\"PATCH /repos/{owner}/{repo}/actions/variables/{name}\"],\n updateRequiredWorkflow: [\"PATCH /orgs/{org}/actions/required_workflows/{required_workflow_id}\"]\n },\n activity: {\n checkRepoIsStarredByAuthenticatedUser: [\"GET /user/starred/{owner}/{repo}\"],\n deleteRepoSubscription: [\"DELETE /repos/{owner}/{repo}/subscription\"],\n deleteThreadSubscription: [\"DELETE /notifications/threads/{thread_id}/subscription\"],\n getFeeds: [\"GET /feeds\"],\n getRepoSubscription: [\"GET /repos/{owner}/{repo}/subscription\"],\n getThread: [\"GET /notifications/threads/{thread_id}\"],\n getThreadSubscriptionForAuthenticatedUser: [\"GET /notifications/threads/{thread_id}/subscription\"],\n listEventsForAuthenticatedUser: [\"GET /users/{username}/events\"],\n 
listNotificationsForAuthenticatedUser: [\"GET /notifications\"],\n listOrgEventsForAuthenticatedUser: [\"GET /users/{username}/events/orgs/{org}\"],\n listPublicEvents: [\"GET /events\"],\n listPublicEventsForRepoNetwork: [\"GET /networks/{owner}/{repo}/events\"],\n listPublicEventsForUser: [\"GET /users/{username}/events/public\"],\n listPublicOrgEvents: [\"GET /orgs/{org}/events\"],\n listReceivedEventsForUser: [\"GET /users/{username}/received_events\"],\n listReceivedPublicEventsForUser: [\"GET /users/{username}/received_events/public\"],\n listRepoEvents: [\"GET /repos/{owner}/{repo}/events\"],\n listRepoNotificationsForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/notifications\"],\n listReposStarredByAuthenticatedUser: [\"GET /user/starred\"],\n listReposStarredByUser: [\"GET /users/{username}/starred\"],\n listReposWatchedByUser: [\"GET /users/{username}/subscriptions\"],\n listStargazersForRepo: [\"GET /repos/{owner}/{repo}/stargazers\"],\n listWatchedReposForAuthenticatedUser: [\"GET /user/subscriptions\"],\n listWatchersForRepo: [\"GET /repos/{owner}/{repo}/subscribers\"],\n markNotificationsAsRead: [\"PUT /notifications\"],\n markRepoNotificationsAsRead: [\"PUT /repos/{owner}/{repo}/notifications\"],\n markThreadAsRead: [\"PATCH /notifications/threads/{thread_id}\"],\n setRepoSubscription: [\"PUT /repos/{owner}/{repo}/subscription\"],\n setThreadSubscription: [\"PUT /notifications/threads/{thread_id}/subscription\"],\n starRepoForAuthenticatedUser: [\"PUT /user/starred/{owner}/{repo}\"],\n unstarRepoForAuthenticatedUser: [\"DELETE /user/starred/{owner}/{repo}\"]\n },\n apps: {\n addRepoToInstallation: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"addRepoToInstallationForAuthenticatedUser\"]\n }],\n addRepoToInstallationForAuthenticatedUser: [\"PUT /user/installations/{installation_id}/repositories/{repository_id}\"],\n checkToken: [\"POST /applications/{client_id}/token\"],\n createFromManifest: [\"POST /app-manifests/{code}/conversions\"],\n createInstallationAccessToken: [\"POST /app/installations/{installation_id}/access_tokens\"],\n deleteAuthorization: [\"DELETE /applications/{client_id}/grant\"],\n deleteInstallation: [\"DELETE /app/installations/{installation_id}\"],\n deleteToken: [\"DELETE /applications/{client_id}/token\"],\n getAuthenticated: [\"GET /app\"],\n getBySlug: [\"GET /apps/{app_slug}\"],\n getInstallation: [\"GET /app/installations/{installation_id}\"],\n getOrgInstallation: [\"GET /orgs/{org}/installation\"],\n getRepoInstallation: [\"GET /repos/{owner}/{repo}/installation\"],\n getSubscriptionPlanForAccount: [\"GET /marketplace_listing/accounts/{account_id}\"],\n getSubscriptionPlanForAccountStubbed: [\"GET /marketplace_listing/stubbed/accounts/{account_id}\"],\n getUserInstallation: [\"GET /users/{username}/installation\"],\n getWebhookConfigForApp: [\"GET /app/hook/config\"],\n getWebhookDelivery: [\"GET /app/hook/deliveries/{delivery_id}\"],\n listAccountsForPlan: [\"GET /marketplace_listing/plans/{plan_id}/accounts\"],\n listAccountsForPlanStubbed: [\"GET /marketplace_listing/stubbed/plans/{plan_id}/accounts\"],\n listInstallationReposForAuthenticatedUser: [\"GET /user/installations/{installation_id}/repositories\"],\n listInstallations: [\"GET /app/installations\"],\n listInstallationsForAuthenticatedUser: [\"GET /user/installations\"],\n listPlans: [\"GET /marketplace_listing/plans\"],\n listPlansStubbed: [\"GET /marketplace_listing/stubbed/plans\"],\n listReposAccessibleToInstallation: 
[\"GET /installation/repositories\"],\n listSubscriptionsForAuthenticatedUser: [\"GET /user/marketplace_purchases\"],\n listSubscriptionsForAuthenticatedUserStubbed: [\"GET /user/marketplace_purchases/stubbed\"],\n listWebhookDeliveries: [\"GET /app/hook/deliveries\"],\n redeliverWebhookDelivery: [\"POST /app/hook/deliveries/{delivery_id}/attempts\"],\n removeRepoFromInstallation: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\", {}, {\n renamed: [\"apps\", \"removeRepoFromInstallationForAuthenticatedUser\"]\n }],\n removeRepoFromInstallationForAuthenticatedUser: [\"DELETE /user/installations/{installation_id}/repositories/{repository_id}\"],\n resetToken: [\"PATCH /applications/{client_id}/token\"],\n revokeInstallationAccessToken: [\"DELETE /installation/token\"],\n scopeToken: [\"POST /applications/{client_id}/token/scoped\"],\n suspendInstallation: [\"PUT /app/installations/{installation_id}/suspended\"],\n unsuspendInstallation: [\"DELETE /app/installations/{installation_id}/suspended\"],\n updateWebhookConfigForApp: [\"PATCH /app/hook/config\"]\n },\n billing: {\n getGithubActionsBillingOrg: [\"GET /orgs/{org}/settings/billing/actions\"],\n getGithubActionsBillingUser: [\"GET /users/{username}/settings/billing/actions\"],\n getGithubPackagesBillingOrg: [\"GET /orgs/{org}/settings/billing/packages\"],\n getGithubPackagesBillingUser: [\"GET /users/{username}/settings/billing/packages\"],\n getSharedStorageBillingOrg: [\"GET /orgs/{org}/settings/billing/shared-storage\"],\n getSharedStorageBillingUser: [\"GET /users/{username}/settings/billing/shared-storage\"]\n },\n checks: {\n create: [\"POST /repos/{owner}/{repo}/check-runs\"],\n createSuite: [\"POST /repos/{owner}/{repo}/check-suites\"],\n get: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}\"],\n getSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}\"],\n listAnnotations: [\"GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations\"],\n listForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-runs\"],\n listForSuite: [\"GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs\"],\n listSuitesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/check-suites\"],\n rerequestRun: [\"POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest\"],\n rerequestSuite: [\"POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest\"],\n setSuitesPreferences: [\"PATCH /repos/{owner}/{repo}/check-suites/preferences\"],\n update: [\"PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}\"]\n },\n codeScanning: {\n deleteAnalysis: [\"DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\", {}, {\n renamedParameters: {\n alert_id: \"alert_number\"\n }\n }],\n getAnalysis: [\"GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}\"],\n getCodeqlDatabase: [\"GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}\"],\n getSarif: [\"GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}\"],\n listAlertInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\"],\n listAlertsForOrg: [\"GET /orgs/{org}/code-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/code-scanning/alerts\"],\n listAlertsInstances: [\"GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances\", {}, {\n renamed: [\"codeScanning\", \"listAlertInstances\"]\n }],\n listCodeqlDatabases: [\"GET 
/repos/{owner}/{repo}/code-scanning/codeql/databases\"],\n listRecentAnalyses: [\"GET /repos/{owner}/{repo}/code-scanning/analyses\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}\"],\n uploadSarif: [\"POST /repos/{owner}/{repo}/code-scanning/sarifs\"]\n },\n codesOfConduct: {\n getAllCodesOfConduct: [\"GET /codes_of_conduct\"],\n getConductCode: [\"GET /codes_of_conduct/{key}\"]\n },\n codespaces: {\n addRepositoryForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n codespaceMachinesForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/machines\"],\n createForAuthenticatedUser: [\"POST /user/codespaces\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/codespaces/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n createOrUpdateSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}\"],\n createWithPrForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces\"],\n createWithRepoForAuthenticatedUser: [\"POST /repos/{owner}/{repo}/codespaces\"],\n deleteForAuthenticatedUser: [\"DELETE /user/codespaces/{codespace_name}\"],\n deleteFromOrganization: [\"DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n deleteSecretForAuthenticatedUser: [\"DELETE /user/codespaces/secrets/{secret_name}\"],\n exportForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/exports\"],\n getCodespacesForUserInOrg: [\"GET /orgs/{org}/members/{username}/codespaces\"],\n getExportDetailsForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}/exports/{export_id}\"],\n getForAuthenticatedUser: [\"GET /user/codespaces/{codespace_name}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/codespaces/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}\"],\n getPublicKeyForAuthenticatedUser: [\"GET /user/codespaces/secrets/public-key\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/codespaces/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}\"],\n getSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}\"],\n listDevcontainersInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/devcontainers\"],\n listForAuthenticatedUser: [\"GET /user/codespaces\"],\n listInOrganization: [\"GET /orgs/{org}/codespaces\", {}, {\n renamedParameters: {\n org_id: \"org\"\n }\n }],\n listInRepositoryForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces\"],\n listOrgSecrets: [\"GET /orgs/{org}/codespaces/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/codespaces/secrets\"],\n listRepositoriesForSecretForAuthenticatedUser: [\"GET /user/codespaces/secrets/{secret_name}/repositories\"],\n listSecretsForAuthenticatedUser: [\"GET /user/codespaces/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"],\n preFlightWithRepoForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/new\"],\n publishForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/publish\"],\n removeRepositoryForSecretForAuthenticatedUser: [\"DELETE 
/user/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}\"],\n repoMachinesForAuthenticatedUser: [\"GET /repos/{owner}/{repo}/codespaces/machines\"],\n setCodespacesBilling: [\"PUT /orgs/{org}/codespaces/billing\"],\n setRepositoriesForSecretForAuthenticatedUser: [\"PUT /user/codespaces/secrets/{secret_name}/repositories\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories\"],\n startForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/start\"],\n stopForAuthenticatedUser: [\"POST /user/codespaces/{codespace_name}/stop\"],\n stopInOrganization: [\"POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop\"],\n updateForAuthenticatedUser: [\"PATCH /user/codespaces/{codespace_name}\"]\n },\n dependabot: {\n addSelectedRepoToOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n createOrUpdateOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}\"],\n createOrUpdateRepoSecret: [\"PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n deleteOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}\"],\n deleteRepoSecret: [\"DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n getAlert: [\"GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"],\n getOrgPublicKey: [\"GET /orgs/{org}/dependabot/secrets/public-key\"],\n getOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}\"],\n getRepoPublicKey: [\"GET /repos/{owner}/{repo}/dependabot/secrets/public-key\"],\n getRepoSecret: [\"GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/dependabot/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/dependabot/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/dependabot/alerts\"],\n listOrgSecrets: [\"GET /orgs/{org}/dependabot/secrets\"],\n listRepoSecrets: [\"GET /repos/{owner}/{repo}/dependabot/secrets\"],\n listSelectedReposForOrgSecret: [\"GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n removeSelectedRepoFromOrgSecret: [\"DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}\"],\n setSelectedReposForOrgSecret: [\"PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}\"]\n },\n dependencyGraph: {\n createRepositorySnapshot: [\"POST /repos/{owner}/{repo}/dependency-graph/snapshots\"],\n diffRange: [\"GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}\"]\n },\n emojis: {\n get: [\"GET /emojis\"]\n },\n enterpriseAdmin: {\n addCustomLabelsToSelfHostedRunnerForEnterprise: [\"POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"],\n enableSelectedOrganizationGithubActionsEnterprise: [\"PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}\"],\n listLabelsForSelfHostedRunnerForEnterprise: [\"GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels\"]\n },\n gists: {\n checkIsStarred: [\"GET /gists/{gist_id}/star\"],\n create: [\"POST /gists\"],\n createComment: [\"POST /gists/{gist_id}/comments\"],\n delete: [\"DELETE /gists/{gist_id}\"],\n deleteComment: [\"DELETE /gists/{gist_id}/comments/{comment_id}\"],\n fork: [\"POST /gists/{gist_id}/forks\"],\n get: [\"GET /gists/{gist_id}\"],\n getComment: [\"GET 
/gists/{gist_id}/comments/{comment_id}\"],\n getRevision: [\"GET /gists/{gist_id}/{sha}\"],\n list: [\"GET /gists\"],\n listComments: [\"GET /gists/{gist_id}/comments\"],\n listCommits: [\"GET /gists/{gist_id}/commits\"],\n listForUser: [\"GET /users/{username}/gists\"],\n listForks: [\"GET /gists/{gist_id}/forks\"],\n listPublic: [\"GET /gists/public\"],\n listStarred: [\"GET /gists/starred\"],\n star: [\"PUT /gists/{gist_id}/star\"],\n unstar: [\"DELETE /gists/{gist_id}/star\"],\n update: [\"PATCH /gists/{gist_id}\"],\n updateComment: [\"PATCH /gists/{gist_id}/comments/{comment_id}\"]\n },\n git: {\n createBlob: [\"POST /repos/{owner}/{repo}/git/blobs\"],\n createCommit: [\"POST /repos/{owner}/{repo}/git/commits\"],\n createRef: [\"POST /repos/{owner}/{repo}/git/refs\"],\n createTag: [\"POST /repos/{owner}/{repo}/git/tags\"],\n createTree: [\"POST /repos/{owner}/{repo}/git/trees\"],\n deleteRef: [\"DELETE /repos/{owner}/{repo}/git/refs/{ref}\"],\n getBlob: [\"GET /repos/{owner}/{repo}/git/blobs/{file_sha}\"],\n getCommit: [\"GET /repos/{owner}/{repo}/git/commits/{commit_sha}\"],\n getRef: [\"GET /repos/{owner}/{repo}/git/ref/{ref}\"],\n getTag: [\"GET /repos/{owner}/{repo}/git/tags/{tag_sha}\"],\n getTree: [\"GET /repos/{owner}/{repo}/git/trees/{tree_sha}\"],\n listMatchingRefs: [\"GET /repos/{owner}/{repo}/git/matching-refs/{ref}\"],\n updateRef: [\"PATCH /repos/{owner}/{repo}/git/refs/{ref}\"]\n },\n gitignore: {\n getAllTemplates: [\"GET /gitignore/templates\"],\n getTemplate: [\"GET /gitignore/templates/{name}\"]\n },\n interactions: {\n getRestrictionsForAuthenticatedUser: [\"GET /user/interaction-limits\"],\n getRestrictionsForOrg: [\"GET /orgs/{org}/interaction-limits\"],\n getRestrictionsForRepo: [\"GET /repos/{owner}/{repo}/interaction-limits\"],\n getRestrictionsForYourPublicRepos: [\"GET /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"getRestrictionsForAuthenticatedUser\"]\n }],\n removeRestrictionsForAuthenticatedUser: [\"DELETE /user/interaction-limits\"],\n removeRestrictionsForOrg: [\"DELETE /orgs/{org}/interaction-limits\"],\n removeRestrictionsForRepo: [\"DELETE /repos/{owner}/{repo}/interaction-limits\"],\n removeRestrictionsForYourPublicRepos: [\"DELETE /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"removeRestrictionsForAuthenticatedUser\"]\n }],\n setRestrictionsForAuthenticatedUser: [\"PUT /user/interaction-limits\"],\n setRestrictionsForOrg: [\"PUT /orgs/{org}/interaction-limits\"],\n setRestrictionsForRepo: [\"PUT /repos/{owner}/{repo}/interaction-limits\"],\n setRestrictionsForYourPublicRepos: [\"PUT /user/interaction-limits\", {}, {\n renamed: [\"interactions\", \"setRestrictionsForAuthenticatedUser\"]\n }]\n },\n issues: {\n addAssignees: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n addLabels: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n checkUserCanBeAssigned: [\"GET /repos/{owner}/{repo}/assignees/{assignee}\"],\n checkUserCanBeAssignedToIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}\"],\n create: [\"POST /repos/{owner}/{repo}/issues\"],\n createComment: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n createLabel: [\"POST /repos/{owner}/{repo}/labels\"],\n createMilestone: [\"POST /repos/{owner}/{repo}/milestones\"],\n deleteComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n deleteLabel: [\"DELETE /repos/{owner}/{repo}/labels/{name}\"],\n deleteMilestone: [\"DELETE 
/repos/{owner}/{repo}/milestones/{milestone_number}\"],\n get: [\"GET /repos/{owner}/{repo}/issues/{issue_number}\"],\n getComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n getEvent: [\"GET /repos/{owner}/{repo}/issues/events/{event_id}\"],\n getLabel: [\"GET /repos/{owner}/{repo}/labels/{name}\"],\n getMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}\"],\n list: [\"GET /issues\"],\n listAssignees: [\"GET /repos/{owner}/{repo}/assignees\"],\n listComments: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/comments\"],\n listCommentsForRepo: [\"GET /repos/{owner}/{repo}/issues/comments\"],\n listEvents: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/events\"],\n listEventsForRepo: [\"GET /repos/{owner}/{repo}/issues/events\"],\n listEventsForTimeline: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/timeline\"],\n listForAuthenticatedUser: [\"GET /user/issues\"],\n listForOrg: [\"GET /orgs/{org}/issues\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/issues\"],\n listLabelsForMilestone: [\"GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels\"],\n listLabelsForRepo: [\"GET /repos/{owner}/{repo}/labels\"],\n listLabelsOnIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n listMilestones: [\"GET /repos/{owner}/{repo}/milestones\"],\n lock: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n removeAllLabels: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n removeAssignees: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees\"],\n removeLabel: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}\"],\n setLabels: [\"PUT /repos/{owner}/{repo}/issues/{issue_number}/labels\"],\n unlock: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock\"],\n update: [\"PATCH /repos/{owner}/{repo}/issues/{issue_number}\"],\n updateComment: [\"PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}\"],\n updateLabel: [\"PATCH /repos/{owner}/{repo}/labels/{name}\"],\n updateMilestone: [\"PATCH /repos/{owner}/{repo}/milestones/{milestone_number}\"]\n },\n licenses: {\n get: [\"GET /licenses/{license}\"],\n getAllCommonlyUsed: [\"GET /licenses\"],\n getForRepo: [\"GET /repos/{owner}/{repo}/license\"]\n },\n markdown: {\n render: [\"POST /markdown\"],\n renderRaw: [\"POST /markdown/raw\", {\n headers: {\n \"content-type\": \"text/plain; charset=utf-8\"\n }\n }]\n },\n meta: {\n get: [\"GET /meta\"],\n getAllVersions: [\"GET /versions\"],\n getOctocat: [\"GET /octocat\"],\n getZen: [\"GET /zen\"],\n root: [\"GET /\"]\n },\n migrations: {\n cancelImport: [\"DELETE /repos/{owner}/{repo}/import\"],\n deleteArchiveForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/archive\"],\n deleteArchiveForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/archive\"],\n downloadArchiveForOrg: [\"GET /orgs/{org}/migrations/{migration_id}/archive\"],\n getArchiveForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/archive\"],\n getCommitAuthors: [\"GET /repos/{owner}/{repo}/import/authors\"],\n getImportStatus: [\"GET /repos/{owner}/{repo}/import\"],\n getLargeFiles: [\"GET /repos/{owner}/{repo}/import/large_files\"],\n getStatusForAuthenticatedUser: [\"GET /user/migrations/{migration_id}\"],\n getStatusForOrg: [\"GET /orgs/{org}/migrations/{migration_id}\"],\n listForAuthenticatedUser: [\"GET /user/migrations\"],\n listForOrg: [\"GET /orgs/{org}/migrations\"],\n listReposForAuthenticatedUser: [\"GET /user/migrations/{migration_id}/repositories\"],\n listReposForOrg: [\"GET 
/orgs/{org}/migrations/{migration_id}/repositories\"],\n listReposForUser: [\"GET /user/migrations/{migration_id}/repositories\", {}, {\n renamed: [\"migrations\", \"listReposForAuthenticatedUser\"]\n }],\n mapCommitAuthor: [\"PATCH /repos/{owner}/{repo}/import/authors/{author_id}\"],\n setLfsPreference: [\"PATCH /repos/{owner}/{repo}/import/lfs\"],\n startForAuthenticatedUser: [\"POST /user/migrations\"],\n startForOrg: [\"POST /orgs/{org}/migrations\"],\n startImport: [\"PUT /repos/{owner}/{repo}/import\"],\n unlockRepoForAuthenticatedUser: [\"DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock\"],\n unlockRepoForOrg: [\"DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock\"],\n updateImport: [\"PATCH /repos/{owner}/{repo}/import\"]\n },\n orgs: {\n addSecurityManagerTeam: [\"PUT /orgs/{org}/security-managers/teams/{team_slug}\"],\n blockUser: [\"PUT /orgs/{org}/blocks/{username}\"],\n cancelInvitation: [\"DELETE /orgs/{org}/invitations/{invitation_id}\"],\n checkBlockedUser: [\"GET /orgs/{org}/blocks/{username}\"],\n checkMembershipForUser: [\"GET /orgs/{org}/members/{username}\"],\n checkPublicMembershipForUser: [\"GET /orgs/{org}/public_members/{username}\"],\n convertMemberToOutsideCollaborator: [\"PUT /orgs/{org}/outside_collaborators/{username}\"],\n createInvitation: [\"POST /orgs/{org}/invitations\"],\n createWebhook: [\"POST /orgs/{org}/hooks\"],\n deleteWebhook: [\"DELETE /orgs/{org}/hooks/{hook_id}\"],\n enableOrDisableSecurityProductOnAllOrgRepos: [\"POST /orgs/{org}/{security_product}/{enablement}\"],\n get: [\"GET /orgs/{org}\"],\n getMembershipForAuthenticatedUser: [\"GET /user/memberships/orgs/{org}\"],\n getMembershipForUser: [\"GET /orgs/{org}/memberships/{username}\"],\n getWebhook: [\"GET /orgs/{org}/hooks/{hook_id}\"],\n getWebhookConfigForOrg: [\"GET /orgs/{org}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n list: [\"GET /organizations\"],\n listAppInstallations: [\"GET /orgs/{org}/installations\"],\n listBlockedUsers: [\"GET /orgs/{org}/blocks\"],\n listFailedInvitations: [\"GET /orgs/{org}/failed_invitations\"],\n listForAuthenticatedUser: [\"GET /user/orgs\"],\n listForUser: [\"GET /users/{username}/orgs\"],\n listInvitationTeams: [\"GET /orgs/{org}/invitations/{invitation_id}/teams\"],\n listMembers: [\"GET /orgs/{org}/members\"],\n listMembershipsForAuthenticatedUser: [\"GET /user/memberships/orgs\"],\n listOutsideCollaborators: [\"GET /orgs/{org}/outside_collaborators\"],\n listPendingInvitations: [\"GET /orgs/{org}/invitations\"],\n listPublicMembers: [\"GET /orgs/{org}/public_members\"],\n listSecurityManagerTeams: [\"GET /orgs/{org}/security-managers\"],\n listWebhookDeliveries: [\"GET /orgs/{org}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /orgs/{org}/hooks\"],\n pingWebhook: [\"POST /orgs/{org}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeMember: [\"DELETE /orgs/{org}/members/{username}\"],\n removeMembershipForUser: [\"DELETE /orgs/{org}/memberships/{username}\"],\n removeOutsideCollaborator: [\"DELETE /orgs/{org}/outside_collaborators/{username}\"],\n removePublicMembershipForAuthenticatedUser: [\"DELETE /orgs/{org}/public_members/{username}\"],\n removeSecurityManagerTeam: [\"DELETE /orgs/{org}/security-managers/teams/{team_slug}\"],\n setMembershipForUser: [\"PUT /orgs/{org}/memberships/{username}\"],\n setPublicMembershipForAuthenticatedUser: [\"PUT 
/orgs/{org}/public_members/{username}\"],\n unblockUser: [\"DELETE /orgs/{org}/blocks/{username}\"],\n update: [\"PATCH /orgs/{org}\"],\n updateMembershipForAuthenticatedUser: [\"PATCH /user/memberships/orgs/{org}\"],\n updateWebhook: [\"PATCH /orgs/{org}/hooks/{hook_id}\"],\n updateWebhookConfigForOrg: [\"PATCH /orgs/{org}/hooks/{hook_id}/config\"]\n },\n packages: {\n deletePackageForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}\"],\n deletePackageForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}\"],\n deletePackageForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}\"],\n deletePackageVersionForAuthenticatedUser: [\"DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForOrg: [\"DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n deletePackageVersionForUser: [\"DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getAllPackageVersionsForAPackageOwnedByAnOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByOrg\"]\n }],\n getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\", {}, {\n renamed: [\"packages\", \"getAllPackageVersionsForPackageOwnedByAuthenticatedUser\"]\n }],\n getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByOrg: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions\"],\n getAllPackageVersionsForPackageOwnedByUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions\"],\n getPackageForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}\"],\n getPackageForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}\"],\n getPackageForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}\"],\n getPackageVersionForAuthenticatedUser: [\"GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForOrganization: [\"GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n getPackageVersionForUser: [\"GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}\"],\n listPackagesForAuthenticatedUser: [\"GET /user/packages\"],\n listPackagesForOrganization: [\"GET /orgs/{org}/packages\"],\n listPackagesForUser: [\"GET /users/{username}/packages\"],\n restorePackageForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}\"],\n restorePackageVersionForAuthenticatedUser: [\"POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForOrg: [\"POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"],\n restorePackageVersionForUser: [\"POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore\"]\n },\n projects: {\n addCollaborator: [\"PUT /projects/{project_id}/collaborators/{username}\"],\n createCard: [\"POST 
/projects/columns/{column_id}/cards\"],\n createColumn: [\"POST /projects/{project_id}/columns\"],\n createForAuthenticatedUser: [\"POST /user/projects\"],\n createForOrg: [\"POST /orgs/{org}/projects\"],\n createForRepo: [\"POST /repos/{owner}/{repo}/projects\"],\n delete: [\"DELETE /projects/{project_id}\"],\n deleteCard: [\"DELETE /projects/columns/cards/{card_id}\"],\n deleteColumn: [\"DELETE /projects/columns/{column_id}\"],\n get: [\"GET /projects/{project_id}\"],\n getCard: [\"GET /projects/columns/cards/{card_id}\"],\n getColumn: [\"GET /projects/columns/{column_id}\"],\n getPermissionForUser: [\"GET /projects/{project_id}/collaborators/{username}/permission\"],\n listCards: [\"GET /projects/columns/{column_id}/cards\"],\n listCollaborators: [\"GET /projects/{project_id}/collaborators\"],\n listColumns: [\"GET /projects/{project_id}/columns\"],\n listForOrg: [\"GET /orgs/{org}/projects\"],\n listForRepo: [\"GET /repos/{owner}/{repo}/projects\"],\n listForUser: [\"GET /users/{username}/projects\"],\n moveCard: [\"POST /projects/columns/cards/{card_id}/moves\"],\n moveColumn: [\"POST /projects/columns/{column_id}/moves\"],\n removeCollaborator: [\"DELETE /projects/{project_id}/collaborators/{username}\"],\n update: [\"PATCH /projects/{project_id}\"],\n updateCard: [\"PATCH /projects/columns/cards/{card_id}\"],\n updateColumn: [\"PATCH /projects/columns/{column_id}\"]\n },\n pulls: {\n checkIfMerged: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n create: [\"POST /repos/{owner}/{repo}/pulls\"],\n createReplyForReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies\"],\n createReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n createReviewComment: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n deletePendingReview: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n deleteReviewComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n dismissReview: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals\"],\n get: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}\"],\n getReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n getReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}\"],\n list: [\"GET /repos/{owner}/{repo}/pulls\"],\n listCommentsForReview: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments\"],\n listCommits: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/commits\"],\n listFiles: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/files\"],\n listRequestedReviewers: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n listReviewComments: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/comments\"],\n listReviewCommentsForRepo: [\"GET /repos/{owner}/{repo}/pulls/comments\"],\n listReviews: [\"GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews\"],\n merge: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge\"],\n removeRequestedReviewers: [\"DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n requestReviewers: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers\"],\n submitReview: [\"POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events\"],\n update: [\"PATCH /repos/{owner}/{repo}/pulls/{pull_number}\"],\n updateBranch: [\"PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch\"],\n updateReview: [\"PUT 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}\"],\n updateReviewComment: [\"PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}\"]\n },\n rateLimit: {\n get: [\"GET /rate_limit\"]\n },\n reactions: {\n createForCommitComment: [\"POST /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n createForIssue: [\"POST /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n createForIssueComment: [\"POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n createForPullRequestReviewComment: [\"POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n createForRelease: [\"POST /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n createForTeamDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n createForTeamDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"],\n deleteForCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForIssue: [\"DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}\"],\n deleteForIssueComment: [\"DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForPullRequestComment: [\"DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}\"],\n deleteForRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}\"],\n deleteForTeamDiscussion: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}\"],\n deleteForTeamDiscussionComment: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}\"],\n listForCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}/reactions\"],\n listForIssue: [\"GET /repos/{owner}/{repo}/issues/{issue_number}/reactions\"],\n listForIssueComment: [\"GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions\"],\n listForPullRequestReviewComment: [\"GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions\"],\n listForRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}/reactions\"],\n listForTeamDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions\"],\n listForTeamDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions\"]\n },\n repos: {\n acceptInvitation: [\"PATCH /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"acceptInvitationForAuthenticatedUser\"]\n }],\n acceptInvitationForAuthenticatedUser: [\"PATCH /user/repository_invitations/{invitation_id}\"],\n addAppAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n addCollaborator: [\"PUT /repos/{owner}/{repo}/collaborators/{username}\"],\n addStatusCheckContexts: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n addTeamAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n addUserAccessRestrictions: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n checkCollaborator: [\"GET /repos/{owner}/{repo}/collaborators/{username}\"],\n checkVulnerabilityAlerts: [\"GET 
/repos/{owner}/{repo}/vulnerability-alerts\"],\n codeownersErrors: [\"GET /repos/{owner}/{repo}/codeowners/errors\"],\n compareCommits: [\"GET /repos/{owner}/{repo}/compare/{base}...{head}\"],\n compareCommitsWithBasehead: [\"GET /repos/{owner}/{repo}/compare/{basehead}\"],\n createAutolink: [\"POST /repos/{owner}/{repo}/autolinks\"],\n createCommitComment: [\"POST /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n createCommitSignatureProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n createCommitStatus: [\"POST /repos/{owner}/{repo}/statuses/{sha}\"],\n createDeployKey: [\"POST /repos/{owner}/{repo}/keys\"],\n createDeployment: [\"POST /repos/{owner}/{repo}/deployments\"],\n createDeploymentBranchPolicy: [\"POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"],\n createDeploymentStatus: [\"POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n createDispatchEvent: [\"POST /repos/{owner}/{repo}/dispatches\"],\n createForAuthenticatedUser: [\"POST /user/repos\"],\n createFork: [\"POST /repos/{owner}/{repo}/forks\"],\n createInOrg: [\"POST /orgs/{org}/repos\"],\n createOrUpdateEnvironment: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}\"],\n createOrUpdateFileContents: [\"PUT /repos/{owner}/{repo}/contents/{path}\"],\n createPagesDeployment: [\"POST /repos/{owner}/{repo}/pages/deployment\"],\n createPagesSite: [\"POST /repos/{owner}/{repo}/pages\"],\n createRelease: [\"POST /repos/{owner}/{repo}/releases\"],\n createTagProtection: [\"POST /repos/{owner}/{repo}/tags/protection\"],\n createUsingTemplate: [\"POST /repos/{template_owner}/{template_repo}/generate\"],\n createWebhook: [\"POST /repos/{owner}/{repo}/hooks\"],\n declineInvitation: [\"DELETE /user/repository_invitations/{invitation_id}\", {}, {\n renamed: [\"repos\", \"declineInvitationForAuthenticatedUser\"]\n }],\n declineInvitationForAuthenticatedUser: [\"DELETE /user/repository_invitations/{invitation_id}\"],\n delete: [\"DELETE /repos/{owner}/{repo}\"],\n deleteAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n deleteAdminBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n deleteAnEnvironment: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}\"],\n deleteAutolink: [\"DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n deleteBranchProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection\"],\n deleteCommitComment: [\"DELETE /repos/{owner}/{repo}/comments/{comment_id}\"],\n deleteCommitSignatureProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n deleteDeployKey: [\"DELETE /repos/{owner}/{repo}/keys/{key_id}\"],\n deleteDeployment: [\"DELETE /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n deleteDeploymentBranchPolicy: [\"DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"],\n deleteFile: [\"DELETE /repos/{owner}/{repo}/contents/{path}\"],\n deleteInvitation: [\"DELETE /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n deletePagesSite: [\"DELETE /repos/{owner}/{repo}/pages\"],\n deletePullRequestReviewProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n deleteRelease: [\"DELETE /repos/{owner}/{repo}/releases/{release_id}\"],\n deleteReleaseAsset: [\"DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n 
deleteTagProtection: [\"DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}\"],\n deleteWebhook: [\"DELETE /repos/{owner}/{repo}/hooks/{hook_id}\"],\n disableAutomatedSecurityFixes: [\"DELETE /repos/{owner}/{repo}/automated-security-fixes\"],\n disableLfsForRepo: [\"DELETE /repos/{owner}/{repo}/lfs\"],\n disableVulnerabilityAlerts: [\"DELETE /repos/{owner}/{repo}/vulnerability-alerts\"],\n downloadArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\", {}, {\n renamed: [\"repos\", \"downloadZipballArchive\"]\n }],\n downloadTarballArchive: [\"GET /repos/{owner}/{repo}/tarball/{ref}\"],\n downloadZipballArchive: [\"GET /repos/{owner}/{repo}/zipball/{ref}\"],\n enableAutomatedSecurityFixes: [\"PUT /repos/{owner}/{repo}/automated-security-fixes\"],\n enableLfsForRepo: [\"PUT /repos/{owner}/{repo}/lfs\"],\n enableVulnerabilityAlerts: [\"PUT /repos/{owner}/{repo}/vulnerability-alerts\"],\n generateReleaseNotes: [\"POST /repos/{owner}/{repo}/releases/generate-notes\"],\n get: [\"GET /repos/{owner}/{repo}\"],\n getAccessRestrictions: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions\"],\n getAdminBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n getAllEnvironments: [\"GET /repos/{owner}/{repo}/environments\"],\n getAllStatusCheckContexts: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\"],\n getAllTopics: [\"GET /repos/{owner}/{repo}/topics\"],\n getAppsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\"],\n getAutolink: [\"GET /repos/{owner}/{repo}/autolinks/{autolink_id}\"],\n getBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}\"],\n getBranchProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection\"],\n getClones: [\"GET /repos/{owner}/{repo}/traffic/clones\"],\n getCodeFrequencyStats: [\"GET /repos/{owner}/{repo}/stats/code_frequency\"],\n getCollaboratorPermissionLevel: [\"GET /repos/{owner}/{repo}/collaborators/{username}/permission\"],\n getCombinedStatusForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/status\"],\n getCommit: [\"GET /repos/{owner}/{repo}/commits/{ref}\"],\n getCommitActivityStats: [\"GET /repos/{owner}/{repo}/stats/commit_activity\"],\n getCommitComment: [\"GET /repos/{owner}/{repo}/comments/{comment_id}\"],\n getCommitSignatureProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures\"],\n getCommunityProfileMetrics: [\"GET /repos/{owner}/{repo}/community/profile\"],\n getContent: [\"GET /repos/{owner}/{repo}/contents/{path}\"],\n getContributorsStats: [\"GET /repos/{owner}/{repo}/stats/contributors\"],\n getDeployKey: [\"GET /repos/{owner}/{repo}/keys/{key_id}\"],\n getDeployment: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}\"],\n getDeploymentBranchPolicy: [\"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"],\n getDeploymentStatus: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}\"],\n getEnvironment: [\"GET /repos/{owner}/{repo}/environments/{environment_name}\"],\n getLatestPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/latest\"],\n getLatestRelease: [\"GET /repos/{owner}/{repo}/releases/latest\"],\n getPages: [\"GET /repos/{owner}/{repo}/pages\"],\n getPagesBuild: [\"GET /repos/{owner}/{repo}/pages/builds/{build_id}\"],\n getPagesHealthCheck: [\"GET /repos/{owner}/{repo}/pages/health\"],\n getParticipationStats: [\"GET 
/repos/{owner}/{repo}/stats/participation\"],\n getPullRequestReviewProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n getPunchCardStats: [\"GET /repos/{owner}/{repo}/stats/punch_card\"],\n getReadme: [\"GET /repos/{owner}/{repo}/readme\"],\n getReadmeInDirectory: [\"GET /repos/{owner}/{repo}/readme/{dir}\"],\n getRelease: [\"GET /repos/{owner}/{repo}/releases/{release_id}\"],\n getReleaseAsset: [\"GET /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n getReleaseByTag: [\"GET /repos/{owner}/{repo}/releases/tags/{tag}\"],\n getStatusChecksProtection: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n getTeamsWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\"],\n getTopPaths: [\"GET /repos/{owner}/{repo}/traffic/popular/paths\"],\n getTopReferrers: [\"GET /repos/{owner}/{repo}/traffic/popular/referrers\"],\n getUsersWithAccessToProtectedBranch: [\"GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\"],\n getViews: [\"GET /repos/{owner}/{repo}/traffic/views\"],\n getWebhook: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}\"],\n getWebhookConfigForRepo: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n getWebhookDelivery: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}\"],\n listAutolinks: [\"GET /repos/{owner}/{repo}/autolinks\"],\n listBranches: [\"GET /repos/{owner}/{repo}/branches\"],\n listBranchesForHeadCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head\"],\n listCollaborators: [\"GET /repos/{owner}/{repo}/collaborators\"],\n listCommentsForCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/comments\"],\n listCommitCommentsForRepo: [\"GET /repos/{owner}/{repo}/comments\"],\n listCommitStatusesForRef: [\"GET /repos/{owner}/{repo}/commits/{ref}/statuses\"],\n listCommits: [\"GET /repos/{owner}/{repo}/commits\"],\n listContributors: [\"GET /repos/{owner}/{repo}/contributors\"],\n listDeployKeys: [\"GET /repos/{owner}/{repo}/keys\"],\n listDeploymentBranchPolicies: [\"GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies\"],\n listDeploymentStatuses: [\"GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses\"],\n listDeployments: [\"GET /repos/{owner}/{repo}/deployments\"],\n listForAuthenticatedUser: [\"GET /user/repos\"],\n listForOrg: [\"GET /orgs/{org}/repos\"],\n listForUser: [\"GET /users/{username}/repos\"],\n listForks: [\"GET /repos/{owner}/{repo}/forks\"],\n listInvitations: [\"GET /repos/{owner}/{repo}/invitations\"],\n listInvitationsForAuthenticatedUser: [\"GET /user/repository_invitations\"],\n listLanguages: [\"GET /repos/{owner}/{repo}/languages\"],\n listPagesBuilds: [\"GET /repos/{owner}/{repo}/pages/builds\"],\n listPublic: [\"GET /repositories\"],\n listPullRequestsAssociatedWithCommit: [\"GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls\"],\n listReleaseAssets: [\"GET /repos/{owner}/{repo}/releases/{release_id}/assets\"],\n listReleases: [\"GET /repos/{owner}/{repo}/releases\"],\n listTagProtection: [\"GET /repos/{owner}/{repo}/tags/protection\"],\n listTags: [\"GET /repos/{owner}/{repo}/tags\"],\n listTeams: [\"GET /repos/{owner}/{repo}/teams\"],\n listWebhookDeliveries: [\"GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries\"],\n listWebhooks: [\"GET /repos/{owner}/{repo}/hooks\"],\n merge: [\"POST /repos/{owner}/{repo}/merges\"],\n mergeUpstream: [\"POST 
/repos/{owner}/{repo}/merge-upstream\"],\n pingWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/pings\"],\n redeliverWebhookDelivery: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts\"],\n removeAppAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n removeCollaborator: [\"DELETE /repos/{owner}/{repo}/collaborators/{username}\"],\n removeStatusCheckContexts: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n removeStatusCheckProtection: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n removeTeamAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n removeUserAccessRestrictions: [\"DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n renameBranch: [\"POST /repos/{owner}/{repo}/branches/{branch}/rename\"],\n replaceAllTopics: [\"PUT /repos/{owner}/{repo}/topics\"],\n requestPagesBuild: [\"POST /repos/{owner}/{repo}/pages/builds\"],\n setAdminBranchProtection: [\"POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins\"],\n setAppAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps\", {}, {\n mapToData: \"apps\"\n }],\n setStatusCheckContexts: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts\", {}, {\n mapToData: \"contexts\"\n }],\n setTeamAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams\", {}, {\n mapToData: \"teams\"\n }],\n setUserAccessRestrictions: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users\", {}, {\n mapToData: \"users\"\n }],\n testPushWebhook: [\"POST /repos/{owner}/{repo}/hooks/{hook_id}/tests\"],\n transfer: [\"POST /repos/{owner}/{repo}/transfer\"],\n update: [\"PATCH /repos/{owner}/{repo}\"],\n updateBranchProtection: [\"PUT /repos/{owner}/{repo}/branches/{branch}/protection\"],\n updateCommitComment: [\"PATCH /repos/{owner}/{repo}/comments/{comment_id}\"],\n updateDeploymentBranchPolicy: [\"PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}\"],\n updateInformationAboutPagesSite: [\"PUT /repos/{owner}/{repo}/pages\"],\n updateInvitation: [\"PATCH /repos/{owner}/{repo}/invitations/{invitation_id}\"],\n updatePullRequestReviewProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews\"],\n updateRelease: [\"PATCH /repos/{owner}/{repo}/releases/{release_id}\"],\n updateReleaseAsset: [\"PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}\"],\n updateStatusCheckPotection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\", {}, {\n renamed: [\"repos\", \"updateStatusCheckProtection\"]\n }],\n updateStatusCheckProtection: [\"PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks\"],\n updateWebhook: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}\"],\n updateWebhookConfigForRepo: [\"PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config\"],\n uploadReleaseAsset: [\"POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}\", {\n baseUrl: \"https://uploads.github.com\"\n }]\n },\n search: {\n code: [\"GET /search/code\"],\n commits: [\"GET 
/search/commits\"],\n issuesAndPullRequests: [\"GET /search/issues\"],\n labels: [\"GET /search/labels\"],\n repos: [\"GET /search/repositories\"],\n topics: [\"GET /search/topics\"],\n users: [\"GET /search/users\"]\n },\n secretScanning: {\n getAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"],\n getSecurityAnalysisSettingsForEnterprise: [\"GET /enterprises/{enterprise}/code_security_and_analysis\"],\n listAlertsForEnterprise: [\"GET /enterprises/{enterprise}/secret-scanning/alerts\"],\n listAlertsForOrg: [\"GET /orgs/{org}/secret-scanning/alerts\"],\n listAlertsForRepo: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts\"],\n listLocationsForAlert: [\"GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations\"],\n patchSecurityAnalysisSettingsForEnterprise: [\"PATCH /enterprises/{enterprise}/code_security_and_analysis\"],\n postSecurityProductEnablementForEnterprise: [\"POST /enterprises/{enterprise}/{security_product}/{enablement}\"],\n updateAlert: [\"PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}\"]\n },\n teams: {\n addOrUpdateMembershipForUserInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n addOrUpdateProjectPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n addOrUpdateRepoPermissionsInOrg: [\"PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n checkPermissionsForProjectInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n checkPermissionsForRepoInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n create: [\"POST /orgs/{org}/teams\"],\n createDiscussionCommentInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n createDiscussionInOrg: [\"POST /orgs/{org}/teams/{team_slug}/discussions\"],\n deleteDiscussionCommentInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n deleteDiscussionInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n deleteInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}\"],\n getByName: [\"GET /orgs/{org}/teams/{team_slug}\"],\n getDiscussionCommentInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n getDiscussionInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n getMembershipForUserInOrg: [\"GET /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n list: [\"GET /orgs/{org}/teams\"],\n listChildInOrg: [\"GET /orgs/{org}/teams/{team_slug}/teams\"],\n listDiscussionCommentsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments\"],\n listDiscussionsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/discussions\"],\n listForAuthenticatedUser: [\"GET /user/teams\"],\n listMembersInOrg: [\"GET /orgs/{org}/teams/{team_slug}/members\"],\n listPendingInvitationsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/invitations\"],\n listProjectsInOrg: [\"GET /orgs/{org}/teams/{team_slug}/projects\"],\n listReposInOrg: [\"GET /orgs/{org}/teams/{team_slug}/repos\"],\n removeMembershipForUserInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}\"],\n removeProjectInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}\"],\n removeRepoInOrg: [\"DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}\"],\n updateDiscussionCommentInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}\"],\n updateDiscussionInOrg: 
[\"PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}\"],\n updateInOrg: [\"PATCH /orgs/{org}/teams/{team_slug}\"]\n },\n users: {\n addEmailForAuthenticated: [\"POST /user/emails\", {}, {\n renamed: [\"users\", \"addEmailForAuthenticatedUser\"]\n }],\n addEmailForAuthenticatedUser: [\"POST /user/emails\"],\n block: [\"PUT /user/blocks/{username}\"],\n checkBlocked: [\"GET /user/blocks/{username}\"],\n checkFollowingForUser: [\"GET /users/{username}/following/{target_user}\"],\n checkPersonIsFollowedByAuthenticated: [\"GET /user/following/{username}\"],\n createGpgKeyForAuthenticated: [\"POST /user/gpg_keys\", {}, {\n renamed: [\"users\", \"createGpgKeyForAuthenticatedUser\"]\n }],\n createGpgKeyForAuthenticatedUser: [\"POST /user/gpg_keys\"],\n createPublicSshKeyForAuthenticated: [\"POST /user/keys\", {}, {\n renamed: [\"users\", \"createPublicSshKeyForAuthenticatedUser\"]\n }],\n createPublicSshKeyForAuthenticatedUser: [\"POST /user/keys\"],\n createSshSigningKeyForAuthenticatedUser: [\"POST /user/ssh_signing_keys\"],\n deleteEmailForAuthenticated: [\"DELETE /user/emails\", {}, {\n renamed: [\"users\", \"deleteEmailForAuthenticatedUser\"]\n }],\n deleteEmailForAuthenticatedUser: [\"DELETE /user/emails\"],\n deleteGpgKeyForAuthenticated: [\"DELETE /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"deleteGpgKeyForAuthenticatedUser\"]\n }],\n deleteGpgKeyForAuthenticatedUser: [\"DELETE /user/gpg_keys/{gpg_key_id}\"],\n deletePublicSshKeyForAuthenticated: [\"DELETE /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"deletePublicSshKeyForAuthenticatedUser\"]\n }],\n deletePublicSshKeyForAuthenticatedUser: [\"DELETE /user/keys/{key_id}\"],\n deleteSshSigningKeyForAuthenticatedUser: [\"DELETE /user/ssh_signing_keys/{ssh_signing_key_id}\"],\n follow: [\"PUT /user/following/{username}\"],\n getAuthenticated: [\"GET /user\"],\n getByUsername: [\"GET /users/{username}\"],\n getContextForUser: [\"GET /users/{username}/hovercard\"],\n getGpgKeyForAuthenticated: [\"GET /user/gpg_keys/{gpg_key_id}\", {}, {\n renamed: [\"users\", \"getGpgKeyForAuthenticatedUser\"]\n }],\n getGpgKeyForAuthenticatedUser: [\"GET /user/gpg_keys/{gpg_key_id}\"],\n getPublicSshKeyForAuthenticated: [\"GET /user/keys/{key_id}\", {}, {\n renamed: [\"users\", \"getPublicSshKeyForAuthenticatedUser\"]\n }],\n getPublicSshKeyForAuthenticatedUser: [\"GET /user/keys/{key_id}\"],\n getSshSigningKeyForAuthenticatedUser: [\"GET /user/ssh_signing_keys/{ssh_signing_key_id}\"],\n list: [\"GET /users\"],\n listBlockedByAuthenticated: [\"GET /user/blocks\", {}, {\n renamed: [\"users\", \"listBlockedByAuthenticatedUser\"]\n }],\n listBlockedByAuthenticatedUser: [\"GET /user/blocks\"],\n listEmailsForAuthenticated: [\"GET /user/emails\", {}, {\n renamed: [\"users\", \"listEmailsForAuthenticatedUser\"]\n }],\n listEmailsForAuthenticatedUser: [\"GET /user/emails\"],\n listFollowedByAuthenticated: [\"GET /user/following\", {}, {\n renamed: [\"users\", \"listFollowedByAuthenticatedUser\"]\n }],\n listFollowedByAuthenticatedUser: [\"GET /user/following\"],\n listFollowersForAuthenticatedUser: [\"GET /user/followers\"],\n listFollowersForUser: [\"GET /users/{username}/followers\"],\n listFollowingForUser: [\"GET /users/{username}/following\"],\n listGpgKeysForAuthenticated: [\"GET /user/gpg_keys\", {}, {\n renamed: [\"users\", \"listGpgKeysForAuthenticatedUser\"]\n }],\n listGpgKeysForAuthenticatedUser: [\"GET /user/gpg_keys\"],\n listGpgKeysForUser: [\"GET /users/{username}/gpg_keys\"],\n 
listPublicEmailsForAuthenticated: [\"GET /user/public_emails\", {}, {\n renamed: [\"users\", \"listPublicEmailsForAuthenticatedUser\"]\n }],\n listPublicEmailsForAuthenticatedUser: [\"GET /user/public_emails\"],\n listPublicKeysForUser: [\"GET /users/{username}/keys\"],\n listPublicSshKeysForAuthenticated: [\"GET /user/keys\", {}, {\n renamed: [\"users\", \"listPublicSshKeysForAuthenticatedUser\"]\n }],\n listPublicSshKeysForAuthenticatedUser: [\"GET /user/keys\"],\n listSshSigningKeysForAuthenticatedUser: [\"GET /user/ssh_signing_keys\"],\n listSshSigningKeysForUser: [\"GET /users/{username}/ssh_signing_keys\"],\n setPrimaryEmailVisibilityForAuthenticated: [\"PATCH /user/email/visibility\", {}, {\n renamed: [\"users\", \"setPrimaryEmailVisibilityForAuthenticatedUser\"]\n }],\n setPrimaryEmailVisibilityForAuthenticatedUser: [\"PATCH /user/email/visibility\"],\n unblock: [\"DELETE /user/blocks/{username}\"],\n unfollow: [\"DELETE /user/following/{username}\"],\n updateAuthenticated: [\"PATCH /user\"]\n }\n};\n\nconst VERSION = \"7.0.1\";\n\nfunction endpointsToMethods(octokit, endpointsMap) {\n const newMethods = {};\n for (const [scope, endpoints] of Object.entries(endpointsMap)) {\n for (const [methodName, endpoint] of Object.entries(endpoints)) {\n const [route, defaults, decorations] = endpoint;\n const [method, url] = route.split(/ /);\n const endpointDefaults = Object.assign({\n method,\n url\n }, defaults);\n if (!newMethods[scope]) {\n newMethods[scope] = {};\n }\n const scopeMethods = newMethods[scope];\n if (decorations) {\n scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);\n continue;\n }\n scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);\n }\n }\n return newMethods;\n}\nfunction decorate(octokit, scope, methodName, defaults, decorations) {\n const requestWithDefaults = octokit.request.defaults(defaults);\n /* istanbul ignore next */\n function withDecorations(...args) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n let options = requestWithDefaults.endpoint.merge(...args);\n // There are currently no other decorations than `.mapToData`\n if (decorations.mapToData) {\n options = Object.assign({}, options, {\n data: options[decorations.mapToData],\n [decorations.mapToData]: undefined\n });\n return requestWithDefaults(options);\n }\n if (decorations.renamed) {\n const [newScope, newMethodName] = decorations.renamed;\n octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);\n }\n if (decorations.deprecated) {\n octokit.log.warn(decorations.deprecated);\n }\n if (decorations.renamedParameters) {\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n const options = requestWithDefaults.endpoint.merge(...args);\n for (const [name, alias] of Object.entries(decorations.renamedParameters)) {\n if (name in options) {\n octokit.log.warn(`\"${name}\" parameter is deprecated for \"octokit.${scope}.${methodName}()\". 
Use \"${alias}\" instead`);\n if (!(alias in options)) {\n options[alias] = options[name];\n }\n delete options[name];\n }\n }\n return requestWithDefaults(options);\n }\n // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488\n return requestWithDefaults(...args);\n }\n return Object.assign(withDecorations, requestWithDefaults);\n}\n\nfunction restEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n rest: api\n };\n}\nrestEndpointMethods.VERSION = VERSION;\nfunction legacyRestEndpointMethods(octokit) {\n const api = endpointsToMethods(octokit, Endpoints);\n return {\n ...api,\n rest: api\n };\n}\nlegacyRestEndpointMethods.VERSION = VERSION;\n\nexports.legacyRestEndpointMethods = legacyRestEndpointMethods;\nexports.restEndpointMethods = restEndpointMethods;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar deprecation = require('deprecation');\nvar once = _interopDefault(require('once'));\n\nconst logOnceCode = once(deprecation => console.warn(deprecation));\nconst logOnceHeaders = once(deprecation => console.warn(deprecation));\n/**\n * Error with extra properties to help with debugging\n */\nclass RequestError extends Error {\n constructor(message, statusCode, options) {\n super(message);\n // Maintains proper stack trace (only available on V8)\n /* istanbul ignore next */\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n this.name = \"HttpError\";\n this.status = statusCode;\n let headers;\n if (\"headers\" in options && typeof options.headers !== \"undefined\") {\n headers = options.headers;\n }\n if (\"response\" in options) {\n this.response = options.response;\n headers = options.response.headers;\n }\n // redact request credentials without mutating original request options\n const requestCopy = Object.assign({}, options.request);\n if (options.request.headers.authorization) {\n requestCopy.headers = Object.assign({}, options.request.headers, {\n authorization: options.request.headers.authorization.replace(/ .*$/, \" [REDACTED]\")\n });\n }\n requestCopy.url = requestCopy.url\n // client_id & client_secret can be passed as URL query parameters to increase rate limit\n // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications\n .replace(/\\bclient_secret=\\w+/g, \"client_secret=[REDACTED]\")\n // OAuth tokens can be passed as URL query parameters, although it is not recommended\n // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header\n .replace(/\\baccess_token=\\w+/g, \"access_token=[REDACTED]\");\n this.request = requestCopy;\n // deprecations\n Object.defineProperty(this, \"code\", {\n get() {\n logOnceCode(new deprecation.Deprecation(\"[@octokit/request-error] `error.code` is deprecated, use `error.status`.\"));\n return statusCode;\n }\n });\n Object.defineProperty(this, \"headers\", {\n get() {\n logOnceHeaders(new deprecation.Deprecation(\"[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.\"));\n return headers || {};\n }\n });\n }\n}\n\nexports.RequestError = RequestError;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }\n\nvar endpoint = require('@octokit/endpoint');\nvar universalUserAgent = require('universal-user-agent');\nvar isPlainObject = require('is-plain-object');\nvar nodeFetch = _interopDefault(require('node-fetch'));\nvar requestError = require('@octokit/request-error');\n\nconst VERSION = \"6.2.3\";\n\nfunction getBufferResponse(response) {\n return response.arrayBuffer();\n}\n\nfunction fetchWrapper(requestOptions) {\n const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;\n if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {\n requestOptions.body = JSON.stringify(requestOptions.body);\n }\n let headers = {};\n let status;\n let url;\n const fetch = requestOptions.request && requestOptions.request.fetch || globalThis.fetch || /* istanbul ignore next */nodeFetch;\n return fetch(requestOptions.url, Object.assign({\n method: requestOptions.method,\n body: requestOptions.body,\n headers: requestOptions.headers,\n redirect: requestOptions.redirect\n },\n // `requestOptions.request.agent` type is incompatible\n // see https://github.com/octokit/types.ts/pull/264\n requestOptions.request)).then(async response => {\n url = response.url;\n status = response.status;\n for (const keyAndValue of response.headers) {\n headers[keyAndValue[0]] = keyAndValue[1];\n }\n if (\"deprecation\" in headers) {\n const matches = headers.link && headers.link.match(/<([^>]+)>; rel=\"deprecation\"/);\n const deprecationLink = matches && matches.pop();\n log.warn(`[@octokit/request] \"${requestOptions.method} ${requestOptions.url}\" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : \"\"}`);\n }\n if (status === 204 || status === 205) {\n return;\n }\n // GitHub API returns 200 for HEAD requests\n if (requestOptions.method === \"HEAD\") {\n if (status < 400) {\n return;\n }\n throw new requestError.RequestError(response.statusText, status, {\n response: {\n url,\n status,\n headers,\n data: undefined\n },\n request: requestOptions\n });\n }\n if (status === 304) {\n throw new requestError.RequestError(\"Not modified\", status, {\n response: {\n url,\n status,\n headers,\n data: await getResponseData(response)\n },\n request: requestOptions\n });\n }\n if (status >= 400) {\n const data = await getResponseData(response);\n const error = new requestError.RequestError(toErrorMessage(data), status, {\n response: {\n url,\n status,\n headers,\n data\n },\n request: requestOptions\n });\n throw error;\n }\n return getResponseData(response);\n }).then(data => {\n return {\n status,\n url,\n headers,\n data\n };\n }).catch(error => {\n if (error instanceof requestError.RequestError) throw error;else if (error.name === \"AbortError\") throw error;\n throw new requestError.RequestError(error.message, 500, {\n request: requestOptions\n });\n });\n}\nasync function getResponseData(response) {\n const contentType = response.headers.get(\"content-type\");\n if (/application\\/json/.test(contentType)) {\n return response.json();\n }\n if (!contentType || /^text\\/|charset=utf-8$/.test(contentType)) {\n return response.text();\n }\n return getBufferResponse(response);\n}\nfunction toErrorMessage(data) {\n if (typeof data === \"string\") return data;\n // istanbul ignore else - just in case\n if (\"message\" in data) {\n if (Array.isArray(data.errors)) {\n return `${data.message}: ${data.errors.map(JSON.stringify).join(\", \")}`;\n }\n return data.message;\n }\n // 
istanbul ignore next - just in case\n return `Unknown error: ${JSON.stringify(data)}`;\n}\n\nfunction withDefaults(oldEndpoint, newDefaults) {\n const endpoint = oldEndpoint.defaults(newDefaults);\n const newApi = function (route, parameters) {\n const endpointOptions = endpoint.merge(route, parameters);\n if (!endpointOptions.request || !endpointOptions.request.hook) {\n return fetchWrapper(endpoint.parse(endpointOptions));\n }\n const request = (route, parameters) => {\n return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));\n };\n Object.assign(request, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n return endpointOptions.request.hook(request, endpointOptions);\n };\n return Object.assign(newApi, {\n endpoint,\n defaults: withDefaults.bind(null, endpoint)\n });\n}\n\nconst request = withDefaults(endpoint.endpoint, {\n headers: {\n \"user-agent\": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`\n }\n});\n\nexports.request = request;\n//# sourceMappingURL=index.js.map\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar core = require('@octokit/core');\nvar pluginRequestLog = require('@octokit/plugin-request-log');\nvar pluginPaginateRest = require('@octokit/plugin-paginate-rest');\nvar pluginRestEndpointMethods = require('@octokit/plugin-rest-endpoint-methods');\n\nconst VERSION = \"19.0.7\";\n\nconst Octokit = core.Octokit.plugin(pluginRequestLog.requestLog, pluginRestEndpointMethods.legacyRestEndpointMethods, pluginPaginateRest.paginateRest).defaults({\n userAgent: `octokit-rest.js/${VERSION}`\n});\n\nexports.Octokit = Octokit;\n//# sourceMappingURL=index.js.map\n","/**\n * @author Toru Nagashima \n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nvar eventTargetShim = require('event-target-shim');\n\n/**\n * The signal class.\n * @see https://dom.spec.whatwg.org/#abortsignal\n */\nclass AbortSignal extends eventTargetShim.EventTarget {\n /**\n * AbortSignal cannot be constructed directly.\n */\n constructor() {\n super();\n throw new TypeError(\"AbortSignal cannot be constructed directly\");\n }\n /**\n * Returns `true` if this `AbortSignal`'s `AbortController` has signaled to abort, and `false` otherwise.\n */\n get aborted() {\n const aborted = abortedFlags.get(this);\n if (typeof aborted !== \"boolean\") {\n throw new TypeError(`Expected 'this' to be an 'AbortSignal' object, but got ${this === null ? 
\"null\" : typeof this}`);\n }\n return aborted;\n }\n}\neventTargetShim.defineEventAttribute(AbortSignal.prototype, \"abort\");\n/**\n * Create an AbortSignal object.\n */\nfunction createAbortSignal() {\n const signal = Object.create(AbortSignal.prototype);\n eventTargetShim.EventTarget.call(signal);\n abortedFlags.set(signal, false);\n return signal;\n}\n/**\n * Abort a given signal.\n */\nfunction abortSignal(signal) {\n if (abortedFlags.get(signal) !== false) {\n return;\n }\n abortedFlags.set(signal, true);\n signal.dispatchEvent({ type: \"abort\" });\n}\n/**\n * Aborted flag for each instances.\n */\nconst abortedFlags = new WeakMap();\n// Properties should be enumerable.\nObject.defineProperties(AbortSignal.prototype, {\n aborted: { enumerable: true },\n});\n// `toString()` should return `\"[object AbortSignal]\"`\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortSignal.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortSignal\",\n });\n}\n\n/**\n * The AbortController.\n * @see https://dom.spec.whatwg.org/#abortcontroller\n */\nclass AbortController {\n /**\n * Initialize this controller.\n */\n constructor() {\n signals.set(this, createAbortSignal());\n }\n /**\n * Returns the `AbortSignal` object associated with this object.\n */\n get signal() {\n return getSignal(this);\n }\n /**\n * Abort and signal to any observers that the associated activity is to be aborted.\n */\n abort() {\n abortSignal(getSignal(this));\n }\n}\n/**\n * Associated signals.\n */\nconst signals = new WeakMap();\n/**\n * Get the associated signal of a given controller.\n */\nfunction getSignal(controller) {\n const signal = signals.get(controller);\n if (signal == null) {\n throw new TypeError(`Expected 'this' to be an 'AbortController' object, but got ${controller === null ? 
\"null\" : typeof controller}`);\n }\n return signal;\n}\n// Properties should be enumerable.\nObject.defineProperties(AbortController.prototype, {\n signal: { enumerable: true },\n abort: { enumerable: true },\n});\nif (typeof Symbol === \"function\" && typeof Symbol.toStringTag === \"symbol\") {\n Object.defineProperty(AbortController.prototype, Symbol.toStringTag, {\n configurable: true,\n value: \"AbortController\",\n });\n}\n\nexports.AbortController = AbortController;\nexports.AbortSignal = AbortSignal;\nexports.default = AbortController;\n\nmodule.exports = AbortController\nmodule.exports.AbortController = module.exports[\"default\"] = AbortController\nmodule.exports.AbortSignal = AbortSignal\n//# sourceMappingURL=abort-controller.js.map\n","'use strict';\n\nmodule.exports = require('./lib/agent');\nmodule.exports.HttpsAgent = require('./lib/https_agent');\nmodule.exports.constants = require('./lib/constants');\n","'use strict';\n\nconst OriginalAgent = require('http').Agent;\nconst ms = require('humanize-ms');\nconst debug = require('util').debuglog('agentkeepalive');\nconst {\n INIT_SOCKET,\n CURRENT_ID,\n CREATE_ID,\n SOCKET_CREATED_TIME,\n SOCKET_NAME,\n SOCKET_REQUEST_COUNT,\n SOCKET_REQUEST_FINISHED_COUNT,\n} = require('./constants');\n\n// OriginalAgent come from\n// - https://github.com/nodejs/node/blob/v8.12.0/lib/_http_agent.js\n// - https://github.com/nodejs/node/blob/v10.12.0/lib/_http_agent.js\n\n// node <= 10\nlet defaultTimeoutListenerCount = 1;\nconst majorVersion = parseInt(process.version.split('.', 1)[0].substring(1));\nif (majorVersion >= 11 && majorVersion <= 12) {\n defaultTimeoutListenerCount = 2;\n} else if (majorVersion >= 13) {\n defaultTimeoutListenerCount = 3;\n}\n\nfunction deprecate(message) {\n console.log('[agentkeepalive:deprecated] %s', message);\n}\n\nclass Agent extends OriginalAgent {\n constructor(options) {\n options = options || {};\n options.keepAlive = options.keepAlive !== false;\n // default is keep-alive and 4s free socket timeout\n // see https://medium.com/ssense-tech/reduce-networking-errors-in-nodejs-23b4eb9f2d83\n if (options.freeSocketTimeout === undefined) {\n options.freeSocketTimeout = 4000;\n }\n // Legacy API: keepAliveTimeout should be rename to `freeSocketTimeout`\n if (options.keepAliveTimeout) {\n deprecate('options.keepAliveTimeout is deprecated, please use options.freeSocketTimeout instead');\n options.freeSocketTimeout = options.keepAliveTimeout;\n delete options.keepAliveTimeout;\n }\n // Legacy API: freeSocketKeepAliveTimeout should be rename to `freeSocketTimeout`\n if (options.freeSocketKeepAliveTimeout) {\n deprecate('options.freeSocketKeepAliveTimeout is deprecated, please use options.freeSocketTimeout instead');\n options.freeSocketTimeout = options.freeSocketKeepAliveTimeout;\n delete options.freeSocketKeepAliveTimeout;\n }\n\n // Sets the socket to timeout after timeout milliseconds of inactivity on the socket.\n // By default is double free socket timeout.\n if (options.timeout === undefined) {\n // make sure socket default inactivity timeout >= 8s\n options.timeout = Math.max(options.freeSocketTimeout * 2, 8000);\n }\n\n // support humanize format\n options.timeout = ms(options.timeout);\n options.freeSocketTimeout = ms(options.freeSocketTimeout);\n options.socketActiveTTL = options.socketActiveTTL ? 
ms(options.socketActiveTTL) : 0;\n\n super(options);\n\n this[CURRENT_ID] = 0;\n\n // create socket success counter\n this.createSocketCount = 0;\n this.createSocketCountLastCheck = 0;\n\n this.createSocketErrorCount = 0;\n this.createSocketErrorCountLastCheck = 0;\n\n this.closeSocketCount = 0;\n this.closeSocketCountLastCheck = 0;\n\n // socket error event count\n this.errorSocketCount = 0;\n this.errorSocketCountLastCheck = 0;\n\n // request finished counter\n this.requestCount = 0;\n this.requestCountLastCheck = 0;\n\n // including free socket timeout counter\n this.timeoutSocketCount = 0;\n this.timeoutSocketCountLastCheck = 0;\n\n this.on('free', socket => {\n // https://github.com/nodejs/node/pull/32000\n // Node.js native agent will check socket timeout eqs agent.options.timeout.\n // Use the ttl or freeSocketTimeout to overwrite.\n const timeout = this.calcSocketTimeout(socket);\n if (timeout > 0 && socket.timeout !== timeout) {\n socket.setTimeout(timeout);\n }\n });\n }\n\n get freeSocketKeepAliveTimeout() {\n deprecate('agent.freeSocketKeepAliveTimeout is deprecated, please use agent.options.freeSocketTimeout instead');\n return this.options.freeSocketTimeout;\n }\n\n get timeout() {\n deprecate('agent.timeout is deprecated, please use agent.options.timeout instead');\n return this.options.timeout;\n }\n\n get socketActiveTTL() {\n deprecate('agent.socketActiveTTL is deprecated, please use agent.options.socketActiveTTL instead');\n return this.options.socketActiveTTL;\n }\n\n calcSocketTimeout(socket) {\n /**\n * return <= 0: should free socket\n * return > 0: should update socket timeout\n * return undefined: not find custom timeout\n */\n let freeSocketTimeout = this.options.freeSocketTimeout;\n const socketActiveTTL = this.options.socketActiveTTL;\n if (socketActiveTTL) {\n // check socketActiveTTL\n const aliveTime = Date.now() - socket[SOCKET_CREATED_TIME];\n const diff = socketActiveTTL - aliveTime;\n if (diff <= 0) {\n return diff;\n }\n if (freeSocketTimeout && diff < freeSocketTimeout) {\n freeSocketTimeout = diff;\n }\n }\n // set freeSocketTimeout\n if (freeSocketTimeout) {\n // set free keepalive timer\n // try to use socket custom freeSocketTimeout first, support headers['keep-alive']\n // https://github.com/node-modules/urllib/blob/b76053020923f4d99a1c93cf2e16e0c5ba10bacf/lib/urllib.js#L498\n const customFreeSocketTimeout = socket.freeSocketTimeout || socket.freeSocketKeepAliveTimeout;\n return customFreeSocketTimeout || freeSocketTimeout;\n }\n }\n\n keepSocketAlive(socket) {\n const result = super.keepSocketAlive(socket);\n // should not keepAlive, do nothing\n if (!result) return result;\n\n const customTimeout = this.calcSocketTimeout(socket);\n if (typeof customTimeout === 'undefined') {\n return true;\n }\n if (customTimeout <= 0) {\n debug('%s(requests: %s, finished: %s) free but need to destroy by TTL, request count %s, diff is %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], customTimeout);\n return false;\n }\n if (socket.timeout !== customTimeout) {\n socket.setTimeout(customTimeout);\n }\n return true;\n }\n\n // only call on addRequest\n reuseSocket(...args) {\n // reuseSocket(socket, req)\n super.reuseSocket(...args);\n const socket = args[0];\n const req = args[1];\n req.reusedSocket = true;\n const agentTimeout = this.options.timeout;\n if (getSocketTimeout(socket) !== agentTimeout) {\n // reset timeout before use\n socket.setTimeout(agentTimeout);\n debug('%s reset timeout to %sms', 
socket[SOCKET_NAME], agentTimeout);\n }\n socket[SOCKET_REQUEST_COUNT]++;\n debug('%s(requests: %s, finished: %s) reuse on addRequest, timeout %sms',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT],\n getSocketTimeout(socket));\n }\n\n [CREATE_ID]() {\n const id = this[CURRENT_ID]++;\n if (this[CURRENT_ID] === Number.MAX_SAFE_INTEGER) this[CURRENT_ID] = 0;\n return id;\n }\n\n [INIT_SOCKET](socket, options) {\n // bugfix here.\n // https on node 8, 10 won't set agent.options.timeout by default\n // TODO: need to fix on node itself\n if (options.timeout) {\n const timeout = getSocketTimeout(socket);\n if (!timeout) {\n socket.setTimeout(options.timeout);\n }\n }\n\n if (this.options.keepAlive) {\n // Disable Nagle's algorithm: http://blog.caustik.com/2012/04/08/scaling-node-js-to-100k-concurrent-connections/\n // https://fengmk2.com/benchmark/nagle-algorithm-delayed-ack-mock.html\n socket.setNoDelay(true);\n }\n this.createSocketCount++;\n if (this.options.socketActiveTTL) {\n socket[SOCKET_CREATED_TIME] = Date.now();\n }\n // don't show the hole '-----BEGIN CERTIFICATE----' key string\n socket[SOCKET_NAME] = `sock[${this[CREATE_ID]()}#${options._agentKey}]`.split('-----BEGIN', 1)[0];\n socket[SOCKET_REQUEST_COUNT] = 1;\n socket[SOCKET_REQUEST_FINISHED_COUNT] = 0;\n installListeners(this, socket, options);\n }\n\n createConnection(options, oncreate) {\n let called = false;\n const onNewCreate = (err, socket) => {\n if (called) return;\n called = true;\n\n if (err) {\n this.createSocketErrorCount++;\n return oncreate(err);\n }\n this[INIT_SOCKET](socket, options);\n oncreate(err, socket);\n };\n\n const newSocket = super.createConnection(options, onNewCreate);\n if (newSocket) onNewCreate(null, newSocket);\n return newSocket;\n }\n\n get statusChanged() {\n const changed = this.createSocketCount !== this.createSocketCountLastCheck ||\n this.createSocketErrorCount !== this.createSocketErrorCountLastCheck ||\n this.closeSocketCount !== this.closeSocketCountLastCheck ||\n this.errorSocketCount !== this.errorSocketCountLastCheck ||\n this.timeoutSocketCount !== this.timeoutSocketCountLastCheck ||\n this.requestCount !== this.requestCountLastCheck;\n if (changed) {\n this.createSocketCountLastCheck = this.createSocketCount;\n this.createSocketErrorCountLastCheck = this.createSocketErrorCount;\n this.closeSocketCountLastCheck = this.closeSocketCount;\n this.errorSocketCountLastCheck = this.errorSocketCount;\n this.timeoutSocketCountLastCheck = this.timeoutSocketCount;\n this.requestCountLastCheck = this.requestCount;\n }\n return changed;\n }\n\n getCurrentStatus() {\n return {\n createSocketCount: this.createSocketCount,\n createSocketErrorCount: this.createSocketErrorCount,\n closeSocketCount: this.closeSocketCount,\n errorSocketCount: this.errorSocketCount,\n timeoutSocketCount: this.timeoutSocketCount,\n requestCount: this.requestCount,\n freeSockets: inspect(this.freeSockets),\n sockets: inspect(this.sockets),\n requests: inspect(this.requests),\n };\n }\n}\n\n// node 8 don't has timeout attribute on socket\n// https://github.com/nodejs/node/pull/21204/files#diff-e6ef024c3775d787c38487a6309e491dR408\nfunction getSocketTimeout(socket) {\n return socket.timeout || socket._idleTimeout;\n}\n\nfunction installListeners(agent, socket, options) {\n debug('%s create, timeout %sms', socket[SOCKET_NAME], getSocketTimeout(socket));\n\n // listener socket events: close, timeout, error, free\n function onFree() {\n // create and socket.emit('free') logic\n // 
https://github.com/nodejs/node/blob/master/lib/_http_agent.js#L311\n // no req on the socket, it should be the new socket\n if (!socket._httpMessage && socket[SOCKET_REQUEST_COUNT] === 1) return;\n\n socket[SOCKET_REQUEST_FINISHED_COUNT]++;\n agent.requestCount++;\n debug('%s(requests: %s, finished: %s) free',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]);\n\n // should reuse on pedding requests?\n const name = agent.getName(options);\n if (socket.writable && agent.requests[name] && agent.requests[name].length) {\n // will be reuse on agent free listener\n socket[SOCKET_REQUEST_COUNT]++;\n debug('%s(requests: %s, finished: %s) will be reuse on agent free event',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]);\n }\n }\n socket.on('free', onFree);\n\n function onClose(isError) {\n debug('%s(requests: %s, finished: %s) close, isError: %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT], isError);\n agent.closeSocketCount++;\n }\n socket.on('close', onClose);\n\n // start socket timeout handler\n function onTimeout() {\n // onTimeout and emitRequestTimeout(_http_client.js)\n // https://github.com/nodejs/node/blob/v12.x/lib/_http_client.js#L711\n const listenerCount = socket.listeners('timeout').length;\n // node <= 10, default listenerCount is 1, onTimeout\n // 11 < node <= 12, default listenerCount is 2, onTimeout and emitRequestTimeout\n // node >= 13, default listenerCount is 3, onTimeout,\n // onTimeout(https://github.com/nodejs/node/pull/32000/files#diff-5f7fb0850412c6be189faeddea6c5359R333)\n // and emitRequestTimeout\n const timeout = getSocketTimeout(socket);\n const req = socket._httpMessage;\n const reqTimeoutListenerCount = req && req.listeners('timeout').length || 0;\n debug('%s(requests: %s, finished: %s) timeout after %sms, listeners %s, defaultTimeoutListenerCount %s, hasHttpRequest %s, HttpRequest timeoutListenerCount %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT],\n timeout, listenerCount, defaultTimeoutListenerCount, !!req, reqTimeoutListenerCount);\n if (debug.enabled) {\n debug('timeout listeners: %s', socket.listeners('timeout').map(f => f.name).join(', '));\n }\n agent.timeoutSocketCount++;\n const name = agent.getName(options);\n if (agent.freeSockets[name] && agent.freeSockets[name].indexOf(socket) !== -1) {\n // free socket timeout, destroy quietly\n socket.destroy();\n // Remove it from freeSockets list immediately to prevent new requests\n // from being sent through this socket.\n agent.removeSocket(socket, options);\n debug('%s is free, destroy quietly', socket[SOCKET_NAME]);\n } else {\n // if there is no any request socket timeout handler,\n // agent need to handle socket timeout itself.\n //\n // custom request socket timeout handle logic must follow these rules:\n // 1. Destroy socket first\n // 2. 
Must emit socket 'agentRemove' event tell agent remove socket\n // from freeSockets list immediately.\n // Otherise you may be get 'socket hang up' error when reuse\n // free socket and timeout happen in the same time.\n if (reqTimeoutListenerCount === 0) {\n const error = new Error('Socket timeout');\n error.code = 'ERR_SOCKET_TIMEOUT';\n error.timeout = timeout;\n // must manually call socket.end() or socket.destroy() to end the connection.\n // https://nodejs.org/dist/latest-v10.x/docs/api/net.html#net_socket_settimeout_timeout_callback\n socket.destroy(error);\n agent.removeSocket(socket, options);\n debug('%s destroy with timeout error', socket[SOCKET_NAME]);\n }\n }\n }\n socket.on('timeout', onTimeout);\n\n function onError(err) {\n const listenerCount = socket.listeners('error').length;\n debug('%s(requests: %s, finished: %s) error: %s, listenerCount: %s',\n socket[SOCKET_NAME], socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT],\n err, listenerCount);\n agent.errorSocketCount++;\n if (listenerCount === 1) {\n // if socket don't contain error event handler, don't catch it, emit it again\n debug('%s emit uncaught error event', socket[SOCKET_NAME]);\n socket.removeListener('error', onError);\n socket.emit('error', err);\n }\n }\n socket.on('error', onError);\n\n function onRemove() {\n debug('%s(requests: %s, finished: %s) agentRemove',\n socket[SOCKET_NAME],\n socket[SOCKET_REQUEST_COUNT], socket[SOCKET_REQUEST_FINISHED_COUNT]);\n // We need this function for cases like HTTP 'upgrade'\n // (defined by WebSockets) where we need to remove a socket from the\n // pool because it'll be locked up indefinitely\n socket.removeListener('close', onClose);\n socket.removeListener('error', onError);\n socket.removeListener('free', onFree);\n socket.removeListener('timeout', onTimeout);\n socket.removeListener('agentRemove', onRemove);\n }\n socket.on('agentRemove', onRemove);\n}\n\nmodule.exports = Agent;\n\nfunction inspect(obj) {\n const res = {};\n for (const key in obj) {\n res[key] = obj[key].length;\n }\n return res;\n}\n","'use strict';\n\nmodule.exports = {\n // agent\n CURRENT_ID: Symbol('agentkeepalive#currentId'),\n CREATE_ID: Symbol('agentkeepalive#createId'),\n INIT_SOCKET: Symbol('agentkeepalive#initSocket'),\n CREATE_HTTPS_CONNECTION: Symbol('agentkeepalive#createHttpsConnection'),\n // socket\n SOCKET_CREATED_TIME: Symbol('agentkeepalive#socketCreatedTime'),\n SOCKET_NAME: Symbol('agentkeepalive#socketName'),\n SOCKET_REQUEST_COUNT: Symbol('agentkeepalive#socketRequestCount'),\n SOCKET_REQUEST_FINISHED_COUNT: Symbol('agentkeepalive#socketRequestFinishedCount'),\n};\n","'use strict';\n\nconst OriginalHttpsAgent = require('https').Agent;\nconst HttpAgent = require('./agent');\nconst {\n INIT_SOCKET,\n CREATE_HTTPS_CONNECTION,\n} = require('./constants');\n\nclass HttpsAgent extends HttpAgent {\n constructor(options) {\n super(options);\n\n this.defaultPort = 443;\n this.protocol = 'https:';\n this.maxCachedSessions = this.options.maxCachedSessions;\n /* istanbul ignore next */\n if (this.maxCachedSessions === undefined) {\n this.maxCachedSessions = 100;\n }\n\n this._sessionCache = {\n map: {},\n list: [],\n };\n }\n\n createConnection(options, oncreate) {\n const socket = this[CREATE_HTTPS_CONNECTION](options, oncreate);\n this[INIT_SOCKET](socket, options);\n return socket;\n }\n}\n\n// https://github.com/nodejs/node/blob/master/lib/https.js#L89\nHttpsAgent.prototype[CREATE_HTTPS_CONNECTION] = OriginalHttpsAgent.prototype.createConnection;\n\n[\n 'getName',\n 
'_getSession',\n '_cacheSession',\n // https://github.com/nodejs/node/pull/4982\n '_evictSession',\n].forEach(function(method) {\n /* istanbul ignore next */\n if (typeof OriginalHttpsAgent.prototype[method] === 'function') {\n HttpsAgent.prototype[method] = OriginalHttpsAgent.prototype[method];\n }\n});\n\nmodule.exports = HttpsAgent;\n","'use strict';\nmodule.exports = balanced;\nfunction balanced(a, b, str) {\n if (a instanceof RegExp) a = maybeMatch(a, str);\n if (b instanceof RegExp) b = maybeMatch(b, str);\n\n var r = range(a, b, str);\n\n return r && {\n start: r[0],\n end: r[1],\n pre: str.slice(0, r[0]),\n body: str.slice(r[0] + a.length, r[1]),\n post: str.slice(r[1] + b.length)\n };\n}\n\nfunction maybeMatch(reg, str) {\n var m = str.match(reg);\n return m ? m[0] : null;\n}\n\nbalanced.range = range;\nfunction range(a, b, str) {\n var begs, beg, left, right, result;\n var ai = str.indexOf(a);\n var bi = str.indexOf(b, ai + 1);\n var i = ai;\n\n if (ai >= 0 && bi > 0) {\n if(a===b) {\n return [ai, bi];\n }\n begs = [];\n left = str.length;\n\n while (i >= 0 && !result) {\n if (i == ai) {\n begs.push(i);\n ai = str.indexOf(a, i + 1);\n } else if (begs.length == 1) {\n result = [ begs.pop(), bi ];\n } else {\n beg = begs.pop();\n if (beg < left) {\n left = beg;\n right = bi;\n }\n\n bi = str.indexOf(b, i + 1);\n }\n\n i = ai < bi && ai >= 0 ? ai : bi;\n }\n\n if (begs.length) {\n result = [ left, right ];\n }\n }\n\n return result;\n}\n","var register = require(\"./lib/register\");\nvar addHook = require(\"./lib/add\");\nvar removeHook = require(\"./lib/remove\");\n\n// bind with array of arguments: https://stackoverflow.com/a/21792913\nvar bind = Function.bind;\nvar bindable = bind.bind(bind);\n\nfunction bindApi(hook, state, name) {\n var removeHookRef = bindable(removeHook, null).apply(\n null,\n name ? [state, name] : [state]\n );\n hook.api = { remove: removeHookRef };\n hook.remove = removeHookRef;\n [\"before\", \"error\", \"after\", \"wrap\"].forEach(function (kind) {\n var args = name ? [state, kind, name] : [state, kind];\n hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);\n });\n}\n\nfunction HookSingular() {\n var singularHookName = \"h\";\n var singularHookState = {\n registry: {},\n };\n var singularHook = register.bind(null, singularHookState, singularHookName);\n bindApi(singularHook, singularHookState, singularHookName);\n return singularHook;\n}\n\nfunction HookCollection() {\n var state = {\n registry: {},\n };\n\n var hook = register.bind(null, state);\n bindApi(hook, state);\n\n return hook;\n}\n\nvar collectionHookDeprecationMessageDisplayed = false;\nfunction Hook() {\n if (!collectionHookDeprecationMessageDisplayed) {\n console.warn(\n '[before-after-hook]: \"Hook()\" repurposing warning, use \"Hook.Collection()\". 
Read more: https://git.io/upgrade-before-after-hook-to-1.4'\n );\n collectionHookDeprecationMessageDisplayed = true;\n }\n return HookCollection();\n}\n\nHook.Singular = HookSingular.bind();\nHook.Collection = HookCollection.bind();\n\nmodule.exports = Hook;\n// expose constructors as a named property for TypeScript\nmodule.exports.Hook = Hook;\nmodule.exports.Singular = Hook.Singular;\nmodule.exports.Collection = Hook.Collection;\n","module.exports = addHook;\n\nfunction addHook(state, kind, name, hook) {\n var orig = hook;\n if (!state.registry[name]) {\n state.registry[name] = [];\n }\n\n if (kind === \"before\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(orig.bind(null, options))\n .then(method.bind(null, options));\n };\n }\n\n if (kind === \"after\") {\n hook = function (method, options) {\n var result;\n return Promise.resolve()\n .then(method.bind(null, options))\n .then(function (result_) {\n result = result_;\n return orig(result, options);\n })\n .then(function () {\n return result;\n });\n };\n }\n\n if (kind === \"error\") {\n hook = function (method, options) {\n return Promise.resolve()\n .then(method.bind(null, options))\n .catch(function (error) {\n return orig(error, options);\n });\n };\n }\n\n state.registry[name].push({\n hook: hook,\n orig: orig,\n });\n}\n","module.exports = register;\n\nfunction register(state, name, method, options) {\n if (typeof method !== \"function\") {\n throw new Error(\"method for before hook must be a function\");\n }\n\n if (!options) {\n options = {};\n }\n\n if (Array.isArray(name)) {\n return name.reverse().reduce(function (callback, name) {\n return register.bind(null, state, name, callback, options);\n }, method)();\n }\n\n return Promise.resolve().then(function () {\n if (!state.registry[name]) {\n return method(options);\n }\n\n return state.registry[name].reduce(function (method, registered) {\n return registered.hook.bind(null, method, options);\n }, method)();\n });\n}\n","module.exports = removeHook;\n\nfunction removeHook(state, name, method) {\n if (!state.registry[name]) {\n return;\n }\n\n var index = state.registry[name]\n .map(function (registered) {\n return registered.orig;\n })\n .indexOf(method);\n\n if (index === -1) {\n return;\n }\n\n state.registry[name].splice(index, 1);\n}\n","var balanced = require('balanced-match');\n\nmodule.exports = expandTop;\n\nvar escSlash = '\\0SLASH'+Math.random()+'\\0';\nvar escOpen = '\\0OPEN'+Math.random()+'\\0';\nvar escClose = '\\0CLOSE'+Math.random()+'\\0';\nvar escComma = '\\0COMMA'+Math.random()+'\\0';\nvar escPeriod = '\\0PERIOD'+Math.random()+'\\0';\n\nfunction numeric(str) {\n return parseInt(str, 10) == str\n ? 
parseInt(str, 10)\n : str.charCodeAt(0);\n}\n\nfunction escapeBraces(str) {\n return str.split('\\\\\\\\').join(escSlash)\n .split('\\\\{').join(escOpen)\n .split('\\\\}').join(escClose)\n .split('\\\\,').join(escComma)\n .split('\\\\.').join(escPeriod);\n}\n\nfunction unescapeBraces(str) {\n return str.split(escSlash).join('\\\\')\n .split(escOpen).join('{')\n .split(escClose).join('}')\n .split(escComma).join(',')\n .split(escPeriod).join('.');\n}\n\n\n// Basically just str.split(\",\"), but handling cases\n// where we have nested braced sections, which should be\n// treated as individual members, like {a,{b,c},d}\nfunction parseCommaParts(str) {\n if (!str)\n return [''];\n\n var parts = [];\n var m = balanced('{', '}', str);\n\n if (!m)\n return str.split(',');\n\n var pre = m.pre;\n var body = m.body;\n var post = m.post;\n var p = pre.split(',');\n\n p[p.length-1] += '{' + body + '}';\n var postParts = parseCommaParts(post);\n if (post.length) {\n p[p.length-1] += postParts.shift();\n p.push.apply(p, postParts);\n }\n\n parts.push.apply(parts, p);\n\n return parts;\n}\n\nfunction expandTop(str) {\n if (!str)\n return [];\n\n // I don't know why Bash 4.3 does this, but it does.\n // Anything starting with {} will have the first two bytes preserved\n // but *only* at the top level, so {},a}b will not expand to anything,\n // but a{},b}c will be expanded to [a}c,abc].\n // One could argue that this is a bug in Bash, but since the goal of\n // this module is to match Bash's rules, we escape a leading {}\n if (str.substr(0, 2) === '{}') {\n str = '\\\\{\\\\}' + str.substr(2);\n }\n\n return expand(escapeBraces(str), true).map(unescapeBraces);\n}\n\nfunction embrace(str) {\n return '{' + str + '}';\n}\nfunction isPadded(el) {\n return /^-?0\\d/.test(el);\n}\n\nfunction lte(i, y) {\n return i <= y;\n}\nfunction gte(i, y) {\n return i >= y;\n}\n\nfunction expand(str, isTop) {\n var expansions = [];\n\n var m = balanced('{', '}', str);\n if (!m) return [str];\n\n // no need to expand pre, since it is guaranteed to be free of brace-sets\n var pre = m.pre;\n var post = m.post.length\n ? expand(m.post, false)\n : [''];\n\n if (/\\$$/.test(m.pre)) { \n for (var k = 0; k < post.length; k++) {\n var expansion = pre+ '{' + m.body + '}' + post[k];\n expansions.push(expansion);\n }\n } else {\n var isNumericSequence = /^-?\\d+\\.\\.-?\\d+(?:\\.\\.-?\\d+)?$/.test(m.body);\n var isAlphaSequence = /^[a-zA-Z]\\.\\.[a-zA-Z](?:\\.\\.-?\\d+)?$/.test(m.body);\n var isSequence = isNumericSequence || isAlphaSequence;\n var isOptions = m.body.indexOf(',') >= 0;\n if (!isSequence && !isOptions) {\n // {a},b}\n if (m.post.match(/,.*\\}/)) {\n str = m.pre + '{' + m.body + escClose + m.post;\n return expand(str);\n }\n return [str];\n }\n\n var n;\n if (isSequence) {\n n = m.body.split(/\\.\\./);\n } else {\n n = parseCommaParts(m.body);\n if (n.length === 1) {\n // x{{a,b}}y ==> x{a}y x{b}y\n n = expand(n[0], false).map(embrace);\n if (n.length === 1) {\n return post.map(function(p) {\n return m.pre + n[0] + p;\n });\n }\n }\n }\n\n // at this point, n is the parts, and we know it's not a comma set\n // with a single entry.\n var N;\n\n if (isSequence) {\n var x = numeric(n[0]);\n var y = numeric(n[1]);\n var width = Math.max(n[0].length, n[1].length)\n var incr = n.length == 3\n ? 
Math.abs(numeric(n[2]))\n : 1;\n var test = lte;\n var reverse = y < x;\n if (reverse) {\n incr *= -1;\n test = gte;\n }\n var pad = n.some(isPadded);\n\n N = [];\n\n for (var i = x; test(i, y); i += incr) {\n var c;\n if (isAlphaSequence) {\n c = String.fromCharCode(i);\n if (c === '\\\\')\n c = '';\n } else {\n c = String(i);\n if (pad) {\n var need = width - c.length;\n if (need > 0) {\n var z = new Array(need + 1).join('0');\n if (i < 0)\n c = '-' + z + c.slice(1);\n else\n c = z + c;\n }\n }\n }\n N.push(c);\n }\n } else {\n N = [];\n\n for (var j = 0; j < n.length; j++) {\n N.push.apply(N, expand(n[j], false));\n }\n }\n\n for (var j = 0; j < N.length; j++) {\n for (var k = 0; k < post.length; k++) {\n var expansion = pre + N[j] + post[k];\n if (!isTop || isSequence || expansion)\n expansions.push(expansion);\n }\n }\n }\n\n return expansions;\n}\n\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nclass Deprecation extends Error {\n constructor(message) {\n super(message); // Maintains proper stack trace (only available on V8)\n\n /* istanbul ignore next */\n\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n\n this.name = 'Deprecation';\n }\n\n}\n\nexports.Deprecation = Deprecation;\n","/**\n * @author Toru Nagashima \n * @copyright 2015 Toru Nagashima. All rights reserved.\n * See LICENSE file in root directory for full license.\n */\n'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/**\n * @typedef {object} PrivateData\n * @property {EventTarget} eventTarget The event target.\n * @property {{type:string}} event The original event object.\n * @property {number} eventPhase The current event phase.\n * @property {EventTarget|null} currentTarget The current event target.\n * @property {boolean} canceled The flag to prevent default.\n * @property {boolean} stopped The flag to stop propagation.\n * @property {boolean} immediateStopped The flag to stop propagation immediately.\n * @property {Function|null} passiveListener The listener if the current listener is passive. 
Otherwise this is null.\n * @property {number} timeStamp The unix time.\n * @private\n */\n\n/**\n * Private data for event wrappers.\n * @type {WeakMap}\n * @private\n */\nconst privateData = new WeakMap();\n\n/**\n * Cache for wrapper classes.\n * @type {WeakMap}\n * @private\n */\nconst wrappers = new WeakMap();\n\n/**\n * Get private data.\n * @param {Event} event The event object to get private data.\n * @returns {PrivateData} The private data of the event.\n * @private\n */\nfunction pd(event) {\n const retv = privateData.get(event);\n console.assert(\n retv != null,\n \"'this' is expected an Event object, but got\",\n event\n );\n return retv\n}\n\n/**\n * https://dom.spec.whatwg.org/#set-the-canceled-flag\n * @param data {PrivateData} private data.\n */\nfunction setCancelFlag(data) {\n if (data.passiveListener != null) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(\n \"Unable to preventDefault inside passive event listener invocation.\",\n data.passiveListener\n );\n }\n return\n }\n if (!data.event.cancelable) {\n return\n }\n\n data.canceled = true;\n if (typeof data.event.preventDefault === \"function\") {\n data.event.preventDefault();\n }\n}\n\n/**\n * @see https://dom.spec.whatwg.org/#interface-event\n * @private\n */\n/**\n * The event wrapper.\n * @constructor\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Event|{type:string}} event The original event to wrap.\n */\nfunction Event(eventTarget, event) {\n privateData.set(this, {\n eventTarget,\n event,\n eventPhase: 2,\n currentTarget: eventTarget,\n canceled: false,\n stopped: false,\n immediateStopped: false,\n passiveListener: null,\n timeStamp: event.timeStamp || Date.now(),\n });\n\n // https://heycam.github.io/webidl/#Unforgeable\n Object.defineProperty(this, \"isTrusted\", { value: false, enumerable: true });\n\n // Define accessors\n const keys = Object.keys(event);\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in this)) {\n Object.defineProperty(this, key, defineRedirectDescriptor(key));\n }\n }\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEvent.prototype = {\n /**\n * The type of this event.\n * @type {string}\n */\n get type() {\n return pd(this).event.type\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get target() {\n return pd(this).eventTarget\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n */\n get currentTarget() {\n return pd(this).currentTarget\n },\n\n /**\n * @returns {EventTarget[]} The composed path of this event.\n */\n composedPath() {\n const currentTarget = pd(this).currentTarget;\n if (currentTarget == null) {\n return []\n }\n return [currentTarget]\n },\n\n /**\n * Constant of NONE.\n * @type {number}\n */\n get NONE() {\n return 0\n },\n\n /**\n * Constant of CAPTURING_PHASE.\n * @type {number}\n */\n get CAPTURING_PHASE() {\n return 1\n },\n\n /**\n * Constant of AT_TARGET.\n * @type {number}\n */\n get AT_TARGET() {\n return 2\n },\n\n /**\n * Constant of BUBBLING_PHASE.\n * @type {number}\n */\n get BUBBLING_PHASE() {\n return 3\n },\n\n /**\n * The target of this event.\n * @type {number}\n */\n get eventPhase() {\n return pd(this).eventPhase\n },\n\n /**\n * Stop event bubbling.\n * @returns {void}\n */\n stopPropagation() {\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.stopPropagation === \"function\") {\n data.event.stopPropagation();\n }\n },\n\n /**\n * 
Stop event bubbling.\n * @returns {void}\n */\n stopImmediatePropagation() {\n const data = pd(this);\n\n data.stopped = true;\n data.immediateStopped = true;\n if (typeof data.event.stopImmediatePropagation === \"function\") {\n data.event.stopImmediatePropagation();\n }\n },\n\n /**\n * The flag to be bubbling.\n * @type {boolean}\n */\n get bubbles() {\n return Boolean(pd(this).event.bubbles)\n },\n\n /**\n * The flag to be cancelable.\n * @type {boolean}\n */\n get cancelable() {\n return Boolean(pd(this).event.cancelable)\n },\n\n /**\n * Cancel this event.\n * @returns {void}\n */\n preventDefault() {\n setCancelFlag(pd(this));\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n */\n get defaultPrevented() {\n return pd(this).canceled\n },\n\n /**\n * The flag to be composed.\n * @type {boolean}\n */\n get composed() {\n return Boolean(pd(this).event.composed)\n },\n\n /**\n * The unix time of this event.\n * @type {number}\n */\n get timeStamp() {\n return pd(this).timeStamp\n },\n\n /**\n * The target of this event.\n * @type {EventTarget}\n * @deprecated\n */\n get srcElement() {\n return pd(this).eventTarget\n },\n\n /**\n * The flag to stop event bubbling.\n * @type {boolean}\n * @deprecated\n */\n get cancelBubble() {\n return pd(this).stopped\n },\n set cancelBubble(value) {\n if (!value) {\n return\n }\n const data = pd(this);\n\n data.stopped = true;\n if (typeof data.event.cancelBubble === \"boolean\") {\n data.event.cancelBubble = true;\n }\n },\n\n /**\n * The flag to indicate cancellation state.\n * @type {boolean}\n * @deprecated\n */\n get returnValue() {\n return !pd(this).canceled\n },\n set returnValue(value) {\n if (!value) {\n setCancelFlag(pd(this));\n }\n },\n\n /**\n * Initialize this event object. 
But do nothing under event dispatching.\n * @param {string} type The event type.\n * @param {boolean} [bubbles=false] The flag to be possible to bubble up.\n * @param {boolean} [cancelable=false] The flag to be possible to cancel.\n * @deprecated\n */\n initEvent() {\n // Do nothing.\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(Event.prototype, \"constructor\", {\n value: Event,\n configurable: true,\n writable: true,\n});\n\n// Ensure `event instanceof window.Event` is `true`.\nif (typeof window !== \"undefined\" && typeof window.Event !== \"undefined\") {\n Object.setPrototypeOf(Event.prototype, window.Event.prototype);\n\n // Make association for wrappers.\n wrappers.set(window.Event.prototype, Event);\n}\n\n/**\n * Get the property descriptor to redirect a given property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to redirect the property.\n * @private\n */\nfunction defineRedirectDescriptor(key) {\n return {\n get() {\n return pd(this).event[key]\n },\n set(value) {\n pd(this).event[key] = value;\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Get the property descriptor to call a given method property.\n * @param {string} key Property name to define property descriptor.\n * @returns {PropertyDescriptor} The property descriptor to call the method property.\n * @private\n */\nfunction defineCallDescriptor(key) {\n return {\n value() {\n const event = pd(this).event;\n return event[key].apply(event, arguments)\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define new wrapper class.\n * @param {Function} BaseEvent The base wrapper class.\n * @param {Object} proto The prototype of the original event.\n * @returns {Function} The defined wrapper class.\n * @private\n */\nfunction defineWrapper(BaseEvent, proto) {\n const keys = Object.keys(proto);\n if (keys.length === 0) {\n return BaseEvent\n }\n\n /** CustomEvent */\n function CustomEvent(eventTarget, event) {\n BaseEvent.call(this, eventTarget, event);\n }\n\n CustomEvent.prototype = Object.create(BaseEvent.prototype, {\n constructor: { value: CustomEvent, configurable: true, writable: true },\n });\n\n // Define accessors.\n for (let i = 0; i < keys.length; ++i) {\n const key = keys[i];\n if (!(key in BaseEvent.prototype)) {\n const descriptor = Object.getOwnPropertyDescriptor(proto, key);\n const isFunc = typeof descriptor.value === \"function\";\n Object.defineProperty(\n CustomEvent.prototype,\n key,\n isFunc\n ? 
defineCallDescriptor(key)\n : defineRedirectDescriptor(key)\n );\n }\n }\n\n return CustomEvent\n}\n\n/**\n * Get the wrapper class of a given prototype.\n * @param {Object} proto The prototype of the original event to get its wrapper.\n * @returns {Function} The wrapper class.\n * @private\n */\nfunction getWrapper(proto) {\n if (proto == null || proto === Object.prototype) {\n return Event\n }\n\n let wrapper = wrappers.get(proto);\n if (wrapper == null) {\n wrapper = defineWrapper(getWrapper(Object.getPrototypeOf(proto)), proto);\n wrappers.set(proto, wrapper);\n }\n return wrapper\n}\n\n/**\n * Wrap a given event to management a dispatching.\n * @param {EventTarget} eventTarget The event target of this dispatching.\n * @param {Object} event The event to wrap.\n * @returns {Event} The wrapper instance.\n * @private\n */\nfunction wrapEvent(eventTarget, event) {\n const Wrapper = getWrapper(Object.getPrototypeOf(event));\n return new Wrapper(eventTarget, event)\n}\n\n/**\n * Get the immediateStopped flag of a given event.\n * @param {Event} event The event to get.\n * @returns {boolean} The flag to stop propagation immediately.\n * @private\n */\nfunction isStopped(event) {\n return pd(event).immediateStopped\n}\n\n/**\n * Set the current event phase of a given event.\n * @param {Event} event The event to set current target.\n * @param {number} eventPhase New event phase.\n * @returns {void}\n * @private\n */\nfunction setEventPhase(event, eventPhase) {\n pd(event).eventPhase = eventPhase;\n}\n\n/**\n * Set the current target of a given event.\n * @param {Event} event The event to set current target.\n * @param {EventTarget|null} currentTarget New current target.\n * @returns {void}\n * @private\n */\nfunction setCurrentTarget(event, currentTarget) {\n pd(event).currentTarget = currentTarget;\n}\n\n/**\n * Set a passive listener of a given event.\n * @param {Event} event The event to set current target.\n * @param {Function|null} passiveListener New passive listener.\n * @returns {void}\n * @private\n */\nfunction setPassiveListener(event, passiveListener) {\n pd(event).passiveListener = passiveListener;\n}\n\n/**\n * @typedef {object} ListenerNode\n * @property {Function} listener\n * @property {1|2|3} listenerType\n * @property {boolean} passive\n * @property {boolean} once\n * @property {ListenerNode|null} next\n * @private\n */\n\n/**\n * @type {WeakMap>}\n * @private\n */\nconst listenersMap = new WeakMap();\n\n// Listener types\nconst CAPTURE = 1;\nconst BUBBLE = 2;\nconst ATTRIBUTE = 3;\n\n/**\n * Check whether a given value is an object or not.\n * @param {any} x The value to check.\n * @returns {boolean} `true` if the value is an object.\n */\nfunction isObject(x) {\n return x !== null && typeof x === \"object\" //eslint-disable-line no-restricted-syntax\n}\n\n/**\n * Get listeners.\n * @param {EventTarget} eventTarget The event target to get.\n * @returns {Map} The listeners.\n * @private\n */\nfunction getListeners(eventTarget) {\n const listeners = listenersMap.get(eventTarget);\n if (listeners == null) {\n throw new TypeError(\n \"'this' is expected an EventTarget object, but got another value.\"\n )\n }\n return listeners\n}\n\n/**\n * Get the property descriptor for the event attribute of a given event.\n * @param {string} eventName The event name to get property descriptor.\n * @returns {PropertyDescriptor} The property descriptor.\n * @private\n */\nfunction defineEventAttributeDescriptor(eventName) {\n return {\n get() {\n const listeners = getListeners(this);\n let 
node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n return node.listener\n }\n node = node.next;\n }\n return null\n },\n\n set(listener) {\n if (typeof listener !== \"function\" && !isObject(listener)) {\n listener = null; // eslint-disable-line no-param-reassign\n }\n const listeners = getListeners(this);\n\n // Traverse to the tail while removing old value.\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (node.listenerType === ATTRIBUTE) {\n // Remove old value.\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n node = node.next;\n }\n\n // Add new value.\n if (listener !== null) {\n const newNode = {\n listener,\n listenerType: ATTRIBUTE,\n passive: false,\n once: false,\n next: null,\n };\n if (prev === null) {\n listeners.set(eventName, newNode);\n } else {\n prev.next = newNode;\n }\n }\n },\n configurable: true,\n enumerable: true,\n }\n}\n\n/**\n * Define an event attribute (e.g. `eventTarget.onclick`).\n * @param {Object} eventTargetPrototype The event target prototype to define an event attrbite.\n * @param {string} eventName The event name to define.\n * @returns {void}\n */\nfunction defineEventAttribute(eventTargetPrototype, eventName) {\n Object.defineProperty(\n eventTargetPrototype,\n `on${eventName}`,\n defineEventAttributeDescriptor(eventName)\n );\n}\n\n/**\n * Define a custom EventTarget with event attributes.\n * @param {string[]} eventNames Event names for event attributes.\n * @returns {EventTarget} The custom EventTarget.\n * @private\n */\nfunction defineCustomEventTarget(eventNames) {\n /** CustomEventTarget */\n function CustomEventTarget() {\n EventTarget.call(this);\n }\n\n CustomEventTarget.prototype = Object.create(EventTarget.prototype, {\n constructor: {\n value: CustomEventTarget,\n configurable: true,\n writable: true,\n },\n });\n\n for (let i = 0; i < eventNames.length; ++i) {\n defineEventAttribute(CustomEventTarget.prototype, eventNames[i]);\n }\n\n return CustomEventTarget\n}\n\n/**\n * EventTarget.\n *\n * - This is constructor if no arguments.\n * - This is a function which returns a CustomEventTarget constructor if there are arguments.\n *\n * For example:\n *\n * class A extends EventTarget {}\n * class B extends EventTarget(\"message\") {}\n * class C extends EventTarget(\"message\", \"error\") {}\n * class D extends EventTarget([\"message\", \"error\"]) {}\n */\nfunction EventTarget() {\n /*eslint-disable consistent-return */\n if (this instanceof EventTarget) {\n listenersMap.set(this, new Map());\n return\n }\n if (arguments.length === 1 && Array.isArray(arguments[0])) {\n return defineCustomEventTarget(arguments[0])\n }\n if (arguments.length > 0) {\n const types = new Array(arguments.length);\n for (let i = 0; i < arguments.length; ++i) {\n types[i] = arguments[i];\n }\n return defineCustomEventTarget(types)\n }\n throw new TypeError(\"Cannot call a class as a function\")\n /*eslint-enable consistent-return */\n}\n\n// Should be enumerable, but class methods are not enumerable.\nEventTarget.prototype = {\n /**\n * Add a given listener to this event target.\n * @param {string} eventName The event name to add.\n * @param {Function} listener The listener to add.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n 
addEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n if (typeof listener !== \"function\" && !isObject(listener)) {\n throw new TypeError(\"'listener' should be a function or an object.\")\n }\n\n const listeners = getListeners(this);\n const optionsIsObj = isObject(options);\n const capture = optionsIsObj\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n const newNode = {\n listener,\n listenerType,\n passive: optionsIsObj && Boolean(options.passive),\n once: optionsIsObj && Boolean(options.once),\n next: null,\n };\n\n // Set it as the first node if the first node is null.\n let node = listeners.get(eventName);\n if (node === undefined) {\n listeners.set(eventName, newNode);\n return\n }\n\n // Traverse to the tail while checking duplication..\n let prev = null;\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n // Should ignore duplication.\n return\n }\n prev = node;\n node = node.next;\n }\n\n // Add it.\n prev.next = newNode;\n },\n\n /**\n * Remove a given listener from this event target.\n * @param {string} eventName The event name to remove.\n * @param {Function} listener The listener to remove.\n * @param {boolean|{capture?:boolean,passive?:boolean,once?:boolean}} [options] The options for this listener.\n * @returns {void}\n */\n removeEventListener(eventName, listener, options) {\n if (listener == null) {\n return\n }\n\n const listeners = getListeners(this);\n const capture = isObject(options)\n ? Boolean(options.capture)\n : Boolean(options);\n const listenerType = capture ? CAPTURE : BUBBLE;\n\n let prev = null;\n let node = listeners.get(eventName);\n while (node != null) {\n if (\n node.listener === listener &&\n node.listenerType === listenerType\n ) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n return\n }\n\n prev = node;\n node = node.next;\n }\n },\n\n /**\n * Dispatch a given event.\n * @param {Event|{type:string}} event The event to dispatch.\n * @returns {boolean} `false` if canceled.\n */\n dispatchEvent(event) {\n if (event == null || typeof event.type !== \"string\") {\n throw new TypeError('\"event.type\" should be a string.')\n }\n\n // If listeners aren't registered, terminate.\n const listeners = getListeners(this);\n const eventName = event.type;\n let node = listeners.get(eventName);\n if (node == null) {\n return true\n }\n\n // Since we cannot rewrite several properties, so wrap object.\n const wrappedEvent = wrapEvent(this, event);\n\n // This doesn't process capturing phase and bubbling phase.\n // This isn't participating in a tree.\n let prev = null;\n while (node != null) {\n // Remove this listener if it's once\n if (node.once) {\n if (prev !== null) {\n prev.next = node.next;\n } else if (node.next !== null) {\n listeners.set(eventName, node.next);\n } else {\n listeners.delete(eventName);\n }\n } else {\n prev = node;\n }\n\n // Call this listener\n setPassiveListener(\n wrappedEvent,\n node.passive ? 
node.listener : null\n );\n if (typeof node.listener === \"function\") {\n try {\n node.listener.call(this, wrappedEvent);\n } catch (err) {\n if (\n typeof console !== \"undefined\" &&\n typeof console.error === \"function\"\n ) {\n console.error(err);\n }\n }\n } else if (\n node.listenerType !== ATTRIBUTE &&\n typeof node.listener.handleEvent === \"function\"\n ) {\n node.listener.handleEvent(wrappedEvent);\n }\n\n // Break if `event.stopImmediatePropagation` was called.\n if (isStopped(wrappedEvent)) {\n break\n }\n\n node = node.next;\n }\n setPassiveListener(wrappedEvent, null);\n setEventPhase(wrappedEvent, 0);\n setCurrentTarget(wrappedEvent, null);\n\n return !wrappedEvent.defaultPrevented\n },\n};\n\n// `constructor` is not enumerable.\nObject.defineProperty(EventTarget.prototype, \"constructor\", {\n value: EventTarget,\n configurable: true,\n writable: true,\n});\n\n// Ensure `eventTarget instanceof window.EventTarget` is `true`.\nif (\n typeof window !== \"undefined\" &&\n typeof window.EventTarget !== \"undefined\"\n) {\n Object.setPrototypeOf(EventTarget.prototype, window.EventTarget.prototype);\n}\n\nexports.defineEventAttribute = defineEventAttribute;\nexports.EventTarget = EventTarget;\nexports.default = EventTarget;\n\nmodule.exports = EventTarget\nmodule.exports.EventTarget = module.exports[\"default\"] = EventTarget\nmodule.exports.defineEventAttribute = defineEventAttribute\n//# sourceMappingURL=event-target-shim.js.map\n","/**\n * @license\n * web-streams-polyfill v4.0.0-beta.3\n * Copyright 2021 Mattias Buelens, Diwank Singh Tomer and other contributors.\n * This code is released under the MIT license.\n * SPDX-License-Identifier: MIT\n */\n!function(e,t){\"object\"==typeof exports&&\"undefined\"!=typeof module?t(exports):\"function\"==typeof define&&define.amd?define([\"exports\"],t):t((e=\"undefined\"!=typeof globalThis?globalThis:e||self).WebStreamsPolyfill={})}(this,(function(e){\"use strict\";const t=\"function\"==typeof Symbol&&\"symbol\"==typeof Symbol.iterator?Symbol:e=>`Symbol(${e})`;function r(){}function o(e){return\"object\"==typeof e&&null!==e||\"function\"==typeof e}const n=r;function a(e,t){try{Object.defineProperty(e,\"name\",{value:t,configurable:!0})}catch(e){}}const i=Promise,l=Promise.prototype.then,s=Promise.resolve.bind(i),u=Promise.reject.bind(i);function c(e){return new i(e)}function d(e){return s(e)}function f(e){return u(e)}function b(e,t,r){return l.call(e,t,r)}function h(e,t,r){b(b(e,t,r),void 0,n)}function _(e,t){h(e,t)}function p(e,t){h(e,void 0,t)}function m(e,t,r){return b(e,t,r)}function y(e){b(e,void 0,n)}let g=e=>{if(\"function\"==typeof queueMicrotask)g=queueMicrotask;else{const e=d(void 0);g=t=>b(e,t)}return g(e)};function S(e,t,r){if(\"function\"!=typeof e)throw new TypeError(\"Argument is not a function\");return Function.prototype.apply.call(e,t,r)}function w(e,t,r){try{return d(S(e,t,r))}catch(e){return f(e)}}class v{constructor(){this._cursor=0,this._size=0,this._front={_elements:[],_next:void 0},this._back=this._front,this._cursor=0,this._size=0}get length(){return this._size}push(e){const t=this._back;let r=t;16383===t._elements.length&&(r={_elements:[],_next:void 0}),t._elements.push(e),r!==t&&(this._back=r,t._next=r),++this._size}shift(){const e=this._front;let t=e;const r=this._cursor;let o=r+1;const n=e._elements,a=n[r];return 16384===o&&(t=e._next,o=0),--this._size,this._cursor=o,e!==t&&(this._front=t),n[r]=void 0,a}forEach(e){let t=this._cursor,r=this._front,o=r._elements;for(;!(t===o.length&&void 
0===r._next||t===o.length&&(r=r._next,o=r._elements,t=0,0===o.length));)e(o[t]),++t}peek(){const e=this._front,t=this._cursor;return e._elements[t]}}const R=t(\"[[AbortSteps]]\"),T=t(\"[[ErrorSteps]]\"),q=t(\"[[CancelSteps]]\"),C=t(\"[[PullSteps]]\"),P=t(\"[[ReleaseSteps]]\");function E(e,t){e._ownerReadableStream=t,t._reader=e,\"readable\"===t._state?B(e):\"closed\"===t._state?function(e){B(e),z(e)}(e):A(e,t._storedError)}function W(e,t){return Xt(e._ownerReadableStream,t)}function O(e){const t=e._ownerReadableStream;\"readable\"===t._state?j(e,new TypeError(\"Reader was released and can no longer be used to monitor the stream's closedness\")):function(e,t){A(e,t)}(e,new TypeError(\"Reader was released and can no longer be used to monitor the stream's closedness\")),t._readableStreamController[P](),t._reader=void 0,e._ownerReadableStream=void 0}function k(e){return new TypeError(\"Cannot \"+e+\" a stream using a released reader\")}function B(e){e._closedPromise=c(((t,r)=>{e._closedPromise_resolve=t,e._closedPromise_reject=r}))}function A(e,t){B(e),j(e,t)}function j(e,t){void 0!==e._closedPromise_reject&&(y(e._closedPromise),e._closedPromise_reject(t),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0)}function z(e){void 0!==e._closedPromise_resolve&&(e._closedPromise_resolve(void 0),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0)}const L=Number.isFinite||function(e){return\"number\"==typeof e&&isFinite(e)},F=Math.trunc||function(e){return e<0?Math.ceil(e):Math.floor(e)};function D(e,t){if(void 0!==e&&(\"object\"!=typeof(r=e)&&\"function\"!=typeof r))throw new TypeError(`${t} is not an object.`);var r}function I(e,t){if(\"function\"!=typeof e)throw new TypeError(`${t} is not a function.`)}function $(e,t){if(!function(e){return\"object\"==typeof e&&null!==e||\"function\"==typeof e}(e))throw new TypeError(`${t} is not an object.`)}function M(e,t,r){if(void 0===e)throw new TypeError(`Parameter ${t} is required in '${r}'.`)}function Y(e,t,r){if(void 0===e)throw new TypeError(`${t} is required in '${r}'.`)}function Q(e){return Number(e)}function N(e){return 0===e?0:e}function x(e,t){const r=Number.MAX_SAFE_INTEGER;let o=Number(e);if(o=N(o),!L(o))throw new TypeError(`${t} is not a finite number`);if(o=function(e){return N(F(e))}(o),o<0||o>r)throw new TypeError(`${t} is outside the accepted range of 0 to ${r}, inclusive`);return L(o)&&0!==o?o:0}function H(e){if(!o(e))return!1;if(\"function\"!=typeof e.getReader)return!1;try{return\"boolean\"==typeof e.locked}catch(e){return!1}}function V(e){if(!o(e))return!1;if(\"function\"!=typeof e.getWriter)return!1;try{return\"boolean\"==typeof e.locked}catch(e){return!1}}function U(e,t){if(!Ut(e))throw new TypeError(`${t} is not a ReadableStream.`)}function G(e,t){e._reader._readRequests.push(t)}function X(e,t,r){const o=e._reader._readRequests.shift();r?o._closeSteps():o._chunkSteps(t)}function J(e){return e._reader._readRequests.length}function K(e){const t=e._reader;return void 0!==t&&!!Z(t)}class ReadableStreamDefaultReader{constructor(e){if(M(e,1,\"ReadableStreamDefaultReader\"),U(e,\"First parameter\"),Gt(e))throw new TypeError(\"This stream has already been locked for exclusive reading by another reader\");E(this,e),this._readRequests=new v}get closed(){return Z(this)?this._closedPromise:f(te(\"closed\"))}cancel(e){return Z(this)?void 0===this._ownerReadableStream?f(k(\"cancel\")):W(this,e):f(te(\"cancel\"))}read(){if(!Z(this))return f(te(\"read\"));if(void 0===this._ownerReadableStream)return f(k(\"read from\"));let 
e,t;const r=c(((r,o)=>{e=r,t=o}));return function(e,t){const r=e._ownerReadableStream;r._disturbed=!0,\"closed\"===r._state?t._closeSteps():\"errored\"===r._state?t._errorSteps(r._storedError):r._readableStreamController[C](t)}(this,{_chunkSteps:t=>e({value:t,done:!1}),_closeSteps:()=>e({value:void 0,done:!0}),_errorSteps:e=>t(e)}),r}releaseLock(){if(!Z(this))throw te(\"releaseLock\");void 0!==this._ownerReadableStream&&function(e){O(e);const t=new TypeError(\"Reader was released\");ee(e,t)}(this)}}function Z(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_readRequests\")&&e instanceof ReadableStreamDefaultReader)}function ee(e,t){const r=e._readRequests;e._readRequests=new v,r.forEach((e=>{e._errorSteps(t)}))}function te(e){return new TypeError(`ReadableStreamDefaultReader.prototype.${e} can only be used on a ReadableStreamDefaultReader`)}Object.defineProperties(ReadableStreamDefaultReader.prototype,{cancel:{enumerable:!0},read:{enumerable:!0},releaseLock:{enumerable:!0},closed:{enumerable:!0}}),a(ReadableStreamDefaultReader.prototype.cancel,\"cancel\"),a(ReadableStreamDefaultReader.prototype.read,\"read\"),a(ReadableStreamDefaultReader.prototype.releaseLock,\"releaseLock\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamDefaultReader.prototype,t.toStringTag,{value:\"ReadableStreamDefaultReader\",configurable:!0});class re{constructor(e,t){this._ongoingPromise=void 0,this._isFinished=!1,this._reader=e,this._preventCancel=t}next(){const e=()=>this._nextSteps();return this._ongoingPromise=this._ongoingPromise?m(this._ongoingPromise,e,e):e(),this._ongoingPromise}return(e){const t=()=>this._returnSteps(e);return this._ongoingPromise?m(this._ongoingPromise,t,t):t()}_nextSteps(){if(this._isFinished)return Promise.resolve({value:void 0,done:!0});const e=this._reader;return void 0===e?f(k(\"iterate\")):b(e.read(),(e=>{var t;return this._ongoingPromise=void 0,e.done&&(this._isFinished=!0,null===(t=this._reader)||void 0===t||t.releaseLock(),this._reader=void 0),e}),(e=>{var t;throw this._ongoingPromise=void 0,this._isFinished=!0,null===(t=this._reader)||void 0===t||t.releaseLock(),this._reader=void 0,e}))}_returnSteps(e){if(this._isFinished)return Promise.resolve({value:e,done:!0});this._isFinished=!0;const t=this._reader;if(void 0===t)return f(k(\"finish iterating\"));if(this._reader=void 0,!this._preventCancel){const r=t.cancel(e);return t.releaseLock(),m(r,(()=>({value:e,done:!0})))}return t.releaseLock(),d({value:e,done:!0})}}const oe={next(){return ne(this)?this._asyncIteratorImpl.next():f(ae(\"next\"))},return(e){return ne(this)?this._asyncIteratorImpl.return(e):f(ae(\"return\"))}};function ne(e){if(!o(e))return!1;if(!Object.prototype.hasOwnProperty.call(e,\"_asyncIteratorImpl\"))return!1;try{return e._asyncIteratorImpl instanceof re}catch(e){return!1}}function ae(e){return new TypeError(`ReadableStreamAsyncIterator.${e} can only be used on a ReadableSteamAsyncIterator`)}\"symbol\"==typeof t.asyncIterator&&Object.defineProperty(oe,t.asyncIterator,{value(){return this},writable:!0,configurable:!0});const ie=Number.isNaN||function(e){return e!=e};function le(e,t,r,o,n){new Uint8Array(e).set(new Uint8Array(r,o,n),t)}function se(e){const t=function(e,t,r){if(e.slice)return e.slice(t,r);const o=r-t,n=new ArrayBuffer(o);return le(n,0,e,t,o),n}(e.buffer,e.byteOffset,e.byteOffset+e.byteLength);return new Uint8Array(t)}function ue(e){const t=e._queue.shift();return e._queueTotalSize-=t.size,e._queueTotalSize<0&&(e._queueTotalSize=0),t.value}function 
ce(e,t,r){if(\"number\"!=typeof(o=r)||ie(o)||o<0||r===1/0)throw new RangeError(\"Size must be a finite, non-NaN, non-negative number.\");var o;e._queue.push({value:t,size:r}),e._queueTotalSize+=r}function de(e){e._queue=new v,e._queueTotalSize=0}class ReadableStreamBYOBRequest{constructor(){throw new TypeError(\"Illegal constructor\")}get view(){if(!be(this))throw Ae(\"view\");return this._view}respond(e){if(!be(this))throw Ae(\"respond\");if(M(e,1,\"respond\"),e=x(e,\"First parameter\"),void 0===this._associatedReadableByteStreamController)throw new TypeError(\"This BYOB request has been invalidated\");this._view.buffer,function(e,t){const r=e._pendingPullIntos.peek();if(\"closed\"===e._controlledReadableByteStream._state){if(0!==t)throw new TypeError(\"bytesWritten must be 0 when calling respond() on a closed stream\")}else{if(0===t)throw new TypeError(\"bytesWritten must be greater than 0 when calling respond() on a readable stream\");if(r.bytesFilled+t>r.byteLength)throw new RangeError(\"bytesWritten out of range\")}r.buffer=r.buffer,Ce(e,t)}(this._associatedReadableByteStreamController,e)}respondWithNewView(e){if(!be(this))throw Ae(\"respondWithNewView\");if(M(e,1,\"respondWithNewView\"),!ArrayBuffer.isView(e))throw new TypeError(\"You can only respond with array buffer views\");if(void 0===this._associatedReadableByteStreamController)throw new TypeError(\"This BYOB request has been invalidated\");e.buffer,function(e,t){const r=e._pendingPullIntos.peek();if(\"closed\"===e._controlledReadableByteStream._state){if(0!==t.byteLength)throw new TypeError(\"The view's length must be 0 when calling respondWithNewView() on a closed stream\")}else if(0===t.byteLength)throw new TypeError(\"The view's length must be greater than 0 when calling respondWithNewView() on a readable stream\");if(r.byteOffset+r.bytesFilled!==t.byteOffset)throw new RangeError(\"The region specified by view does not match byobRequest\");if(r.bufferByteLength!==t.buffer.byteLength)throw new RangeError(\"The buffer of view has different capacity than byobRequest\");if(r.bytesFilled+t.byteLength>r.byteLength)throw new RangeError(\"The region specified by view is larger than byobRequest\");const o=t.byteLength;r.buffer=t.buffer,Ce(e,o)}(this._associatedReadableByteStreamController,e)}}Object.defineProperties(ReadableStreamBYOBRequest.prototype,{respond:{enumerable:!0},respondWithNewView:{enumerable:!0},view:{enumerable:!0}}),a(ReadableStreamBYOBRequest.prototype.respond,\"respond\"),a(ReadableStreamBYOBRequest.prototype.respondWithNewView,\"respondWithNewView\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamBYOBRequest.prototype,t.toStringTag,{value:\"ReadableStreamBYOBRequest\",configurable:!0});class ReadableByteStreamController{constructor(){throw new TypeError(\"Illegal constructor\")}get byobRequest(){if(!fe(this))throw je(\"byobRequest\");return function(e){if(null===e._byobRequest&&e._pendingPullIntos.length>0){const t=e._pendingPullIntos.peek(),r=new Uint8Array(t.buffer,t.byteOffset+t.bytesFilled,t.byteLength-t.bytesFilled),o=Object.create(ReadableStreamBYOBRequest.prototype);!function(e,t,r){e._associatedReadableByteStreamController=t,e._view=r}(o,e,r),e._byobRequest=o}return e._byobRequest}(this)}get desiredSize(){if(!fe(this))throw je(\"desiredSize\");return ke(this)}close(){if(!fe(this))throw je(\"close\");if(this._closeRequested)throw new TypeError(\"The stream has already been closed; do not close it again!\");const e=this._controlledReadableByteStream._state;if(\"readable\"!==e)throw 
new TypeError(`The stream (in ${e} state) is not in the readable state and cannot be closed`);!function(e){const t=e._controlledReadableByteStream;if(e._closeRequested||\"readable\"!==t._state)return;if(e._queueTotalSize>0)return void(e._closeRequested=!0);if(e._pendingPullIntos.length>0){if(e._pendingPullIntos.peek().bytesFilled>0){const t=new TypeError(\"Insufficient bytes to fill elements in the given buffer\");throw We(e,t),t}}Ee(e),Jt(t)}(this)}enqueue(e){if(!fe(this))throw je(\"enqueue\");if(M(e,1,\"enqueue\"),!ArrayBuffer.isView(e))throw new TypeError(\"chunk must be an array buffer view\");if(0===e.byteLength)throw new TypeError(\"chunk must have non-zero byteLength\");if(0===e.buffer.byteLength)throw new TypeError(\"chunk's buffer must have non-zero byteLength\");if(this._closeRequested)throw new TypeError(\"stream is closed or draining\");const t=this._controlledReadableByteStream._state;if(\"readable\"!==t)throw new TypeError(`The stream (in ${t} state) is not in the readable state and cannot be enqueued to`);!function(e,t){const r=e._controlledReadableByteStream;if(e._closeRequested||\"readable\"!==r._state)return;const o=t.buffer,n=t.byteOffset,a=t.byteLength,i=o;if(e._pendingPullIntos.length>0){const t=e._pendingPullIntos.peek();t.buffer,0,Te(e),t.buffer=t.buffer,\"none\"===t.readerType&&Se(e,t)}if(K(r))if(function(e){const t=e._controlledReadableByteStream._reader;for(;t._readRequests.length>0;){if(0===e._queueTotalSize)return;Oe(e,t._readRequests.shift())}}(e),0===J(r))ye(e,i,n,a);else{e._pendingPullIntos.length>0&&Pe(e);X(r,new Uint8Array(i,n,a),!1)}else Fe(r)?(ye(e,i,n,a),qe(e)):ye(e,i,n,a);he(e)}(this,e)}error(e){if(!fe(this))throw je(\"error\");We(this,e)}[q](e){_e(this),de(this);const t=this._cancelAlgorithm(e);return Ee(this),t}[C](e){const t=this._controlledReadableByteStream;if(this._queueTotalSize>0)return void Oe(this,e);const r=this._autoAllocateChunkSize;if(void 0!==r){let t;try{t=new ArrayBuffer(r)}catch(t){return void e._errorSteps(t)}const o={buffer:t,bufferByteLength:r,byteOffset:0,byteLength:r,bytesFilled:0,elementSize:1,viewConstructor:Uint8Array,readerType:\"default\"};this._pendingPullIntos.push(o)}G(t,e),he(this)}[P](){if(this._pendingPullIntos.length>0){const e=this._pendingPullIntos.peek();e.readerType=\"none\",this._pendingPullIntos=new v,this._pendingPullIntos.push(e)}}}function fe(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledReadableByteStream\")&&e instanceof ReadableByteStreamController)}function be(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_associatedReadableByteStreamController\")&&e instanceof ReadableStreamBYOBRequest)}function he(e){const t=function(e){const t=e._controlledReadableByteStream;if(\"readable\"!==t._state)return!1;if(e._closeRequested)return!1;if(!e._started)return!1;if(K(t)&&J(t)>0)return!0;if(Fe(t)&&Le(t)>0)return!0;if(ke(e)>0)return!0;return!1}(e);if(!t)return;if(e._pulling)return void(e._pullAgain=!0);e._pulling=!0;h(e._pullAlgorithm(),(()=>(e._pulling=!1,e._pullAgain&&(e._pullAgain=!1,he(e)),null)),(t=>(We(e,t),null)))}function _e(e){Te(e),e._pendingPullIntos=new v}function pe(e,t){let r=!1;\"closed\"===e._state&&(r=!0);const o=me(t);\"default\"===t.readerType?X(e,o,r):function(e,t,r){const o=e._reader._readIntoRequests.shift();r?o._closeSteps(t):o._chunkSteps(t)}(e,o,r)}function me(e){const t=e.bytesFilled,r=e.elementSize;return new e.viewConstructor(e.buffer,e.byteOffset,t/r)}function ye(e,t,r,o){e._queue.push({buffer:t,byteOffset:r,byteLength:o}),e._queueTotalSize+=o}function 
ge(e,t,r,o){let n;try{n=t.slice(r,r+o)}catch(t){throw We(e,t),t}ye(e,n,0,o)}function Se(e,t){t.bytesFilled>0&&ge(e,t.buffer,t.byteOffset,t.bytesFilled),Pe(e)}function we(e,t){const r=t.elementSize,o=t.bytesFilled-t.bytesFilled%r,n=Math.min(e._queueTotalSize,t.byteLength-t.bytesFilled),a=t.bytesFilled+n,i=a-a%r;let l=n,s=!1;i>o&&(l=i-t.bytesFilled,s=!0);const u=e._queue;for(;l>0;){const r=u.peek(),o=Math.min(l,r.byteLength),n=t.byteOffset+t.bytesFilled;le(t.buffer,n,r.buffer,r.byteOffset,o),r.byteLength===o?u.shift():(r.byteOffset+=o,r.byteLength-=o),e._queueTotalSize-=o,ve(e,o,t),l-=o}return s}function ve(e,t,r){r.bytesFilled+=t}function Re(e){0===e._queueTotalSize&&e._closeRequested?(Ee(e),Jt(e._controlledReadableByteStream)):he(e)}function Te(e){null!==e._byobRequest&&(e._byobRequest._associatedReadableByteStreamController=void 0,e._byobRequest._view=null,e._byobRequest=null)}function qe(e){for(;e._pendingPullIntos.length>0;){if(0===e._queueTotalSize)return;const t=e._pendingPullIntos.peek();we(e,t)&&(Pe(e),pe(e._controlledReadableByteStream,t))}}function Ce(e,t){const r=e._pendingPullIntos.peek();Te(e);\"closed\"===e._controlledReadableByteStream._state?function(e,t){\"none\"===t.readerType&&Pe(e);const r=e._controlledReadableByteStream;if(Fe(r))for(;Le(r)>0;)pe(r,Pe(e))}(e,r):function(e,t,r){if(ve(0,t,r),\"none\"===r.readerType)return Se(e,r),void qe(e);if(r.bytesFilled0){const t=r.byteOffset+r.bytesFilled;ge(e,r.buffer,t-o,o)}r.bytesFilled-=o,pe(e._controlledReadableByteStream,r),qe(e)}(e,t,r),he(e)}function Pe(e){return e._pendingPullIntos.shift()}function Ee(e){e._pullAlgorithm=void 0,e._cancelAlgorithm=void 0}function We(e,t){const r=e._controlledReadableByteStream;\"readable\"===r._state&&(_e(e),de(e),Ee(e),Kt(r,t))}function Oe(e,t){const r=e._queue.shift();e._queueTotalSize-=r.byteLength,Re(e);const o=new Uint8Array(r.buffer,r.byteOffset,r.byteLength);t._chunkSteps(o)}function ke(e){const t=e._controlledReadableByteStream._state;return\"errored\"===t?null:\"closed\"===t?0:e._strategyHWM-e._queueTotalSize}function Be(e,t,r){const o=Object.create(ReadableByteStreamController.prototype);let n,a,i;n=void 0!==t.start?()=>t.start(o):()=>{},a=void 0!==t.pull?()=>t.pull(o):()=>d(void 0),i=void 0!==t.cancel?e=>t.cancel(e):()=>d(void 0);const l=t.autoAllocateChunkSize;if(0===l)throw new TypeError(\"autoAllocateChunkSize must be greater than 0\");!function(e,t,r,o,n,a,i){t._controlledReadableByteStream=e,t._pullAgain=!1,t._pulling=!1,t._byobRequest=null,t._queue=t._queueTotalSize=void 0,de(t),t._closeRequested=!1,t._started=!1,t._strategyHWM=a,t._pullAlgorithm=o,t._cancelAlgorithm=n,t._autoAllocateChunkSize=i,t._pendingPullIntos=new v,e._readableStreamController=t,h(d(r()),(()=>(t._started=!0,he(t),null)),(e=>(We(t,e),null)))}(e,o,n,a,i,r,l)}function Ae(e){return new TypeError(`ReadableStreamBYOBRequest.prototype.${e} can only be used on a ReadableStreamBYOBRequest`)}function je(e){return new TypeError(`ReadableByteStreamController.prototype.${e} can only be used on a ReadableByteStreamController`)}function ze(e,t){e._reader._readIntoRequests.push(t)}function Le(e){return e._reader._readIntoRequests.length}function Fe(e){const t=e._reader;return void 
0!==t&&!!De(t)}Object.defineProperties(ReadableByteStreamController.prototype,{close:{enumerable:!0},enqueue:{enumerable:!0},error:{enumerable:!0},byobRequest:{enumerable:!0},desiredSize:{enumerable:!0}}),a(ReadableByteStreamController.prototype.close,\"close\"),a(ReadableByteStreamController.prototype.enqueue,\"enqueue\"),a(ReadableByteStreamController.prototype.error,\"error\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableByteStreamController.prototype,t.toStringTag,{value:\"ReadableByteStreamController\",configurable:!0});class ReadableStreamBYOBReader{constructor(e){if(M(e,1,\"ReadableStreamBYOBReader\"),U(e,\"First parameter\"),Gt(e))throw new TypeError(\"This stream has already been locked for exclusive reading by another reader\");if(!fe(e._readableStreamController))throw new TypeError(\"Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte source\");E(this,e),this._readIntoRequests=new v}get closed(){return De(this)?this._closedPromise:f($e(\"closed\"))}cancel(e){return De(this)?void 0===this._ownerReadableStream?f(k(\"cancel\")):W(this,e):f($e(\"cancel\"))}read(e){if(!De(this))return f($e(\"read\"));if(!ArrayBuffer.isView(e))return f(new TypeError(\"view must be an array buffer view\"));if(0===e.byteLength)return f(new TypeError(\"view must have non-zero byteLength\"));if(0===e.buffer.byteLength)return f(new TypeError(\"view's buffer must have non-zero byteLength\"));if(e.buffer,void 0===this._ownerReadableStream)return f(k(\"read from\"));let t,r;const o=c(((e,o)=>{t=e,r=o}));return function(e,t,r){const o=e._ownerReadableStream;o._disturbed=!0,\"errored\"===o._state?r._errorSteps(o._storedError):function(e,t,r){const o=e._controlledReadableByteStream;let n=1;t.constructor!==DataView&&(n=t.constructor.BYTES_PER_ELEMENT);const a=t.constructor,i=t.buffer,l={buffer:i,bufferByteLength:i.byteLength,byteOffset:t.byteOffset,byteLength:t.byteLength,bytesFilled:0,elementSize:n,viewConstructor:a,readerType:\"byob\"};if(e._pendingPullIntos.length>0)return e._pendingPullIntos.push(l),void ze(o,r);if(\"closed\"!==o._state){if(e._queueTotalSize>0){if(we(e,l)){const t=me(l);return Re(e),void r._chunkSteps(t)}if(e._closeRequested){const t=new TypeError(\"Insufficient bytes to fill elements in the given buffer\");return We(e,t),void r._errorSteps(t)}}e._pendingPullIntos.push(l),ze(o,r),he(e)}else{const e=new a(l.buffer,l.byteOffset,0);r._closeSteps(e)}}(o._readableStreamController,t,r)}(this,e,{_chunkSteps:e=>t({value:e,done:!1}),_closeSteps:e=>t({value:e,done:!0}),_errorSteps:e=>r(e)}),o}releaseLock(){if(!De(this))throw $e(\"releaseLock\");void 0!==this._ownerReadableStream&&function(e){O(e);const t=new TypeError(\"Reader was released\");Ie(e,t)}(this)}}function De(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_readIntoRequests\")&&e instanceof ReadableStreamBYOBReader)}function Ie(e,t){const r=e._readIntoRequests;e._readIntoRequests=new v,r.forEach((e=>{e._errorSteps(t)}))}function $e(e){return new TypeError(`ReadableStreamBYOBReader.prototype.${e} can only be used on a ReadableStreamBYOBReader`)}function Me(e,t){const{highWaterMark:r}=e;if(void 0===r)return t;if(ie(r)||r<0)throw new RangeError(\"Invalid highWaterMark\");return r}function Ye(e){const{size:t}=e;return t||(()=>1)}function Qe(e,t){D(e,t);const r=null==e?void 0:e.highWaterMark,o=null==e?void 0:e.size;return{highWaterMark:void 0===r?void 0:Q(r),size:void 0===o?void 0:Ne(o,`${t} has member 'size' that`)}}function Ne(e,t){return I(e,t),t=>Q(e(t))}function 
xe(e,t,r){return I(e,r),r=>w(e,t,[r])}function He(e,t,r){return I(e,r),()=>w(e,t,[])}function Ve(e,t,r){return I(e,r),r=>S(e,t,[r])}function Ue(e,t,r){return I(e,r),(r,o)=>w(e,t,[r,o])}Object.defineProperties(ReadableStreamBYOBReader.prototype,{cancel:{enumerable:!0},read:{enumerable:!0},releaseLock:{enumerable:!0},closed:{enumerable:!0}}),a(ReadableStreamBYOBReader.prototype.cancel,\"cancel\"),a(ReadableStreamBYOBReader.prototype.read,\"read\"),a(ReadableStreamBYOBReader.prototype.releaseLock,\"releaseLock\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamBYOBReader.prototype,t.toStringTag,{value:\"ReadableStreamBYOBReader\",configurable:!0});const Ge=\"function\"==typeof AbortController;class WritableStream{constructor(e={},t={}){void 0===e?e=null:$(e,\"First parameter\");const r=Qe(t,\"Second parameter\"),o=function(e,t){D(e,t);const r=null==e?void 0:e.abort,o=null==e?void 0:e.close,n=null==e?void 0:e.start,a=null==e?void 0:e.type,i=null==e?void 0:e.write;return{abort:void 0===r?void 0:xe(r,e,`${t} has member 'abort' that`),close:void 0===o?void 0:He(o,e,`${t} has member 'close' that`),start:void 0===n?void 0:Ve(n,e,`${t} has member 'start' that`),write:void 0===i?void 0:Ue(i,e,`${t} has member 'write' that`),type:a}}(e,\"First parameter\");var n;(n=this)._state=\"writable\",n._storedError=void 0,n._writer=void 0,n._writableStreamController=void 0,n._writeRequests=new v,n._inFlightWriteRequest=void 0,n._closeRequest=void 0,n._inFlightCloseRequest=void 0,n._pendingAbortRequest=void 0,n._backpressure=!1;if(void 0!==o.type)throw new RangeError(\"Invalid type is specified\");const a=Ye(r);!function(e,t,r,o){const n=Object.create(WritableStreamDefaultController.prototype);let a,i,l,s;a=void 0!==t.start?()=>t.start(n):()=>{};i=void 0!==t.write?e=>t.write(e,n):()=>d(void 0);l=void 0!==t.close?()=>t.close():()=>d(void 0);s=void 0!==t.abort?e=>t.abort(e):()=>d(void 0);!function(e,t,r,o,n,a,i,l){t._controlledWritableStream=e,e._writableStreamController=t,t._queue=void 0,t._queueTotalSize=void 0,de(t),t._abortReason=void 0,t._abortController=function(){if(Ge)return new AbortController}(),t._started=!1,t._strategySizeAlgorithm=l,t._strategyHWM=i,t._writeAlgorithm=o,t._closeAlgorithm=n,t._abortAlgorithm=a;const s=ht(t);at(e,s);const u=r();h(d(u),(()=>(t._started=!0,ft(t),null)),(r=>(t._started=!0,et(e,r),null)))}(e,n,a,i,l,s,r,o)}(this,o,Me(r,1),a)}get locked(){if(!Xe(this))throw pt(\"locked\");return Je(this)}abort(e){return Xe(this)?Je(this)?f(new TypeError(\"Cannot abort a stream that already has a writer\")):Ke(this,e):f(pt(\"abort\"))}close(){return Xe(this)?Je(this)?f(new TypeError(\"Cannot close a stream that already has a writer\")):ot(this)?f(new TypeError(\"Cannot close an already-closing stream\")):Ze(this):f(pt(\"close\"))}getWriter(){if(!Xe(this))throw pt(\"getWriter\");return new WritableStreamDefaultWriter(this)}}function Xe(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_writableStreamController\")&&e instanceof WritableStream)}function Je(e){return void 0!==e._writer}function Ke(e,t){var r;if(\"closed\"===e._state||\"errored\"===e._state)return d(void 0);e._writableStreamController._abortReason=t,null===(r=e._writableStreamController._abortController)||void 0===r||r.abort(t);const o=e._state;if(\"closed\"===o||\"errored\"===o)return d(void 0);if(void 0!==e._pendingAbortRequest)return e._pendingAbortRequest._promise;let n=!1;\"erroring\"===o&&(n=!0,t=void 0);const a=c(((r,o)=>{e._pendingAbortRequest={_promise:void 
0,_resolve:r,_reject:o,_reason:t,_wasAlreadyErroring:n}}));return e._pendingAbortRequest._promise=a,n||tt(e,t),a}function Ze(e){const t=e._state;if(\"closed\"===t||\"errored\"===t)return f(new TypeError(`The stream (in ${t} state) is not in the writable state and cannot be closed`));const r=c(((t,r)=>{const o={_resolve:t,_reject:r};e._closeRequest=o})),o=e._writer;var n;return void 0!==o&&e._backpressure&&\"writable\"===t&&Et(o),ce(n=e._writableStreamController,st,0),ft(n),r}function et(e,t){\"writable\"!==e._state?rt(e):tt(e,t)}function tt(e,t){const r=e._writableStreamController;e._state=\"erroring\",e._storedError=t;const o=e._writer;void 0!==o&<(o,t),!function(e){if(void 0===e._inFlightWriteRequest&&void 0===e._inFlightCloseRequest)return!1;return!0}(e)&&r._started&&rt(e)}function rt(e){e._state=\"errored\",e._writableStreamController[T]();const t=e._storedError;if(e._writeRequests.forEach((e=>{e._reject(t)})),e._writeRequests=new v,void 0===e._pendingAbortRequest)return void nt(e);const r=e._pendingAbortRequest;if(e._pendingAbortRequest=void 0,r._wasAlreadyErroring)return r._reject(t),void nt(e);h(e._writableStreamController[R](r._reason),(()=>(r._resolve(),nt(e),null)),(t=>(r._reject(t),nt(e),null)))}function ot(e){return void 0!==e._closeRequest||void 0!==e._inFlightCloseRequest}function nt(e){void 0!==e._closeRequest&&(e._closeRequest._reject(e._storedError),e._closeRequest=void 0);const t=e._writer;void 0!==t&&vt(t,e._storedError)}function at(e,t){const r=e._writer;void 0!==r&&t!==e._backpressure&&(t?function(e){Tt(e)}(r):Et(r)),e._backpressure=t}Object.defineProperties(WritableStream.prototype,{abort:{enumerable:!0},close:{enumerable:!0},getWriter:{enumerable:!0},locked:{enumerable:!0}}),a(WritableStream.prototype.abort,\"abort\"),a(WritableStream.prototype.close,\"close\"),a(WritableStream.prototype.getWriter,\"getWriter\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(WritableStream.prototype,t.toStringTag,{value:\"WritableStream\",configurable:!0});class WritableStreamDefaultWriter{constructor(e){if(M(e,1,\"WritableStreamDefaultWriter\"),function(e,t){if(!Xe(e))throw new TypeError(`${t} is not a WritableStream.`)}(e,\"First parameter\"),Je(e))throw new TypeError(\"This stream has already been locked for exclusive writing by another writer\");this._ownerWritableStream=e,e._writer=this;const t=e._state;if(\"writable\"===t)!ot(e)&&e._backpressure?Tt(this):Ct(this),St(this);else if(\"erroring\"===t)qt(this,e._storedError),St(this);else if(\"closed\"===t)Ct(this),St(r=this),Rt(r);else{const t=e._storedError;qt(this,t),wt(this,t)}var r}get closed(){return it(this)?this._closedPromise:f(yt(\"closed\"))}get desiredSize(){if(!it(this))throw yt(\"desiredSize\");if(void 0===this._ownerWritableStream)throw gt(\"desiredSize\");return function(e){const t=e._ownerWritableStream,r=t._state;if(\"errored\"===r||\"erroring\"===r)return null;if(\"closed\"===r)return 0;return dt(t._writableStreamController)}(this)}get ready(){return it(this)?this._readyPromise:f(yt(\"ready\"))}abort(e){return it(this)?void 0===this._ownerWritableStream?f(gt(\"abort\")):function(e,t){return Ke(e._ownerWritableStream,t)}(this,e):f(yt(\"abort\"))}close(){if(!it(this))return f(yt(\"close\"));const e=this._ownerWritableStream;return void 0===e?f(gt(\"close\")):ot(e)?f(new TypeError(\"Cannot close an already-closing stream\")):Ze(this._ownerWritableStream)}releaseLock(){if(!it(this))throw yt(\"releaseLock\");void 0!==this._ownerWritableStream&&function(e){const t=e._ownerWritableStream,r=new TypeError(\"Writer 
was released and can no longer be used to monitor the stream's closedness\");lt(e,r),function(e,t){\"pending\"===e._closedPromiseState?vt(e,t):function(e,t){wt(e,t)}(e,t)}(e,r),t._writer=void 0,e._ownerWritableStream=void 0}(this)}write(e){return it(this)?void 0===this._ownerWritableStream?f(gt(\"write to\")):function(e,t){const r=e._ownerWritableStream,o=r._writableStreamController,n=function(e,t){try{return e._strategySizeAlgorithm(t)}catch(t){return bt(e,t),1}}(o,t);if(r!==e._ownerWritableStream)return f(gt(\"write to\"));const a=r._state;if(\"errored\"===a)return f(r._storedError);if(ot(r)||\"closed\"===a)return f(new TypeError(\"The stream is closing or closed and cannot be written to\"));if(\"erroring\"===a)return f(r._storedError);const i=function(e){return c(((t,r)=>{const o={_resolve:t,_reject:r};e._writeRequests.push(o)}))}(r);return function(e,t,r){try{ce(e,t,r)}catch(t){return void bt(e,t)}const o=e._controlledWritableStream;if(!ot(o)&&\"writable\"===o._state){at(o,ht(e))}ft(e)}(o,t,n),i}(this,e):f(yt(\"write\"))}}function it(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_ownerWritableStream\")&&e instanceof WritableStreamDefaultWriter)}function lt(e,t){\"pending\"===e._readyPromiseState?Pt(e,t):function(e,t){qt(e,t)}(e,t)}Object.defineProperties(WritableStreamDefaultWriter.prototype,{abort:{enumerable:!0},close:{enumerable:!0},releaseLock:{enumerable:!0},write:{enumerable:!0},closed:{enumerable:!0},desiredSize:{enumerable:!0},ready:{enumerable:!0}}),a(WritableStreamDefaultWriter.prototype.abort,\"abort\"),a(WritableStreamDefaultWriter.prototype.close,\"close\"),a(WritableStreamDefaultWriter.prototype.releaseLock,\"releaseLock\"),a(WritableStreamDefaultWriter.prototype.write,\"write\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(WritableStreamDefaultWriter.prototype,t.toStringTag,{value:\"WritableStreamDefaultWriter\",configurable:!0});const st={};class WritableStreamDefaultController{constructor(){throw new TypeError(\"Illegal constructor\")}get abortReason(){if(!ut(this))throw mt(\"abortReason\");return this._abortReason}get signal(){if(!ut(this))throw mt(\"signal\");if(void 0===this._abortController)throw new TypeError(\"WritableStreamDefaultController.prototype.signal is not supported\");return this._abortController.signal}error(e){if(!ut(this))throw mt(\"error\");\"writable\"===this._controlledWritableStream._state&&_t(this,e)}[R](e){const t=this._abortAlgorithm(e);return ct(this),t}[T](){de(this)}}function ut(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledWritableStream\")&&e instanceof WritableStreamDefaultController)}function ct(e){e._writeAlgorithm=void 0,e._closeAlgorithm=void 0,e._abortAlgorithm=void 0,e._strategySizeAlgorithm=void 0}function dt(e){return e._strategyHWM-e._queueTotalSize}function ft(e){const t=e._controlledWritableStream;if(!e._started)return;if(void 0!==t._inFlightWriteRequest)return;if(\"erroring\"===t._state)return void rt(t);if(0===e._queue.length)return;const r=e._queue.peek().value;r===st?function(e){const t=e._controlledWritableStream;(function(e){e._inFlightCloseRequest=e._closeRequest,e._closeRequest=void 0})(t),ue(e);const r=e._closeAlgorithm();ct(e),h(r,(()=>(function(e){e._inFlightCloseRequest._resolve(void 0),e._inFlightCloseRequest=void 0,\"erroring\"===e._state&&(e._storedError=void 0,void 0!==e._pendingAbortRequest&&(e._pendingAbortRequest._resolve(),e._pendingAbortRequest=void 0)),e._state=\"closed\";const t=e._writer;void 
0!==t&&Rt(t)}(t),null)),(e=>(function(e,t){e._inFlightCloseRequest._reject(t),e._inFlightCloseRequest=void 0,void 0!==e._pendingAbortRequest&&(e._pendingAbortRequest._reject(t),e._pendingAbortRequest=void 0),et(e,t)}(t,e),null)))}(e):function(e,t){const r=e._controlledWritableStream;!function(e){e._inFlightWriteRequest=e._writeRequests.shift()}(r);h(e._writeAlgorithm(t),(()=>{!function(e){e._inFlightWriteRequest._resolve(void 0),e._inFlightWriteRequest=void 0}(r);const t=r._state;if(ue(e),!ot(r)&&\"writable\"===t){const t=ht(e);at(r,t)}return ft(e),null}),(t=>(\"writable\"===r._state&&ct(e),function(e,t){e._inFlightWriteRequest._reject(t),e._inFlightWriteRequest=void 0,et(e,t)}(r,t),null)))}(e,r)}function bt(e,t){\"writable\"===e._controlledWritableStream._state&&_t(e,t)}function ht(e){return dt(e)<=0}function _t(e,t){const r=e._controlledWritableStream;ct(e),tt(r,t)}function pt(e){return new TypeError(`WritableStream.prototype.${e} can only be used on a WritableStream`)}function mt(e){return new TypeError(`WritableStreamDefaultController.prototype.${e} can only be used on a WritableStreamDefaultController`)}function yt(e){return new TypeError(`WritableStreamDefaultWriter.prototype.${e} can only be used on a WritableStreamDefaultWriter`)}function gt(e){return new TypeError(\"Cannot \"+e+\" a stream using a released writer\")}function St(e){e._closedPromise=c(((t,r)=>{e._closedPromise_resolve=t,e._closedPromise_reject=r,e._closedPromiseState=\"pending\"}))}function wt(e,t){St(e),vt(e,t)}function vt(e,t){void 0!==e._closedPromise_reject&&(y(e._closedPromise),e._closedPromise_reject(t),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0,e._closedPromiseState=\"rejected\")}function Rt(e){void 0!==e._closedPromise_resolve&&(e._closedPromise_resolve(void 0),e._closedPromise_resolve=void 0,e._closedPromise_reject=void 0,e._closedPromiseState=\"resolved\")}function Tt(e){e._readyPromise=c(((t,r)=>{e._readyPromise_resolve=t,e._readyPromise_reject=r})),e._readyPromiseState=\"pending\"}function qt(e,t){Tt(e),Pt(e,t)}function Ct(e){Tt(e),Et(e)}function Pt(e,t){void 0!==e._readyPromise_reject&&(y(e._readyPromise),e._readyPromise_reject(t),e._readyPromise_resolve=void 0,e._readyPromise_reject=void 0,e._readyPromiseState=\"rejected\")}function Et(e){void 0!==e._readyPromise_resolve&&(e._readyPromise_resolve(void 0),e._readyPromise_resolve=void 0,e._readyPromise_reject=void 0,e._readyPromiseState=\"fulfilled\")}Object.defineProperties(WritableStreamDefaultController.prototype,{abortReason:{enumerable:!0},signal:{enumerable:!0},error:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(WritableStreamDefaultController.prototype,t.toStringTag,{value:\"WritableStreamDefaultController\",configurable:!0});const Wt=\"undefined\"!=typeof DOMException?DOMException:void 0;const Ot=function(e){if(\"function\"!=typeof e&&\"object\"!=typeof e)return!1;try{return new e,!0}catch(e){return!1}}(Wt)?Wt:function(){const e=function(e,t){this.message=e||\"\",this.name=t||\"Error\",Error.captureStackTrace&&Error.captureStackTrace(this,this.constructor)};return e.prototype=Object.create(Error.prototype),Object.defineProperty(e.prototype,\"constructor\",{value:e,writable:!0,configurable:!0}),e}();function kt(e,t,r,o,n,a){const i=e.getReader(),l=t.getWriter();Ut(e)&&(e._disturbed=!0);let s,u,p,S=!1,w=!1,v=\"readable\",R=\"writable\",T=!1,q=!1;const C=c((e=>{p=e}));let P=Promise.resolve(void 0);return c(((E,W)=>{let O;function k(){if(S)return;const e=c(((e,t)=>{!function 
r(o){o?e():b(function(){if(S)return d(!0);return b(l.ready,(()=>b(i.read(),(e=>!!e.done||(P=l.write(e.value),y(P),!1)))))}(),r,t)}(!1)}));y(e)}function B(){return v=\"closed\",r?L():z((()=>(Xe(t)&&(T=ot(t),R=t._state),T||\"closed\"===R?d(void 0):\"erroring\"===R||\"errored\"===R?f(u):(T=!0,l.close()))),!1,void 0),null}function A(e){return S||(v=\"errored\",s=e,o?L(!0,e):z((()=>l.abort(e)),!0,e)),null}function j(e){return w||(R=\"errored\",u=e,n?L(!0,e):z((()=>i.cancel(e)),!0,e)),null}if(void 0!==a&&(O=()=>{const e=void 0!==a.reason?a.reason:new Ot(\"Aborted\",\"AbortError\"),t=[];o||t.push((()=>\"writable\"===R?l.abort(e):d(void 0))),n||t.push((()=>\"readable\"===v?i.cancel(e):d(void 0))),z((()=>Promise.all(t.map((e=>e())))),!0,e)},a.aborted?O():a.addEventListener(\"abort\",O)),Ut(e)&&(v=e._state,s=e._storedError),Xe(t)&&(R=t._state,u=t._storedError,T=ot(t)),Ut(e)&&Xe(t)&&(q=!0,p()),\"errored\"===v)A(s);else if(\"erroring\"===R||\"errored\"===R)j(u);else if(\"closed\"===v)B();else if(T||\"closed\"===R){const e=new TypeError(\"the destination writable stream closed before all data could be piped to it\");n?L(!0,e):z((()=>i.cancel(e)),!0,e)}function z(e,t,r){function o(){return\"writable\"!==R||T?n():_(function(){let e;return d(function t(){if(e!==P)return e=P,m(P,t,t)}())}(),n),null}function n(){return e?h(e(),(()=>F(t,r)),(e=>F(!0,e))):F(t,r),null}S||(S=!0,q?o():_(C,o))}function L(e,t){z(void 0,e,t)}function F(e,t){return w=!0,l.releaseLock(),i.releaseLock(),void 0!==a&&a.removeEventListener(\"abort\",O),e?W(t):E(void 0),null}S||(h(i.closed,B,A),h(l.closed,(function(){return w||(R=\"closed\"),null}),j)),q?k():g((()=>{q=!0,p(),k()}))}))}function Bt(e,t){return function(e){try{return e.getReader({mode:\"byob\"}).releaseLock(),!0}catch(e){return!1}}(e)?function(e){let t,r,o,n,a,i=e.getReader(),l=!1,s=!1,u=!1,f=!1,b=!1,_=!1;const m=c((e=>{a=e}));function y(e){p(e.closed,(t=>(e!==i||(o.error(t),n.error(t),b&&_||a(void 0)),null)))}function g(){l&&(i.releaseLock(),i=e.getReader(),y(i),l=!1),h(i.read(),(e=>{var t,r;if(u=!1,f=!1,e.done)return b||o.close(),_||n.close(),null===(t=o.byobRequest)||void 0===t||t.respond(0),null===(r=n.byobRequest)||void 0===r||r.respond(0),b&&_||a(void 0),null;const l=e.value,c=l;let d=l;if(!b&&!_)try{d=se(l)}catch(e){return o.error(e),n.error(e),a(i.cancel(e)),null}return b||o.enqueue(c),_||n.enqueue(d),s=!1,u?w():f&&v(),null}),(()=>(s=!1,null)))}function S(t,r){l||(i.releaseLock(),i=e.getReader({mode:\"byob\"}),y(i),l=!0);const c=r?n:o,d=r?o:n;h(i.read(t),(e=>{var t;u=!1,f=!1;const o=r?_:b,n=r?b:_;if(e.done){o||c.close(),n||d.close();const r=e.value;return void 0!==r&&(o||c.byobRequest.respondWithNewView(r),n||null===(t=d.byobRequest)||void 0===t||t.respond(0)),o&&n||a(void 0),null}const l=e.value;if(n)o||c.byobRequest.respondWithNewView(l);else{let e;try{e=se(l)}catch(e){return c.error(e),d.error(e),a(i.cancel(e)),null}o||c.byobRequest.respondWithNewView(l),d.enqueue(e)}return s=!1,u?w():f&&v(),null}),(()=>(s=!1,null)))}function w(){if(s)return u=!0,d(void 0);s=!0;const e=o.byobRequest;return null===e?g():S(e.view,!1),d(void 0)}function v(){if(s)return f=!0,d(void 0);s=!0;const e=n.byobRequest;return null===e?g():S(e.view,!0),d(void 0)}function R(e){if(b=!0,t=e,_){const e=[t,r],o=i.cancel(e);a(o)}return m}function T(e){if(_=!0,r=e,b){const e=[t,r],o=i.cancel(e);a(o)}return m}const q=new ReadableStream({type:\"bytes\",start(e){o=e},pull:w,cancel:R}),C=new ReadableStream({type:\"bytes\",start(e){n=e},pull:v,cancel:T});return y(i),[q,C]}(e):function(e,t){const 
r=e.getReader();let o,n,a,i,l,s=!1,u=!1,f=!1,b=!1;const _=c((e=>{l=e}));function m(){return s?(u=!0,d(void 0)):(s=!0,h(r.read(),(e=>{if(u=!1,e.done)return f||a.close(),b||i.close(),f&&b||l(void 0),null;const t=e.value,r=t,o=t;return f||a.enqueue(r),b||i.enqueue(o),s=!1,u&&m(),null}),(()=>(s=!1,null))),d(void 0))}function y(e){if(f=!0,o=e,b){const e=[o,n],t=r.cancel(e);l(t)}return _}function g(e){if(b=!0,n=e,f){const e=[o,n],t=r.cancel(e);l(t)}return _}const S=new ReadableStream({start(e){a=e},pull:m,cancel:y}),w=new ReadableStream({start(e){i=e},pull:m,cancel:g});return p(r.closed,(e=>(a.error(e),i.error(e),f&&b||l(void 0),null))),[S,w]}(e)}class ReadableStreamDefaultController{constructor(){throw new TypeError(\"Illegal constructor\")}get desiredSize(){if(!At(this))throw $t(\"desiredSize\");return Ft(this)}close(){if(!At(this))throw $t(\"close\");if(!Dt(this))throw new TypeError(\"The stream is not in a state that permits close\");!function(e){if(!Dt(e))return;const t=e._controlledReadableStream;e._closeRequested=!0,0===e._queue.length&&(zt(e),Jt(t))}(this)}enqueue(e){if(!At(this))throw $t(\"enqueue\");if(!Dt(this))throw new TypeError(\"The stream is not in a state that permits enqueue\");return function(e,t){if(!Dt(e))return;const r=e._controlledReadableStream;if(Gt(r)&&J(r)>0)X(r,t,!1);else{let r;try{r=e._strategySizeAlgorithm(t)}catch(t){throw Lt(e,t),t}try{ce(e,t,r)}catch(t){throw Lt(e,t),t}}jt(e)}(this,e)}error(e){if(!At(this))throw $t(\"error\");Lt(this,e)}[q](e){de(this);const t=this._cancelAlgorithm(e);return zt(this),t}[C](e){const t=this._controlledReadableStream;if(this._queue.length>0){const r=ue(this);this._closeRequested&&0===this._queue.length?(zt(this),Jt(t)):jt(this),e._chunkSteps(r)}else G(t,e),jt(this)}[P](){}}function At(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledReadableStream\")&&e instanceof ReadableStreamDefaultController)}function jt(e){const t=function(e){const t=e._controlledReadableStream;if(!Dt(e))return!1;if(!e._started)return!1;if(Gt(t)&&J(t)>0)return!0;if(Ft(e)>0)return!0;return!1}(e);if(!t)return;if(e._pulling)return void(e._pullAgain=!0);e._pulling=!0;h(e._pullAlgorithm(),(()=>(e._pulling=!1,e._pullAgain&&(e._pullAgain=!1,jt(e)),null)),(t=>(Lt(e,t),null)))}function zt(e){e._pullAlgorithm=void 0,e._cancelAlgorithm=void 0,e._strategySizeAlgorithm=void 0}function Lt(e,t){const r=e._controlledReadableStream;\"readable\"===r._state&&(de(e),zt(e),Kt(r,t))}function Ft(e){const t=e._controlledReadableStream._state;return\"errored\"===t?null:\"closed\"===t?0:e._strategyHWM-e._queueTotalSize}function Dt(e){return!e._closeRequested&&\"readable\"===e._controlledReadableStream._state}function It(e,t,r,o){const n=Object.create(ReadableStreamDefaultController.prototype);let a,i,l;a=void 0!==t.start?()=>t.start(n):()=>{},i=void 0!==t.pull?()=>t.pull(n):()=>d(void 0),l=void 0!==t.cancel?e=>t.cancel(e):()=>d(void 0),function(e,t,r,o,n,a,i){t._controlledReadableStream=e,t._queue=void 0,t._queueTotalSize=void 0,de(t),t._started=!1,t._closeRequested=!1,t._pullAgain=!1,t._pulling=!1,t._strategySizeAlgorithm=i,t._strategyHWM=a,t._pullAlgorithm=o,t._cancelAlgorithm=n,e._readableStreamController=t,h(d(r()),(()=>(t._started=!0,jt(t),null)),(e=>(Lt(t,e),null)))}(e,n,a,i,l,r,o)}function $t(e){return new TypeError(`ReadableStreamDefaultController.prototype.${e} can only be used on a ReadableStreamDefaultController`)}function Mt(e,t,r){return I(e,r),r=>w(e,t,[r])}function Yt(e,t,r){return I(e,r),r=>w(e,t,[r])}function Qt(e,t,r){return 
I(e,r),r=>S(e,t,[r])}function Nt(e,t){if(\"bytes\"!==(e=`${e}`))throw new TypeError(`${t} '${e}' is not a valid enumeration value for ReadableStreamType`);return e}function xt(e,t){if(\"byob\"!==(e=`${e}`))throw new TypeError(`${t} '${e}' is not a valid enumeration value for ReadableStreamReaderMode`);return e}function Ht(e,t){D(e,t);const r=null==e?void 0:e.preventAbort,o=null==e?void 0:e.preventCancel,n=null==e?void 0:e.preventClose,a=null==e?void 0:e.signal;return void 0!==a&&function(e,t){if(!function(e){if(\"object\"!=typeof e||null===e)return!1;try{return\"boolean\"==typeof e.aborted}catch(e){return!1}}(e))throw new TypeError(`${t} is not an AbortSignal.`)}(a,`${t} has member 'signal' that`),{preventAbort:Boolean(r),preventCancel:Boolean(o),preventClose:Boolean(n),signal:a}}function Vt(e,t){D(e,t);const r=null==e?void 0:e.readable;Y(r,\"readable\",\"ReadableWritablePair\"),function(e,t){if(!H(e))throw new TypeError(`${t} is not a ReadableStream.`)}(r,`${t} has member 'readable' that`);const o=null==e?void 0:e.writable;return Y(o,\"writable\",\"ReadableWritablePair\"),function(e,t){if(!V(e))throw new TypeError(`${t} is not a WritableStream.`)}(o,`${t} has member 'writable' that`),{readable:r,writable:o}}Object.defineProperties(ReadableStreamDefaultController.prototype,{close:{enumerable:!0},enqueue:{enumerable:!0},error:{enumerable:!0},desiredSize:{enumerable:!0}}),a(ReadableStreamDefaultController.prototype.close,\"close\"),a(ReadableStreamDefaultController.prototype.enqueue,\"enqueue\"),a(ReadableStreamDefaultController.prototype.error,\"error\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStreamDefaultController.prototype,t.toStringTag,{value:\"ReadableStreamDefaultController\",configurable:!0});class ReadableStream{constructor(e={},t={}){void 0===e?e=null:$(e,\"First parameter\");const r=Qe(t,\"Second parameter\"),o=function(e,t){D(e,t);const r=e,o=null==r?void 0:r.autoAllocateChunkSize,n=null==r?void 0:r.cancel,a=null==r?void 0:r.pull,i=null==r?void 0:r.start,l=null==r?void 0:r.type;return{autoAllocateChunkSize:void 0===o?void 0:x(o,`${t} has member 'autoAllocateChunkSize' that`),cancel:void 0===n?void 0:Mt(n,r,`${t} has member 'cancel' that`),pull:void 0===a?void 0:Yt(a,r,`${t} has member 'pull' that`),start:void 0===i?void 0:Qt(i,r,`${t} has member 'start' that`),type:void 0===l?void 0:Nt(l,`${t} has member 'type' that`)}}(e,\"First parameter\");var n;if((n=this)._state=\"readable\",n._reader=void 0,n._storedError=void 0,n._disturbed=!1,\"bytes\"===o.type){if(void 0!==r.size)throw new RangeError(\"The strategy for a byte stream cannot have a size function\");Be(this,o,Me(r,0))}else{const e=Ye(r);It(this,o,Me(r,1),e)}}get locked(){if(!Ut(this))throw Zt(\"locked\");return Gt(this)}cancel(e){return Ut(this)?Gt(this)?f(new TypeError(\"Cannot cancel a stream that already has a reader\")):Xt(this,e):f(Zt(\"cancel\"))}getReader(e){if(!Ut(this))throw Zt(\"getReader\");return void 0===function(e,t){D(e,t);const r=null==e?void 0:e.mode;return{mode:void 0===r?void 0:xt(r,`${t} has member 'mode' that`)}}(e,\"First parameter\").mode?new ReadableStreamDefaultReader(this):function(e){return new ReadableStreamBYOBReader(e)}(this)}pipeThrough(e,t={}){if(!H(this))throw Zt(\"pipeThrough\");M(e,1,\"pipeThrough\");const r=Vt(e,\"First parameter\"),o=Ht(t,\"Second parameter\");if(this.locked)throw new TypeError(\"ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream\");if(r.writable.locked)throw new TypeError(\"ReadableStream.prototype.pipeThrough cannot 
be used on a locked WritableStream\");return y(kt(this,r.writable,o.preventClose,o.preventAbort,o.preventCancel,o.signal)),r.readable}pipeTo(e,t={}){if(!H(this))return f(Zt(\"pipeTo\"));if(void 0===e)return f(\"Parameter 1 is required in 'pipeTo'.\");if(!V(e))return f(new TypeError(\"ReadableStream.prototype.pipeTo's first argument must be a WritableStream\"));let r;try{r=Ht(t,\"Second parameter\")}catch(e){return f(e)}return this.locked?f(new TypeError(\"ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream\")):e.locked?f(new TypeError(\"ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream\")):kt(this,e,r.preventClose,r.preventAbort,r.preventCancel,r.signal)}tee(){if(!H(this))throw Zt(\"tee\");if(this.locked)throw new TypeError(\"Cannot tee a stream that already has a reader\");return Bt(this)}values(e){if(!H(this))throw Zt(\"values\");return function(e,t){const r=e.getReader(),o=new re(r,t),n=Object.create(oe);return n._asyncIteratorImpl=o,n}(this,function(e,t){D(e,t);const r=null==e?void 0:e.preventCancel;return{preventCancel:Boolean(r)}}(e,\"First parameter\").preventCancel)}}function Ut(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_readableStreamController\")&&e instanceof ReadableStream)}function Gt(e){return void 0!==e._reader}function Xt(e,t){if(e._disturbed=!0,\"closed\"===e._state)return d(void 0);if(\"errored\"===e._state)return f(e._storedError);Jt(e);const o=e._reader;if(void 0!==o&&De(o)){const e=o._readIntoRequests;o._readIntoRequests=new v,e.forEach((e=>{e._closeSteps(void 0)}))}return m(e._readableStreamController[q](t),r)}function Jt(e){e._state=\"closed\";const t=e._reader;if(void 0!==t&&(z(t),Z(t))){const e=t._readRequests;t._readRequests=new v,e.forEach((e=>{e._closeSteps()}))}}function Kt(e,t){e._state=\"errored\",e._storedError=t;const r=e._reader;void 0!==r&&(j(r,t),Z(r)?ee(r,t):Ie(r,t))}function Zt(e){return new TypeError(`ReadableStream.prototype.${e} can only be used on a ReadableStream`)}function er(e,t){D(e,t);const r=null==e?void 0:e.highWaterMark;return Y(r,\"highWaterMark\",\"QueuingStrategyInit\"),{highWaterMark:Q(r)}}Object.defineProperties(ReadableStream.prototype,{cancel:{enumerable:!0},getReader:{enumerable:!0},pipeThrough:{enumerable:!0},pipeTo:{enumerable:!0},tee:{enumerable:!0},values:{enumerable:!0},locked:{enumerable:!0}}),a(ReadableStream.prototype.cancel,\"cancel\"),a(ReadableStream.prototype.getReader,\"getReader\"),a(ReadableStream.prototype.pipeThrough,\"pipeThrough\"),a(ReadableStream.prototype.pipeTo,\"pipeTo\"),a(ReadableStream.prototype.tee,\"tee\"),a(ReadableStream.prototype.values,\"values\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ReadableStream.prototype,t.toStringTag,{value:\"ReadableStream\",configurable:!0}),\"symbol\"==typeof t.asyncIterator&&Object.defineProperty(ReadableStream.prototype,t.asyncIterator,{value:ReadableStream.prototype.values,writable:!0,configurable:!0});const tr=e=>e.byteLength;a(tr,\"size\");class ByteLengthQueuingStrategy{constructor(e){M(e,1,\"ByteLengthQueuingStrategy\"),e=er(e,\"First parameter\"),this._byteLengthQueuingStrategyHighWaterMark=e.highWaterMark}get highWaterMark(){if(!or(this))throw rr(\"highWaterMark\");return this._byteLengthQueuingStrategyHighWaterMark}get size(){if(!or(this))throw rr(\"size\");return tr}}function rr(e){return new TypeError(`ByteLengthQueuingStrategy.prototype.${e} can only be used on a ByteLengthQueuingStrategy`)}function 
or(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_byteLengthQueuingStrategyHighWaterMark\")&&e instanceof ByteLengthQueuingStrategy)}Object.defineProperties(ByteLengthQueuingStrategy.prototype,{highWaterMark:{enumerable:!0},size:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(ByteLengthQueuingStrategy.prototype,t.toStringTag,{value:\"ByteLengthQueuingStrategy\",configurable:!0});const nr=()=>1;a(nr,\"size\");class CountQueuingStrategy{constructor(e){M(e,1,\"CountQueuingStrategy\"),e=er(e,\"First parameter\"),this._countQueuingStrategyHighWaterMark=e.highWaterMark}get highWaterMark(){if(!ir(this))throw ar(\"highWaterMark\");return this._countQueuingStrategyHighWaterMark}get size(){if(!ir(this))throw ar(\"size\");return nr}}function ar(e){return new TypeError(`CountQueuingStrategy.prototype.${e} can only be used on a CountQueuingStrategy`)}function ir(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_countQueuingStrategyHighWaterMark\")&&e instanceof CountQueuingStrategy)}function lr(e,t,r){return I(e,r),r=>w(e,t,[r])}function sr(e,t,r){return I(e,r),r=>S(e,t,[r])}function ur(e,t,r){return I(e,r),(r,o)=>w(e,t,[r,o])}Object.defineProperties(CountQueuingStrategy.prototype,{highWaterMark:{enumerable:!0},size:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(CountQueuingStrategy.prototype,t.toStringTag,{value:\"CountQueuingStrategy\",configurable:!0});class TransformStream{constructor(e={},t={},r={}){void 0===e&&(e=null);const o=Qe(t,\"Second parameter\"),n=Qe(r,\"Third parameter\"),a=function(e,t){D(e,t);const r=null==e?void 0:e.flush,o=null==e?void 0:e.readableType,n=null==e?void 0:e.start,a=null==e?void 0:e.transform,i=null==e?void 0:e.writableType;return{flush:void 0===r?void 0:lr(r,e,`${t} has member 'flush' that`),readableType:o,start:void 0===n?void 0:sr(n,e,`${t} has member 'start' that`),transform:void 0===a?void 0:ur(a,e,`${t} has member 'transform' that`),writableType:i}}(e,\"First parameter\");if(void 0!==a.readableType)throw new RangeError(\"Invalid readableType specified\");if(void 0!==a.writableType)throw new RangeError(\"Invalid writableType specified\");const i=Me(n,0),l=Ye(n),s=Me(o,1),u=Ye(o);let b;!function(e,t,r,o,n,a){function i(){return t}function l(t){return function(e,t){const r=e._transformStreamController;if(e._backpressure){return m(e._backpressureChangePromise,(()=>{if(\"erroring\"===(Xe(e._writable)?e._writable._state:e._writableState))throw Xe(e._writable)?e._writable._storedError:e._writableStoredError;return mr(r,t)}))}return mr(r,t)}(e,t)}function s(t){return function(e,t){return dr(e,t),d(void 0)}(e,t)}function u(){return function(e){const t=e._transformStreamController,r=t._flushAlgorithm();return _r(t),m(r,(()=>{if(\"errored\"===e._readableState)throw e._readableStoredError;Sr(e)&&wr(e)}),(t=>{throw dr(e,t),e._readableStoredError}))}(e)}function c(){return function(e){return br(e,!1),e._backpressureChangePromise}(e)}function f(t){return fr(e,t),d(void 0)}e._writableState=\"writable\",e._writableStoredError=void 0,e._writableHasInFlightOperation=!1,e._writableStarted=!1,e._writable=function(e,t,r,o,n,a,i){return new WritableStream({start(r){e._writableController=r;try{const t=r.signal;void 0!==t&&t.addEventListener(\"abort\",(()=>{\"writable\"===e._writableState&&(e._writableState=\"erroring\",t.reason&&(e._writableStoredError=t.reason))}))}catch(e){}return m(t(),(()=>(e._writableStarted=!0,Pr(e),null)),(t=>{throw 
e._writableStarted=!0,Tr(e,t),t}))},write:t=>(function(e){e._writableHasInFlightOperation=!0}(e),m(r(t),(()=>(function(e){e._writableHasInFlightOperation=!1}(e),Pr(e),null)),(t=>{throw function(e,t){e._writableHasInFlightOperation=!1,Tr(e,t)}(e,t),t}))),close:()=>(function(e){e._writableHasInFlightOperation=!0}(e),m(o(),(()=>(function(e){e._writableHasInFlightOperation=!1;\"erroring\"===e._writableState&&(e._writableStoredError=void 0);e._writableState=\"closed\"}(e),null)),(t=>{throw function(e,t){e._writableHasInFlightOperation=!1,e._writableState,Tr(e,t)}(e,t),t}))),abort:t=>(e._writableState=\"errored\",e._writableStoredError=t,n(t))},{highWaterMark:a,size:i})}(e,i,l,u,s,r,o),e._readableState=\"readable\",e._readableStoredError=void 0,e._readableCloseRequested=!1,e._readablePulling=!1,e._readable=function(e,t,r,o,n,a){return new ReadableStream({start:r=>(e._readableController=r,t().catch((t=>{vr(e,t)}))),pull:()=>(e._readablePulling=!0,r().catch((t=>{vr(e,t)}))),cancel:t=>(e._readableState=\"closed\",o(t))},{highWaterMark:n,size:a})}(e,i,c,f,n,a),e._backpressure=void 0,e._backpressureChangePromise=void 0,e._backpressureChangePromise_resolve=void 0,br(e,!0),e._transformStreamController=void 0}(this,c((e=>{b=e})),s,u,i,l),function(e,t){const r=Object.create(TransformStreamDefaultController.prototype);let o,n;o=void 0!==t.transform?e=>t.transform(e,r):e=>{try{return pr(r,e),d(void 0)}catch(e){return f(e)}};n=void 0!==t.flush?()=>t.flush(r):()=>d(void 0);!function(e,t,r,o){t._controlledTransformStream=e,e._transformStreamController=t,t._transformAlgorithm=r,t._flushAlgorithm=o}(e,r,o,n)}(this,a),void 0!==a.start?b(a.start(this._transformStreamController)):b(void 0)}get readable(){if(!cr(this))throw gr(\"readable\");return this._readable}get writable(){if(!cr(this))throw gr(\"writable\");return this._writable}}function cr(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_transformStreamController\")&&e instanceof TransformStream)}function dr(e,t){vr(e,t),fr(e,t)}function fr(e,t){_r(e._transformStreamController),function(e,t){e._writableController.error(t);\"writable\"===e._writableState&&qr(e,t)}(e,t),e._backpressure&&br(e,!1)}function br(e,t){void 0!==e._backpressureChangePromise&&e._backpressureChangePromise_resolve(),e._backpressureChangePromise=c((t=>{e._backpressureChangePromise_resolve=t})),e._backpressure=t}Object.defineProperties(TransformStream.prototype,{readable:{enumerable:!0},writable:{enumerable:!0}}),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(TransformStream.prototype,t.toStringTag,{value:\"TransformStream\",configurable:!0});class TransformStreamDefaultController{constructor(){throw new TypeError(\"Illegal constructor\")}get desiredSize(){if(!hr(this))throw yr(\"desiredSize\");return Rr(this._controlledTransformStream)}enqueue(e){if(!hr(this))throw yr(\"enqueue\");pr(this,e)}error(e){if(!hr(this))throw yr(\"error\");var t;t=e,dr(this._controlledTransformStream,t)}terminate(){if(!hr(this))throw yr(\"terminate\");!function(e){const t=e._controlledTransformStream;Sr(t)&&wr(t);const r=new TypeError(\"TransformStream terminated\");fr(t,r)}(this)}}function hr(e){return!!o(e)&&(!!Object.prototype.hasOwnProperty.call(e,\"_controlledTransformStream\")&&e instanceof TransformStreamDefaultController)}function _r(e){e._transformAlgorithm=void 0,e._flushAlgorithm=void 0}function pr(e,t){const r=e._controlledTransformStream;if(!Sr(r))throw new TypeError(\"Readable side is not in a state that permits 
enqueue\");try{!function(e,t){e._readablePulling=!1;try{e._readableController.enqueue(t)}catch(t){throw vr(e,t),t}}(r,t)}catch(e){throw fr(r,e),r._readableStoredError}const o=function(e){return!function(e){if(!Sr(e))return!1;if(e._readablePulling)return!0;if(Rr(e)>0)return!0;return!1}(e)}(r);o!==r._backpressure&&br(r,!0)}function mr(e,t){return m(e._transformAlgorithm(t),void 0,(t=>{throw dr(e._controlledTransformStream,t),t}))}function yr(e){return new TypeError(`TransformStreamDefaultController.prototype.${e} can only be used on a TransformStreamDefaultController`)}function gr(e){return new TypeError(`TransformStream.prototype.${e} can only be used on a TransformStream`)}function Sr(e){return!e._readableCloseRequested&&\"readable\"===e._readableState}function wr(e){e._readableState=\"closed\",e._readableCloseRequested=!0,e._readableController.close()}function vr(e,t){\"readable\"===e._readableState&&(e._readableState=\"errored\",e._readableStoredError=t),e._readableController.error(t)}function Rr(e){return e._readableController.desiredSize}function Tr(e,t){\"writable\"!==e._writableState?Cr(e):qr(e,t)}function qr(e,t){e._writableState=\"erroring\",e._writableStoredError=t,!function(e){return e._writableHasInFlightOperation}(e)&&e._writableStarted&&Cr(e)}function Cr(e){e._writableState=\"errored\"}function Pr(e){\"erroring\"===e._writableState&&Cr(e)}Object.defineProperties(TransformStreamDefaultController.prototype,{enqueue:{enumerable:!0},error:{enumerable:!0},terminate:{enumerable:!0},desiredSize:{enumerable:!0}}),a(TransformStreamDefaultController.prototype.enqueue,\"enqueue\"),a(TransformStreamDefaultController.prototype.error,\"error\"),a(TransformStreamDefaultController.prototype.terminate,\"terminate\"),\"symbol\"==typeof t.toStringTag&&Object.defineProperty(TransformStreamDefaultController.prototype,t.toStringTag,{value:\"TransformStreamDefaultController\",configurable:!0}),e.ByteLengthQueuingStrategy=ByteLengthQueuingStrategy,e.CountQueuingStrategy=CountQueuingStrategy,e.ReadableByteStreamController=ReadableByteStreamController,e.ReadableStream=ReadableStream,e.ReadableStreamBYOBReader=ReadableStreamBYOBReader,e.ReadableStreamBYOBRequest=ReadableStreamBYOBRequest,e.ReadableStreamDefaultController=ReadableStreamDefaultController,e.ReadableStreamDefaultReader=ReadableStreamDefaultReader,e.TransformStream=TransformStream,e.TransformStreamDefaultController=TransformStreamDefaultController,e.WritableStream=WritableStream,e.WritableStreamDefaultController=WritableStreamDefaultController,e.WritableStreamDefaultWriter=WritableStreamDefaultWriter,Object.defineProperty(e,\"__esModule\",{value:!0})}));\n","/*!\n * humanize-ms - index.js\n * Copyright(c) 2014 dead_horse \n * MIT Licensed\n */\n\n'use strict';\n\n/**\n * Module dependencies.\n */\n\nvar util = require('util');\nvar ms = require('ms');\n\nmodule.exports = function (t) {\n if (typeof t === 'number') return t;\n var r = ms(t);\n if (r === undefined) {\n var err = new Error(util.format('humanize-ms(%j) result undefined', t));\n console.warn(err.stack);\n }\n return r;\n};\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\n/*!\n * is-plain-object \n *\n * Copyright (c) 2014-2017, Jon Schlinkert.\n * Released under the MIT License.\n */\n\nfunction isObject(o) {\n return Object.prototype.toString.call(o) === '[object Object]';\n}\n\nfunction isPlainObject(o) {\n var ctor,prot;\n\n if (isObject(o) === false) return false;\n\n // If has modified constructor\n ctor = o.constructor;\n if (ctor === 
undefined) return true;\n\n // If has modified prototype\n prot = ctor.prototype;\n if (isObject(prot) === false) return false;\n\n // If constructor does not have an Object-specific method\n if (prot.hasOwnProperty('isPrototypeOf') === false) {\n return false;\n }\n\n // Most likely a plain Object\n return true;\n}\n\nexports.isPlainObject = isPlainObject;\n","/**\n * Helpers.\n */\n\nvar s = 1000;\nvar m = s * 60;\nvar h = m * 60;\nvar d = h * 24;\nvar w = d * 7;\nvar y = d * 365.25;\n\n/**\n * Parse or format the given `val`.\n *\n * Options:\n *\n * - `long` verbose formatting [false]\n *\n * @param {String|Number} val\n * @param {Object} [options]\n * @throws {Error} throw an error if val is not a non-empty string or a number\n * @return {String|Number}\n * @api public\n */\n\nmodule.exports = function (val, options) {\n options = options || {};\n var type = typeof val;\n if (type === 'string' && val.length > 0) {\n return parse(val);\n } else if (type === 'number' && isFinite(val)) {\n return options.long ? fmtLong(val) : fmtShort(val);\n }\n throw new Error(\n 'val is not a non-empty string or a valid number. val=' +\n JSON.stringify(val)\n );\n};\n\n/**\n * Parse the given `str` and return milliseconds.\n *\n * @param {String} str\n * @return {Number}\n * @api private\n */\n\nfunction parse(str) {\n str = String(str);\n if (str.length > 100) {\n return;\n }\n var match = /^(-?(?:\\d+)?\\.?\\d+) *(milliseconds?|msecs?|ms|seconds?|secs?|s|minutes?|mins?|m|hours?|hrs?|h|days?|d|weeks?|w|years?|yrs?|y)?$/i.exec(\n str\n );\n if (!match) {\n return;\n }\n var n = parseFloat(match[1]);\n var type = (match[2] || 'ms').toLowerCase();\n switch (type) {\n case 'years':\n case 'year':\n case 'yrs':\n case 'yr':\n case 'y':\n return n * y;\n case 'weeks':\n case 'week':\n case 'w':\n return n * w;\n case 'days':\n case 'day':\n case 'd':\n return n * d;\n case 'hours':\n case 'hour':\n case 'hrs':\n case 'hr':\n case 'h':\n return n * h;\n case 'minutes':\n case 'minute':\n case 'mins':\n case 'min':\n case 'm':\n return n * m;\n case 'seconds':\n case 'second':\n case 'secs':\n case 'sec':\n case 's':\n return n * s;\n case 'milliseconds':\n case 'millisecond':\n case 'msecs':\n case 'msec':\n case 'ms':\n return n;\n default:\n return undefined;\n }\n}\n\n/**\n * Short format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtShort(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return Math.round(ms / d) + 'd';\n }\n if (msAbs >= h) {\n return Math.round(ms / h) + 'h';\n }\n if (msAbs >= m) {\n return Math.round(ms / m) + 'm';\n }\n if (msAbs >= s) {\n return Math.round(ms / s) + 's';\n }\n return ms + 'ms';\n}\n\n/**\n * Long format for `ms`.\n *\n * @param {Number} ms\n * @return {String}\n * @api private\n */\n\nfunction fmtLong(ms) {\n var msAbs = Math.abs(ms);\n if (msAbs >= d) {\n return plural(ms, msAbs, d, 'day');\n }\n if (msAbs >= h) {\n return plural(ms, msAbs, h, 'hour');\n }\n if (msAbs >= m) {\n return plural(ms, msAbs, m, 'minute');\n }\n if (msAbs >= s) {\n return plural(ms, msAbs, s, 'second');\n }\n return ms + ' ms';\n}\n\n/**\n * Pluralization helper.\n */\n\nfunction plural(ms, msAbs, n, name) {\n var isPlural = msAbs >= n * 1.5;\n return Math.round(ms / n) + ' ' + name + (isPlural ? 's' : '');\n}\n","/*! node-domexception. MIT License. 
Jimmy Wärting */\n\nif (!globalThis.DOMException) {\n try {\n const { MessageChannel } = require('worker_threads'),\n port = new MessageChannel().port1,\n ab = new ArrayBuffer()\n port.postMessage(ab, [ab, ab])\n } catch (err) {\n err.constructor.name === 'DOMException' && (\n globalThis.DOMException = err.constructor\n )\n }\n}\n\nmodule.exports = globalThis.DOMException\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }\n\nvar Stream = _interopDefault(require('stream'));\nvar http = _interopDefault(require('http'));\nvar Url = _interopDefault(require('url'));\nvar whatwgUrl = _interopDefault(require('whatwg-url'));\nvar https = _interopDefault(require('https'));\nvar zlib = _interopDefault(require('zlib'));\n\n// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js\n\n// fix for \"Readable\" isn't a named export issue\nconst Readable = Stream.Readable;\n\nconst BUFFER = Symbol('buffer');\nconst TYPE = Symbol('type');\n\nclass Blob {\n\tconstructor() {\n\t\tthis[TYPE] = '';\n\n\t\tconst blobParts = arguments[0];\n\t\tconst options = arguments[1];\n\n\t\tconst buffers = [];\n\t\tlet size = 0;\n\n\t\tif (blobParts) {\n\t\t\tconst a = blobParts;\n\t\t\tconst length = Number(a.length);\n\t\t\tfor (let i = 0; i < length; i++) {\n\t\t\t\tconst element = a[i];\n\t\t\t\tlet buffer;\n\t\t\t\tif (element instanceof Buffer) {\n\t\t\t\t\tbuffer = element;\n\t\t\t\t} else if (ArrayBuffer.isView(element)) {\n\t\t\t\t\tbuffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength);\n\t\t\t\t} else if (element instanceof ArrayBuffer) {\n\t\t\t\t\tbuffer = Buffer.from(element);\n\t\t\t\t} else if (element instanceof Blob) {\n\t\t\t\t\tbuffer = element[BUFFER];\n\t\t\t\t} else {\n\t\t\t\t\tbuffer = Buffer.from(typeof element === 'string' ? 
element : String(element));\n\t\t\t\t}\n\t\t\t\tsize += buffer.length;\n\t\t\t\tbuffers.push(buffer);\n\t\t\t}\n\t\t}\n\n\t\tthis[BUFFER] = Buffer.concat(buffers);\n\n\t\tlet type = options && options.type !== undefined && String(options.type).toLowerCase();\n\t\tif (type && !/[^\\u0020-\\u007E]/.test(type)) {\n\t\t\tthis[TYPE] = type;\n\t\t}\n\t}\n\tget size() {\n\t\treturn this[BUFFER].length;\n\t}\n\tget type() {\n\t\treturn this[TYPE];\n\t}\n\ttext() {\n\t\treturn Promise.resolve(this[BUFFER].toString());\n\t}\n\tarrayBuffer() {\n\t\tconst buf = this[BUFFER];\n\t\tconst ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\treturn Promise.resolve(ab);\n\t}\n\tstream() {\n\t\tconst readable = new Readable();\n\t\treadable._read = function () {};\n\t\treadable.push(this[BUFFER]);\n\t\treadable.push(null);\n\t\treturn readable;\n\t}\n\ttoString() {\n\t\treturn '[object Blob]';\n\t}\n\tslice() {\n\t\tconst size = this.size;\n\n\t\tconst start = arguments[0];\n\t\tconst end = arguments[1];\n\t\tlet relativeStart, relativeEnd;\n\t\tif (start === undefined) {\n\t\t\trelativeStart = 0;\n\t\t} else if (start < 0) {\n\t\t\trelativeStart = Math.max(size + start, 0);\n\t\t} else {\n\t\t\trelativeStart = Math.min(start, size);\n\t\t}\n\t\tif (end === undefined) {\n\t\t\trelativeEnd = size;\n\t\t} else if (end < 0) {\n\t\t\trelativeEnd = Math.max(size + end, 0);\n\t\t} else {\n\t\t\trelativeEnd = Math.min(end, size);\n\t\t}\n\t\tconst span = Math.max(relativeEnd - relativeStart, 0);\n\n\t\tconst buffer = this[BUFFER];\n\t\tconst slicedBuffer = buffer.slice(relativeStart, relativeStart + span);\n\t\tconst blob = new Blob([], { type: arguments[2] });\n\t\tblob[BUFFER] = slicedBuffer;\n\t\treturn blob;\n\t}\n}\n\nObject.defineProperties(Blob.prototype, {\n\tsize: { enumerable: true },\n\ttype: { enumerable: true },\n\tslice: { enumerable: true }\n});\n\nObject.defineProperty(Blob.prototype, Symbol.toStringTag, {\n\tvalue: 'Blob',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * fetch-error.js\n *\n * FetchError interface for operational errors\n */\n\n/**\n * Create FetchError instance\n *\n * @param String message Error message for human\n * @param String type Error type for machine\n * @param String systemError For Node.js system error\n * @return FetchError\n */\nfunction FetchError(message, type, systemError) {\n Error.call(this, message);\n\n this.message = message;\n this.type = type;\n\n // when err.type is `system`, err.code contains system error code\n if (systemError) {\n this.code = this.errno = systemError.code;\n }\n\n // hide custom error implementation details from end-users\n Error.captureStackTrace(this, this.constructor);\n}\n\nFetchError.prototype = Object.create(Error.prototype);\nFetchError.prototype.constructor = FetchError;\nFetchError.prototype.name = 'FetchError';\n\nlet convert;\ntry {\n\tconvert = require('encoding').convert;\n} catch (e) {}\n\nconst INTERNALS = Symbol('Body internals');\n\n// fix an issue where \"PassThrough\" isn't a named export for node <10\nconst PassThrough = Stream.PassThrough;\n\n/**\n * Body mixin\n *\n * Ref: https://fetch.spec.whatwg.org/#body\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nfunction Body(body) {\n\tvar _this = this;\n\n\tvar _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {},\n\t _ref$size = _ref.size;\n\n\tlet size = _ref$size === undefined ? 
0 : _ref$size;\n\tvar _ref$timeout = _ref.timeout;\n\tlet timeout = _ref$timeout === undefined ? 0 : _ref$timeout;\n\n\tif (body == null) {\n\t\t// body is undefined or null\n\t\tbody = null;\n\t} else if (isURLSearchParams(body)) {\n\t\t// body is a URLSearchParams\n\t\tbody = Buffer.from(body.toString());\n\t} else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') {\n\t\t// body is ArrayBuffer\n\t\tbody = Buffer.from(body);\n\t} else if (ArrayBuffer.isView(body)) {\n\t\t// body is ArrayBufferView\n\t\tbody = Buffer.from(body.buffer, body.byteOffset, body.byteLength);\n\t} else if (body instanceof Stream) ; else {\n\t\t// none of the above\n\t\t// coerce to string then buffer\n\t\tbody = Buffer.from(String(body));\n\t}\n\tthis[INTERNALS] = {\n\t\tbody,\n\t\tdisturbed: false,\n\t\terror: null\n\t};\n\tthis.size = size;\n\tthis.timeout = timeout;\n\n\tif (body instanceof Stream) {\n\t\tbody.on('error', function (err) {\n\t\t\tconst error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err);\n\t\t\t_this[INTERNALS].error = error;\n\t\t});\n\t}\n}\n\nBody.prototype = {\n\tget body() {\n\t\treturn this[INTERNALS].body;\n\t},\n\n\tget bodyUsed() {\n\t\treturn this[INTERNALS].disturbed;\n\t},\n\n\t/**\n * Decode response as ArrayBuffer\n *\n * @return Promise\n */\n\tarrayBuffer() {\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength);\n\t\t});\n\t},\n\n\t/**\n * Return raw response as Blob\n *\n * @return Promise\n */\n\tblob() {\n\t\tlet ct = this.headers && this.headers.get('content-type') || '';\n\t\treturn consumeBody.call(this).then(function (buf) {\n\t\t\treturn Object.assign(\n\t\t\t// Prevent copying\n\t\t\tnew Blob([], {\n\t\t\t\ttype: ct.toLowerCase()\n\t\t\t}), {\n\t\t\t\t[BUFFER]: buf\n\t\t\t});\n\t\t});\n\t},\n\n\t/**\n * Decode response as json\n *\n * @return Promise\n */\n\tjson() {\n\t\tvar _this2 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\ttry {\n\t\t\t\treturn JSON.parse(buffer.toString());\n\t\t\t} catch (err) {\n\t\t\t\treturn Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json'));\n\t\t\t}\n\t\t});\n\t},\n\n\t/**\n * Decode response as text\n *\n * @return Promise\n */\n\ttext() {\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn buffer.toString();\n\t\t});\n\t},\n\n\t/**\n * Decode response as buffer (non-spec api)\n *\n * @return Promise\n */\n\tbuffer() {\n\t\treturn consumeBody.call(this);\n\t},\n\n\t/**\n * Decode response as text, while automatically detecting the encoding and\n * trying to decode to UTF-8 (non-spec api)\n *\n * @return Promise\n */\n\ttextConverted() {\n\t\tvar _this3 = this;\n\n\t\treturn consumeBody.call(this).then(function (buffer) {\n\t\t\treturn convertBody(buffer, _this3.headers);\n\t\t});\n\t}\n};\n\n// In browsers, all properties are enumerable.\nObject.defineProperties(Body.prototype, {\n\tbody: { enumerable: true },\n\tbodyUsed: { enumerable: true },\n\tarrayBuffer: { enumerable: true },\n\tblob: { enumerable: true },\n\tjson: { enumerable: true },\n\ttext: { enumerable: true }\n});\n\nBody.mixIn = function (proto) {\n\tfor (const name of Object.getOwnPropertyNames(Body.prototype)) {\n\t\t// istanbul ignore else: future proof\n\t\tif (!(name in proto)) {\n\t\t\tconst desc 
= Object.getOwnPropertyDescriptor(Body.prototype, name);\n\t\t\tObject.defineProperty(proto, name, desc);\n\t\t}\n\t}\n};\n\n/**\n * Consume and convert an entire Body to a Buffer.\n *\n * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body\n *\n * @return Promise\n */\nfunction consumeBody() {\n\tvar _this4 = this;\n\n\tif (this[INTERNALS].disturbed) {\n\t\treturn Body.Promise.reject(new TypeError(`body used already for: ${this.url}`));\n\t}\n\n\tthis[INTERNALS].disturbed = true;\n\n\tif (this[INTERNALS].error) {\n\t\treturn Body.Promise.reject(this[INTERNALS].error);\n\t}\n\n\tlet body = this.body;\n\n\t// body is null\n\tif (body === null) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is blob\n\tif (isBlob(body)) {\n\t\tbody = body.stream();\n\t}\n\n\t// body is buffer\n\tif (Buffer.isBuffer(body)) {\n\t\treturn Body.Promise.resolve(body);\n\t}\n\n\t// istanbul ignore if: should never happen\n\tif (!(body instanceof Stream)) {\n\t\treturn Body.Promise.resolve(Buffer.alloc(0));\n\t}\n\n\t// body is stream\n\t// get ready to actually consume the body\n\tlet accum = [];\n\tlet accumBytes = 0;\n\tlet abort = false;\n\n\treturn new Body.Promise(function (resolve, reject) {\n\t\tlet resTimeout;\n\n\t\t// allow timeout on slow response body\n\t\tif (_this4.timeout) {\n\t\t\tresTimeout = setTimeout(function () {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout'));\n\t\t\t}, _this4.timeout);\n\t\t}\n\n\t\t// handle stream errors\n\t\tbody.on('error', function (err) {\n\t\t\tif (err.name === 'AbortError') {\n\t\t\t\t// if the request was aborted, reject with this Error\n\t\t\t\tabort = true;\n\t\t\t\treject(err);\n\t\t\t} else {\n\t\t\t\t// other errors, such as incorrect content-encoding\n\t\t\t\treject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\n\t\tbody.on('data', function (chunk) {\n\t\t\tif (abort || chunk === null) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (_this4.size && accumBytes + chunk.length > _this4.size) {\n\t\t\t\tabort = true;\n\t\t\t\treject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size'));\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\taccumBytes += chunk.length;\n\t\t\taccum.push(chunk);\n\t\t});\n\n\t\tbody.on('end', function () {\n\t\t\tif (abort) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tclearTimeout(resTimeout);\n\n\t\t\ttry {\n\t\t\t\tresolve(Buffer.concat(accum, accumBytes));\n\t\t\t} catch (err) {\n\t\t\t\t// handle streams that have accumulated too much data (issue #414)\n\t\t\t\treject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err));\n\t\t\t}\n\t\t});\n\t});\n}\n\n/**\n * Detect buffer encoding and convert to target encoding\n * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding\n *\n * @param Buffer buffer Incoming buffer\n * @param String encoding Target encoding\n * @return String\n */\nfunction convertBody(buffer, headers) {\n\tif (typeof convert !== 'function') {\n\t\tthrow new Error('The package `encoding` must be installed to use the textConverted() function');\n\t}\n\n\tconst ct = headers.get('content-type');\n\tlet charset = 'utf-8';\n\tlet res, str;\n\n\t// header\n\tif (ct) {\n\t\tres = /charset=([^;]*)/i.exec(ct);\n\t}\n\n\t// no charset in content type, peek at response body for at most 1024 bytes\n\tstr = 
buffer.slice(0, 1024).toString();\n\n\t// html5\n\tif (!res && str) {\n\t\tres = / 0 && arguments[0] !== undefined ? arguments[0] : undefined;\n\n\t\tthis[MAP] = Object.create(null);\n\n\t\tif (init instanceof Headers) {\n\t\t\tconst rawHeaders = init.raw();\n\t\t\tconst headerNames = Object.keys(rawHeaders);\n\n\t\t\tfor (const headerName of headerNames) {\n\t\t\t\tfor (const value of rawHeaders[headerName]) {\n\t\t\t\t\tthis.append(headerName, value);\n\t\t\t\t}\n\t\t\t}\n\n\t\t\treturn;\n\t\t}\n\n\t\t// We don't worry about converting prop to ByteString here as append()\n\t\t// will handle it.\n\t\tif (init == null) ; else if (typeof init === 'object') {\n\t\t\tconst method = init[Symbol.iterator];\n\t\t\tif (method != null) {\n\t\t\t\tif (typeof method !== 'function') {\n\t\t\t\t\tthrow new TypeError('Header pairs must be iterable');\n\t\t\t\t}\n\n\t\t\t\t// sequence>\n\t\t\t\t// Note: per spec we have to first exhaust the lists then process them\n\t\t\t\tconst pairs = [];\n\t\t\t\tfor (const pair of init) {\n\t\t\t\t\tif (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be iterable');\n\t\t\t\t\t}\n\t\t\t\t\tpairs.push(Array.from(pair));\n\t\t\t\t}\n\n\t\t\t\tfor (const pair of pairs) {\n\t\t\t\t\tif (pair.length !== 2) {\n\t\t\t\t\t\tthrow new TypeError('Each header pair must be a name/value tuple');\n\t\t\t\t\t}\n\t\t\t\t\tthis.append(pair[0], pair[1]);\n\t\t\t\t}\n\t\t\t} else {\n\t\t\t\t// record\n\t\t\t\tfor (const key of Object.keys(init)) {\n\t\t\t\t\tconst value = init[key];\n\t\t\t\t\tthis.append(key, value);\n\t\t\t\t}\n\t\t\t}\n\t\t} else {\n\t\t\tthrow new TypeError('Provided initializer must be an object');\n\t\t}\n\t}\n\n\t/**\n * Return combined header value given name\n *\n * @param String name Header name\n * @return Mixed\n */\n\tget(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key === undefined) {\n\t\t\treturn null;\n\t\t}\n\n\t\treturn this[MAP][key].join(', ');\n\t}\n\n\t/**\n * Iterate over all headers\n *\n * @param Function callback Executed for each item with parameters (value, name, thisArg)\n * @param Boolean thisArg `this` context for callback function\n * @return Void\n */\n\tforEach(callback) {\n\t\tlet thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined;\n\n\t\tlet pairs = getHeaders(this);\n\t\tlet i = 0;\n\t\twhile (i < pairs.length) {\n\t\t\tvar _pairs$i = pairs[i];\n\t\t\tconst name = _pairs$i[0],\n\t\t\t value = _pairs$i[1];\n\n\t\t\tcallback.call(thisArg, value, name, this);\n\t\t\tpairs = getHeaders(this);\n\t\t\ti++;\n\t\t}\n\t}\n\n\t/**\n * Overwrite header values given name\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tset(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tthis[MAP][key !== undefined ? 
key : name] = [value];\n\t}\n\n\t/**\n * Append a value onto existing header\n *\n * @param String name Header name\n * @param String value Header value\n * @return Void\n */\n\tappend(name, value) {\n\t\tname = `${name}`;\n\t\tvalue = `${value}`;\n\t\tvalidateName(name);\n\t\tvalidateValue(value);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tthis[MAP][key].push(value);\n\t\t} else {\n\t\t\tthis[MAP][name] = [value];\n\t\t}\n\t}\n\n\t/**\n * Check for header name existence\n *\n * @param String name Header name\n * @return Boolean\n */\n\thas(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\treturn find(this[MAP], name) !== undefined;\n\t}\n\n\t/**\n * Delete all header values given name\n *\n * @param String name Header name\n * @return Void\n */\n\tdelete(name) {\n\t\tname = `${name}`;\n\t\tvalidateName(name);\n\t\tconst key = find(this[MAP], name);\n\t\tif (key !== undefined) {\n\t\t\tdelete this[MAP][key];\n\t\t}\n\t}\n\n\t/**\n * Return raw headers (non-spec api)\n *\n * @return Object\n */\n\traw() {\n\t\treturn this[MAP];\n\t}\n\n\t/**\n * Get an iterator on keys.\n *\n * @return Iterator\n */\n\tkeys() {\n\t\treturn createHeadersIterator(this, 'key');\n\t}\n\n\t/**\n * Get an iterator on values.\n *\n * @return Iterator\n */\n\tvalues() {\n\t\treturn createHeadersIterator(this, 'value');\n\t}\n\n\t/**\n * Get an iterator on entries.\n *\n * This is the default iterator of the Headers object.\n *\n * @return Iterator\n */\n\t[Symbol.iterator]() {\n\t\treturn createHeadersIterator(this, 'key+value');\n\t}\n}\nHeaders.prototype.entries = Headers.prototype[Symbol.iterator];\n\nObject.defineProperty(Headers.prototype, Symbol.toStringTag, {\n\tvalue: 'Headers',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Headers.prototype, {\n\tget: { enumerable: true },\n\tforEach: { enumerable: true },\n\tset: { enumerable: true },\n\tappend: { enumerable: true },\n\thas: { enumerable: true },\n\tdelete: { enumerable: true },\n\tkeys: { enumerable: true },\n\tvalues: { enumerable: true },\n\tentries: { enumerable: true }\n});\n\nfunction getHeaders(headers) {\n\tlet kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value';\n\n\tconst keys = Object.keys(headers[MAP]).sort();\n\treturn keys.map(kind === 'key' ? function (k) {\n\t\treturn k.toLowerCase();\n\t} : kind === 'value' ? 
function (k) {\n\t\treturn headers[MAP][k].join(', ');\n\t} : function (k) {\n\t\treturn [k.toLowerCase(), headers[MAP][k].join(', ')];\n\t});\n}\n\nconst INTERNAL = Symbol('internal');\n\nfunction createHeadersIterator(target, kind) {\n\tconst iterator = Object.create(HeadersIteratorPrototype);\n\titerator[INTERNAL] = {\n\t\ttarget,\n\t\tkind,\n\t\tindex: 0\n\t};\n\treturn iterator;\n}\n\nconst HeadersIteratorPrototype = Object.setPrototypeOf({\n\tnext() {\n\t\t// istanbul ignore if\n\t\tif (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) {\n\t\t\tthrow new TypeError('Value of `this` is not a HeadersIterator');\n\t\t}\n\n\t\tvar _INTERNAL = this[INTERNAL];\n\t\tconst target = _INTERNAL.target,\n\t\t kind = _INTERNAL.kind,\n\t\t index = _INTERNAL.index;\n\n\t\tconst values = getHeaders(target, kind);\n\t\tconst len = values.length;\n\t\tif (index >= len) {\n\t\t\treturn {\n\t\t\t\tvalue: undefined,\n\t\t\t\tdone: true\n\t\t\t};\n\t\t}\n\n\t\tthis[INTERNAL].index = index + 1;\n\n\t\treturn {\n\t\t\tvalue: values[index],\n\t\t\tdone: false\n\t\t};\n\t}\n}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]())));\n\nObject.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, {\n\tvalue: 'HeadersIterator',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\n/**\n * Export the Headers object in a form that Node.js can consume.\n *\n * @param Headers headers\n * @return Object\n */\nfunction exportNodeCompatibleHeaders(headers) {\n\tconst obj = Object.assign({ __proto__: null }, headers[MAP]);\n\n\t// http.request() only supports string as Host header. This hack makes\n\t// specifying custom Host header possible.\n\tconst hostHeaderKey = find(headers[MAP], 'Host');\n\tif (hostHeaderKey !== undefined) {\n\t\tobj[hostHeaderKey] = obj[hostHeaderKey][0];\n\t}\n\n\treturn obj;\n}\n\n/**\n * Create a Headers object from an object of headers, ignoring those that do\n * not conform to HTTP grammar productions.\n *\n * @param Object obj Object of headers\n * @return Headers\n */\nfunction createHeadersLenient(obj) {\n\tconst headers = new Headers();\n\tfor (const name of Object.keys(obj)) {\n\t\tif (invalidTokenRegex.test(name)) {\n\t\t\tcontinue;\n\t\t}\n\t\tif (Array.isArray(obj[name])) {\n\t\t\tfor (const val of obj[name]) {\n\t\t\t\tif (invalidHeaderCharRegex.test(val)) {\n\t\t\t\t\tcontinue;\n\t\t\t\t}\n\t\t\t\tif (headers[MAP][name] === undefined) {\n\t\t\t\t\theaders[MAP][name] = [val];\n\t\t\t\t} else {\n\t\t\t\t\theaders[MAP][name].push(val);\n\t\t\t\t}\n\t\t\t}\n\t\t} else if (!invalidHeaderCharRegex.test(obj[name])) {\n\t\t\theaders[MAP][name] = [obj[name]];\n\t\t}\n\t}\n\treturn headers;\n}\n\nconst INTERNALS$1 = Symbol('Response internals');\n\n// fix an issue where \"STATUS_CODES\" aren't a named export for node <10\nconst STATUS_CODES = http.STATUS_CODES;\n\n/**\n * Response class\n *\n * @param Stream body Readable stream\n * @param Object opts Response options\n * @return Void\n */\nclass Response {\n\tconstructor() {\n\t\tlet body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null;\n\t\tlet opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tBody.call(this, body, opts);\n\n\t\tconst status = opts.status || 200;\n\t\tconst headers = new Headers(opts.headers);\n\n\t\tif (body != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(body);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tthis[INTERNALS$1] = {\n\t\t\turl: opts.url,\n\t\t\tstatus,\n\t\t\tstatusText: opts.statusText || STATUS_CODES[status],\n\t\t\theaders,\n\t\t\tcounter: opts.counter\n\t\t};\n\t}\n\n\tget url() {\n\t\treturn this[INTERNALS$1].url || '';\n\t}\n\n\tget status() {\n\t\treturn this[INTERNALS$1].status;\n\t}\n\n\t/**\n * Convenience property representing if the request ended normally\n */\n\tget ok() {\n\t\treturn this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300;\n\t}\n\n\tget redirected() {\n\t\treturn this[INTERNALS$1].counter > 0;\n\t}\n\n\tget statusText() {\n\t\treturn this[INTERNALS$1].statusText;\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$1].headers;\n\t}\n\n\t/**\n * Clone this response\n *\n * @return Response\n */\n\tclone() {\n\t\treturn new Response(clone(this), {\n\t\t\turl: this.url,\n\t\t\tstatus: this.status,\n\t\t\tstatusText: this.statusText,\n\t\t\theaders: this.headers,\n\t\t\tok: this.ok,\n\t\t\tredirected: this.redirected\n\t\t});\n\t}\n}\n\nBody.mixIn(Response.prototype);\n\nObject.defineProperties(Response.prototype, {\n\turl: { enumerable: true },\n\tstatus: { enumerable: true },\n\tok: { enumerable: true },\n\tredirected: { enumerable: true },\n\tstatusText: { enumerable: true },\n\theaders: { enumerable: true },\n\tclone: { enumerable: true }\n});\n\nObject.defineProperty(Response.prototype, Symbol.toStringTag, {\n\tvalue: 'Response',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nconst INTERNALS$2 = Symbol('Request internals');\nconst URL = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"format\", \"parse\" aren't a named export for node <10\nconst parse_url = Url.parse;\nconst format_url = Url.format;\n\n/**\n * Wrapper around `new URL` to handle arbitrary URLs\n *\n * @param {string} urlStr\n * @return {void}\n */\nfunction parseURL(urlStr) {\n\t/*\n \tCheck whether the URL is absolute or not\n \t\tScheme: https://tools.ietf.org/html/rfc3986#section-3.1\n \tAbsolute URL: https://tools.ietf.org/html/rfc3986#section-4.3\n */\n\tif (/^[a-zA-Z][a-zA-Z\\d+\\-.]*:/.exec(urlStr)) {\n\t\turlStr = new URL(urlStr).toString();\n\t}\n\n\t// Fallback to old implementation for arbitrary URLs\n\treturn parse_url(urlStr);\n}\n\nconst streamDestructionSupported = 'destroy' in Stream.Readable.prototype;\n\n/**\n * Check if a value is an instance of Request.\n *\n * @param Mixed input\n * @return Boolean\n */\nfunction isRequest(input) {\n\treturn typeof input === 'object' && typeof input[INTERNALS$2] === 'object';\n}\n\nfunction isAbortSignal(signal) {\n\tconst proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal);\n\treturn !!(proto && proto.constructor.name === 'AbortSignal');\n}\n\n/**\n * Request class\n *\n * @param Mixed input Url or Request instance\n * @param Object init Custom options\n * @return Void\n */\nclass Request {\n\tconstructor(input) {\n\t\tlet init = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {};\n\n\t\tlet parsedURL;\n\n\t\t// normalize input\n\t\tif (!isRequest(input)) {\n\t\t\tif (input && input.href) {\n\t\t\t\t// in order to support Node.js' Url objects; though WHATWG's URL objects\n\t\t\t\t// will fall into this branch also (since their `toString()` will return\n\t\t\t\t// `href` property anyway)\n\t\t\t\tparsedURL = parseURL(input.href);\n\t\t\t} else {\n\t\t\t\t// coerce input to a string before attempting to parse\n\t\t\t\tparsedURL = parseURL(`${input}`);\n\t\t\t}\n\t\t\tinput = {};\n\t\t} else {\n\t\t\tparsedURL = parseURL(input.url);\n\t\t}\n\n\t\tlet method = init.method || input.method || 'GET';\n\t\tmethod = method.toUpperCase();\n\n\t\tif ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) {\n\t\t\tthrow new TypeError('Request with GET/HEAD method cannot have body');\n\t\t}\n\n\t\tlet inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? clone(input) : null;\n\n\t\tBody.call(this, inputBody, {\n\t\t\ttimeout: init.timeout || input.timeout || 0,\n\t\t\tsize: init.size || input.size || 0\n\t\t});\n\n\t\tconst headers = new Headers(init.headers || input.headers || {});\n\n\t\tif (inputBody != null && !headers.has('Content-Type')) {\n\t\t\tconst contentType = extractContentType(inputBody);\n\t\t\tif (contentType) {\n\t\t\t\theaders.append('Content-Type', contentType);\n\t\t\t}\n\t\t}\n\n\t\tlet signal = isRequest(input) ? input.signal : null;\n\t\tif ('signal' in init) signal = init.signal;\n\n\t\tif (signal != null && !isAbortSignal(signal)) {\n\t\t\tthrow new TypeError('Expected signal to be an instanceof AbortSignal');\n\t\t}\n\n\t\tthis[INTERNALS$2] = {\n\t\t\tmethod,\n\t\t\tredirect: init.redirect || input.redirect || 'follow',\n\t\t\theaders,\n\t\t\tparsedURL,\n\t\t\tsignal\n\t\t};\n\n\t\t// node-fetch-only options\n\t\tthis.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20;\n\t\tthis.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? 
input.compress : true;\n\t\tthis.counter = init.counter || input.counter || 0;\n\t\tthis.agent = init.agent || input.agent;\n\t}\n\n\tget method() {\n\t\treturn this[INTERNALS$2].method;\n\t}\n\n\tget url() {\n\t\treturn format_url(this[INTERNALS$2].parsedURL);\n\t}\n\n\tget headers() {\n\t\treturn this[INTERNALS$2].headers;\n\t}\n\n\tget redirect() {\n\t\treturn this[INTERNALS$2].redirect;\n\t}\n\n\tget signal() {\n\t\treturn this[INTERNALS$2].signal;\n\t}\n\n\t/**\n * Clone this request\n *\n * @return Request\n */\n\tclone() {\n\t\treturn new Request(this);\n\t}\n}\n\nBody.mixIn(Request.prototype);\n\nObject.defineProperty(Request.prototype, Symbol.toStringTag, {\n\tvalue: 'Request',\n\twritable: false,\n\tenumerable: false,\n\tconfigurable: true\n});\n\nObject.defineProperties(Request.prototype, {\n\tmethod: { enumerable: true },\n\turl: { enumerable: true },\n\theaders: { enumerable: true },\n\tredirect: { enumerable: true },\n\tclone: { enumerable: true },\n\tsignal: { enumerable: true }\n});\n\n/**\n * Convert a Request to Node.js http request options.\n *\n * @param Request A Request instance\n * @return Object The options object to be passed to http.request\n */\nfunction getNodeRequestOptions(request) {\n\tconst parsedURL = request[INTERNALS$2].parsedURL;\n\tconst headers = new Headers(request[INTERNALS$2].headers);\n\n\t// fetch step 1.3\n\tif (!headers.has('Accept')) {\n\t\theaders.set('Accept', '*/*');\n\t}\n\n\t// Basic fetch\n\tif (!parsedURL.protocol || !parsedURL.hostname) {\n\t\tthrow new TypeError('Only absolute URLs are supported');\n\t}\n\n\tif (!/^https?:$/.test(parsedURL.protocol)) {\n\t\tthrow new TypeError('Only HTTP(S) protocols are supported');\n\t}\n\n\tif (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) {\n\t\tthrow new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8');\n\t}\n\n\t// HTTP-network-or-cache fetch steps 2.4-2.7\n\tlet contentLengthValue = null;\n\tif (request.body == null && /^(POST|PUT)$/i.test(request.method)) {\n\t\tcontentLengthValue = '0';\n\t}\n\tif (request.body != null) {\n\t\tconst totalBytes = getTotalBytes(request);\n\t\tif (typeof totalBytes === 'number') {\n\t\t\tcontentLengthValue = String(totalBytes);\n\t\t}\n\t}\n\tif (contentLengthValue) {\n\t\theaders.set('Content-Length', contentLengthValue);\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.11\n\tif (!headers.has('User-Agent')) {\n\t\theaders.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)');\n\t}\n\n\t// HTTP-network-or-cache fetch step 2.15\n\tif (request.compress && !headers.has('Accept-Encoding')) {\n\t\theaders.set('Accept-Encoding', 'gzip,deflate');\n\t}\n\n\tlet agent = request.agent;\n\tif (typeof agent === 'function') {\n\t\tagent = agent(parsedURL);\n\t}\n\n\tif (!headers.has('Connection') && !agent) {\n\t\theaders.set('Connection', 'close');\n\t}\n\n\t// HTTP-network fetch step 4.2\n\t// chunked encoding is handled by Node.js\n\n\treturn Object.assign({}, parsedURL, {\n\t\tmethod: request.method,\n\t\theaders: exportNodeCompatibleHeaders(headers),\n\t\tagent\n\t});\n}\n\n/**\n * abort-error.js\n *\n * AbortError interface for cancelled requests\n */\n\n/**\n * Create AbortError instance\n *\n * @param String message Error message for human\n * @return AbortError\n */\nfunction AbortError(message) {\n Error.call(this, message);\n\n this.type = 'aborted';\n this.message = message;\n\n // hide custom error implementation details from end-users\n 
Error.captureStackTrace(this, this.constructor);\n}\n\nAbortError.prototype = Object.create(Error.prototype);\nAbortError.prototype.constructor = AbortError;\nAbortError.prototype.name = 'AbortError';\n\nconst URL$1 = Url.URL || whatwgUrl.URL;\n\n// fix an issue where \"PassThrough\", \"resolve\" aren't a named export for node <10\nconst PassThrough$1 = Stream.PassThrough;\n\nconst isDomainOrSubdomain = function isDomainOrSubdomain(destination, original) {\n\tconst orig = new URL$1(original).hostname;\n\tconst dest = new URL$1(destination).hostname;\n\n\treturn orig === dest || orig[orig.length - dest.length - 1] === '.' && orig.endsWith(dest);\n};\n\n/**\n * isSameProtocol reports whether the two provided URLs use the same protocol.\n *\n * Both domains must already be in canonical form.\n * @param {string|URL} original\n * @param {string|URL} destination\n */\nconst isSameProtocol = function isSameProtocol(destination, original) {\n\tconst orig = new URL$1(original).protocol;\n\tconst dest = new URL$1(destination).protocol;\n\n\treturn orig === dest;\n};\n\n/**\n * Fetch function\n *\n * @param Mixed url Absolute url or Request instance\n * @param Object opts Fetch options\n * @return Promise\n */\nfunction fetch(url, opts) {\n\n\t// allow custom promise\n\tif (!fetch.Promise) {\n\t\tthrow new Error('native promise missing, set fetch.Promise to your favorite alternative');\n\t}\n\n\tBody.Promise = fetch.Promise;\n\n\t// wrap http.request into fetch\n\treturn new fetch.Promise(function (resolve, reject) {\n\t\t// build request object\n\t\tconst request = new Request(url, opts);\n\t\tconst options = getNodeRequestOptions(request);\n\n\t\tconst send = (options.protocol === 'https:' ? https : http).request;\n\t\tconst signal = request.signal;\n\n\t\tlet response = null;\n\n\t\tconst abort = function abort() {\n\t\t\tlet error = new AbortError('The user aborted a request.');\n\t\t\treject(error);\n\t\t\tif (request.body && request.body instanceof Stream.Readable) {\n\t\t\t\tdestroyStream(request.body, error);\n\t\t\t}\n\t\t\tif (!response || !response.body) return;\n\t\t\tresponse.body.emit('error', error);\n\t\t};\n\n\t\tif (signal && signal.aborted) {\n\t\t\tabort();\n\t\t\treturn;\n\t\t}\n\n\t\tconst abortAndFinalize = function abortAndFinalize() {\n\t\t\tabort();\n\t\t\tfinalize();\n\t\t};\n\n\t\t// send request\n\t\tconst req = send(options);\n\t\tlet reqTimeout;\n\n\t\tif (signal) {\n\t\t\tsignal.addEventListener('abort', abortAndFinalize);\n\t\t}\n\n\t\tfunction finalize() {\n\t\t\treq.abort();\n\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\tclearTimeout(reqTimeout);\n\t\t}\n\n\t\tif (request.timeout) {\n\t\t\treq.once('socket', function (socket) {\n\t\t\t\treqTimeout = setTimeout(function () {\n\t\t\t\t\treject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout'));\n\t\t\t\t\tfinalize();\n\t\t\t\t}, request.timeout);\n\t\t\t});\n\t\t}\n\n\t\treq.on('error', function (err) {\n\t\t\treject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err));\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\n\t\t\tfinalize();\n\t\t});\n\n\t\tfixResponseChunkedTransferBadEnding(req, function (err) {\n\t\t\tif (signal && signal.aborted) {\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\tif (response && response.body) {\n\t\t\t\tdestroyStream(response.body, err);\n\t\t\t}\n\t\t});\n\n\t\t/* c8 ignore next 18 */\n\t\tif (parseInt(process.version.substring(1)) < 14) {\n\t\t\t// Before 
Node.js 14, pipeline() does not fully support async iterators and does not always\n\t\t\t// properly handle when the socket close/end events are out of order.\n\t\t\treq.on('socket', function (s) {\n\t\t\t\ts.addListener('close', function (hadError) {\n\t\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\t\tconst hasDataListener = s.listenerCount('data') > 0;\n\n\t\t\t\t\t// if end happened before close but the socket didn't emit an error, do it now\n\t\t\t\t\tif (response && hasDataListener && !hadError && !(signal && signal.aborted)) {\n\t\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\t\tresponse.body.emit('error', err);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t});\n\t\t}\n\n\t\treq.on('response', function (res) {\n\t\t\tclearTimeout(reqTimeout);\n\n\t\t\tconst headers = createHeadersLenient(res.headers);\n\n\t\t\t// HTTP fetch step 5\n\t\t\tif (fetch.isRedirect(res.statusCode)) {\n\t\t\t\t// HTTP fetch step 5.2\n\t\t\t\tconst location = headers.get('Location');\n\n\t\t\t\t// HTTP fetch step 5.3\n\t\t\t\tlet locationURL = null;\n\t\t\t\ttry {\n\t\t\t\t\tlocationURL = location === null ? null : new URL$1(location, request.url).toString();\n\t\t\t\t} catch (err) {\n\t\t\t\t\t// error here can only be invalid URL in Location: header\n\t\t\t\t\t// do not throw when options.redirect == manual\n\t\t\t\t\t// let the user extract the errorneous redirect URL\n\t\t\t\t\tif (request.redirect !== 'manual') {\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\t}\n\t\t\t\t}\n\n\t\t\t\t// HTTP fetch step 5.5\n\t\t\t\tswitch (request.redirect) {\n\t\t\t\t\tcase 'error':\n\t\t\t\t\t\treject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect'));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t\tcase 'manual':\n\t\t\t\t\t\t// node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL.\n\t\t\t\t\t\tif (locationURL !== null) {\n\t\t\t\t\t\t\t// handle corrupted header\n\t\t\t\t\t\t\ttry {\n\t\t\t\t\t\t\t\theaders.set('Location', locationURL);\n\t\t\t\t\t\t\t} catch (err) {\n\t\t\t\t\t\t\t\t// istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request\n\t\t\t\t\t\t\t\treject(err);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\t\t\t\t\t\tbreak;\n\t\t\t\t\tcase 'follow':\n\t\t\t\t\t\t// HTTP-redirect fetch step 2\n\t\t\t\t\t\tif (locationURL === null) {\n\t\t\t\t\t\t\tbreak;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 5\n\t\t\t\t\t\tif (request.counter >= request.follow) {\n\t\t\t\t\t\t\treject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 6 (counter increment)\n\t\t\t\t\t\t// Create a new Request object.\n\t\t\t\t\t\tconst requestOpts = {\n\t\t\t\t\t\t\theaders: new Headers(request.headers),\n\t\t\t\t\t\t\tfollow: request.follow,\n\t\t\t\t\t\t\tcounter: request.counter + 1,\n\t\t\t\t\t\t\tagent: request.agent,\n\t\t\t\t\t\t\tcompress: request.compress,\n\t\t\t\t\t\t\tmethod: request.method,\n\t\t\t\t\t\t\tbody: request.body,\n\t\t\t\t\t\t\tsignal: request.signal,\n\t\t\t\t\t\t\ttimeout: request.timeout,\n\t\t\t\t\t\t\tsize: 
request.size\n\t\t\t\t\t\t};\n\n\t\t\t\t\t\tif (!isDomainOrSubdomain(request.url, locationURL) || !isSameProtocol(request.url, locationURL)) {\n\t\t\t\t\t\t\tfor (const name of ['authorization', 'www-authenticate', 'cookie', 'cookie2']) {\n\t\t\t\t\t\t\t\trequestOpts.headers.delete(name);\n\t\t\t\t\t\t\t}\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 9\n\t\t\t\t\t\tif (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) {\n\t\t\t\t\t\t\treject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect'));\n\t\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\t\treturn;\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 11\n\t\t\t\t\t\tif (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') {\n\t\t\t\t\t\t\trequestOpts.method = 'GET';\n\t\t\t\t\t\t\trequestOpts.body = undefined;\n\t\t\t\t\t\t\trequestOpts.headers.delete('content-length');\n\t\t\t\t\t\t}\n\n\t\t\t\t\t\t// HTTP-redirect fetch step 15\n\t\t\t\t\t\tresolve(fetch(new Request(locationURL, requestOpts)));\n\t\t\t\t\t\tfinalize();\n\t\t\t\t\t\treturn;\n\t\t\t\t}\n\t\t\t}\n\n\t\t\t// prepare response\n\t\t\tres.once('end', function () {\n\t\t\t\tif (signal) signal.removeEventListener('abort', abortAndFinalize);\n\t\t\t});\n\t\t\tlet body = res.pipe(new PassThrough$1());\n\n\t\t\tconst response_options = {\n\t\t\t\turl: request.url,\n\t\t\t\tstatus: res.statusCode,\n\t\t\t\tstatusText: res.statusMessage,\n\t\t\t\theaders: headers,\n\t\t\t\tsize: request.size,\n\t\t\t\ttimeout: request.timeout,\n\t\t\t\tcounter: request.counter\n\t\t\t};\n\n\t\t\t// HTTP-network fetch step 12.1.1.3\n\t\t\tconst codings = headers.get('Content-Encoding');\n\n\t\t\t// HTTP-network fetch step 12.1.1.4: handle content codings\n\n\t\t\t// in following scenarios we ignore compression support\n\t\t\t// 1. compression support is disabled\n\t\t\t// 2. HEAD request\n\t\t\t// 3. no Content-Encoding header\n\t\t\t// 4. no content response (204)\n\t\t\t// 5. 
content not modified response (304)\n\t\t\tif (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) {\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// For Node v6+\n\t\t\t// Be less strict when decoding compressed responses, since sometimes\n\t\t\t// servers send slightly invalid responses that are still accepted\n\t\t\t// by common browsers.\n\t\t\t// Always using Z_SYNC_FLUSH is what cURL does.\n\t\t\tconst zlibOptions = {\n\t\t\t\tflush: zlib.Z_SYNC_FLUSH,\n\t\t\t\tfinishFlush: zlib.Z_SYNC_FLUSH\n\t\t\t};\n\n\t\t\t// for gzip\n\t\t\tif (codings == 'gzip' || codings == 'x-gzip') {\n\t\t\t\tbody = body.pipe(zlib.createGunzip(zlibOptions));\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for deflate\n\t\t\tif (codings == 'deflate' || codings == 'x-deflate') {\n\t\t\t\t// handle the infamous raw deflate response from old servers\n\t\t\t\t// a hack for old IIS and Apache servers\n\t\t\t\tconst raw = res.pipe(new PassThrough$1());\n\t\t\t\traw.once('data', function (chunk) {\n\t\t\t\t\t// see http://stackoverflow.com/questions/37519828\n\t\t\t\t\tif ((chunk[0] & 0x0F) === 0x08) {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflate());\n\t\t\t\t\t} else {\n\t\t\t\t\t\tbody = body.pipe(zlib.createInflateRaw());\n\t\t\t\t\t}\n\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\tresolve(response);\n\t\t\t\t});\n\t\t\t\traw.on('end', function () {\n\t\t\t\t\t// some old IIS servers return zero-length OK deflate responses, so 'data' is never emitted.\n\t\t\t\t\tif (!response) {\n\t\t\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\t\t\tresolve(response);\n\t\t\t\t\t}\n\t\t\t\t});\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// for br\n\t\t\tif (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') {\n\t\t\t\tbody = body.pipe(zlib.createBrotliDecompress());\n\t\t\t\tresponse = new Response(body, response_options);\n\t\t\t\tresolve(response);\n\t\t\t\treturn;\n\t\t\t}\n\n\t\t\t// otherwise, use response as-is\n\t\t\tresponse = new Response(body, response_options);\n\t\t\tresolve(response);\n\t\t});\n\n\t\twriteToStream(req, request);\n\t});\n}\nfunction fixResponseChunkedTransferBadEnding(request, errorCallback) {\n\tlet socket;\n\n\trequest.on('socket', function (s) {\n\t\tsocket = s;\n\t});\n\n\trequest.on('response', function (response) {\n\t\tconst headers = response.headers;\n\n\t\tif (headers['transfer-encoding'] === 'chunked' && !headers['content-length']) {\n\t\t\tresponse.once('close', function (hadError) {\n\t\t\t\t// if a data listener is still present we didn't end cleanly\n\t\t\t\tconst hasDataListener = socket.listenerCount('data') > 0;\n\n\t\t\t\tif (hasDataListener && !hadError) {\n\t\t\t\t\tconst err = new Error('Premature close');\n\t\t\t\t\terr.code = 'ERR_STREAM_PREMATURE_CLOSE';\n\t\t\t\t\terrorCallback(err);\n\t\t\t\t}\n\t\t\t});\n\t\t}\n\t});\n}\n\nfunction destroyStream(stream, err) {\n\tif (stream.destroy) {\n\t\tstream.destroy(err);\n\t} else {\n\t\t// node < 8\n\t\tstream.emit('error', err);\n\t\tstream.end();\n\t}\n}\n\n/**\n * Redirect code matching\n *\n * @param Number code Status code\n * @return Boolean\n */\nfetch.isRedirect = function (code) {\n\treturn code === 301 || code === 302 || code === 303 || code === 307 || code === 308;\n};\n\n// expose Promise\nfetch.Promise = global.Promise;\n\nmodule.exports = exports = 
fetch;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.default = exports;\nexports.Headers = Headers;\nexports.Request = Request;\nexports.Response = Response;\nexports.FetchError = FetchError;\n","var wrappy = require('wrappy')\nmodule.exports = wrappy(once)\nmodule.exports.strict = wrappy(onceStrict)\n\nonce.proto = once(function () {\n Object.defineProperty(Function.prototype, 'once', {\n value: function () {\n return once(this)\n },\n configurable: true\n })\n\n Object.defineProperty(Function.prototype, 'onceStrict', {\n value: function () {\n return onceStrict(this)\n },\n configurable: true\n })\n})\n\nfunction once (fn) {\n var f = function () {\n if (f.called) return f.value\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n f.called = false\n return f\n}\n\nfunction onceStrict (fn) {\n var f = function () {\n if (f.called)\n throw new Error(f.onceError)\n f.called = true\n return f.value = fn.apply(this, arguments)\n }\n var name = fn.name || 'Function wrapped with `once`'\n f.onceError = name + \" shouldn't be called more than once\"\n f.called = false\n return f\n}\n","\"use strict\";function _typeof(obj){\"@babel/helpers - typeof\";return _typeof=\"function\"==typeof Symbol&&\"symbol\"==typeof Symbol.iterator?function(obj){return typeof obj}:function(obj){return obj&&\"function\"==typeof Symbol&&obj.constructor===Symbol&&obj!==Symbol.prototype?\"symbol\":typeof obj},_typeof(obj)}function _createForOfIteratorHelper(o,allowArrayLike){var it=typeof Symbol!==\"undefined\"&&o[Symbol.iterator]||o[\"@@iterator\"];if(!it){if(Array.isArray(o)||(it=_unsupportedIterableToArray(o))||allowArrayLike&&o&&typeof o.length===\"number\"){if(it)o=it;var i=0;var F=function F(){};return{s:F,n:function n(){if(i>=o.length)return{done:true};return{done:false,value:o[i++]}},e:function e(_e2){throw _e2},f:F}}throw new TypeError(\"Invalid attempt to iterate non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\")}var normalCompletion=true,didErr=false,err;return{s:function s(){it=it.call(o)},n:function n(){var step=it.next();normalCompletion=step.done;return step},e:function e(_e3){didErr=true;err=_e3},f:function f(){try{if(!normalCompletion&&it[\"return\"]!=null)it[\"return\"]()}finally{if(didErr)throw err}}}}function _defineProperty(obj,key,value){key=_toPropertyKey(key);if(key in obj){Object.defineProperty(obj,key,{value:value,enumerable:true,configurable:true,writable:true})}else{obj[key]=value}return obj}function _toPropertyKey(arg){var key=_toPrimitive(arg,\"string\");return _typeof(key)===\"symbol\"?key:String(key)}function _toPrimitive(input,hint){if(_typeof(input)!==\"object\"||input===null)return input;var prim=input[Symbol.toPrimitive];if(prim!==undefined){var res=prim.call(input,hint||\"default\");if(_typeof(res)!==\"object\")return res;throw new TypeError(\"@@toPrimitive must return a primitive value.\")}return(hint===\"string\"?String:Number)(input)}function _slicedToArray(arr,i){return _arrayWithHoles(arr)||_iterableToArrayLimit(arr,i)||_unsupportedIterableToArray(arr,i)||_nonIterableRest()}function _nonIterableRest(){throw new TypeError(\"Invalid attempt to destructure non-iterable instance.\\nIn order to be iterable, non-array objects must have a [Symbol.iterator]() method.\")}function _unsupportedIterableToArray(o,minLen){if(!o)return;if(typeof o===\"string\")return _arrayLikeToArray(o,minLen);var 
n=Object.prototype.toString.call(o).slice(8,-1);if(n===\"Object\"&&o.constructor)n=o.constructor.name;if(n===\"Map\"||n===\"Set\")return Array.from(o);if(n===\"Arguments\"||/^(?:Ui|I)nt(?:8|16|32)(?:Clamped)?Array$/.test(n))return _arrayLikeToArray(o,minLen)}function _arrayLikeToArray(arr,len){if(len==null||len>arr.length)len=arr.length;for(var i=0,arr2=new Array(len);i= val) {\n return target;\n } else if (target[0][0] > val) {\n end = mid - 1;\n } else {\n start = mid + 1;\n }\n }\n\n return null;\n}\n\nvar regexAstralSymbols = /[\\uD800-\\uDBFF][\\uDC00-\\uDFFF]/g;\n\nfunction countSymbols(string) {\n return string\n // replace every surrogate pair with a BMP symbol\n .replace(regexAstralSymbols, '_')\n // then get the length\n .length;\n}\n\nfunction mapChars(domain_name, useSTD3, processing_option) {\n var hasError = false;\n var processed = \"\";\n\n var len = countSymbols(domain_name);\n for (var i = 0; i < len; ++i) {\n var codePoint = domain_name.codePointAt(i);\n var status = findStatus(codePoint);\n\n switch (status[1]) {\n case \"disallowed\":\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n break;\n case \"ignored\":\n break;\n case \"mapped\":\n processed += String.fromCodePoint.apply(String, status[2]);\n break;\n case \"deviation\":\n if (processing_option === PROCESSING_OPTIONS.TRANSITIONAL) {\n processed += String.fromCodePoint.apply(String, status[2]);\n } else {\n processed += String.fromCodePoint(codePoint);\n }\n break;\n case \"valid\":\n processed += String.fromCodePoint(codePoint);\n break;\n case \"disallowed_STD3_mapped\":\n if (useSTD3) {\n hasError = true;\n processed += String.fromCodePoint(codePoint);\n } else {\n processed += String.fromCodePoint.apply(String, status[2]);\n }\n break;\n case \"disallowed_STD3_valid\":\n if (useSTD3) {\n hasError = true;\n }\n\n processed += String.fromCodePoint(codePoint);\n break;\n }\n }\n\n return {\n string: processed,\n error: hasError\n };\n}\n\nvar combiningMarksRegex = 
/[\\u0300-\\u036F\\u0483-\\u0489\\u0591-\\u05BD\\u05BF\\u05C1\\u05C2\\u05C4\\u05C5\\u05C7\\u0610-\\u061A\\u064B-\\u065F\\u0670\\u06D6-\\u06DC\\u06DF-\\u06E4\\u06E7\\u06E8\\u06EA-\\u06ED\\u0711\\u0730-\\u074A\\u07A6-\\u07B0\\u07EB-\\u07F3\\u0816-\\u0819\\u081B-\\u0823\\u0825-\\u0827\\u0829-\\u082D\\u0859-\\u085B\\u08E4-\\u0903\\u093A-\\u093C\\u093E-\\u094F\\u0951-\\u0957\\u0962\\u0963\\u0981-\\u0983\\u09BC\\u09BE-\\u09C4\\u09C7\\u09C8\\u09CB-\\u09CD\\u09D7\\u09E2\\u09E3\\u0A01-\\u0A03\\u0A3C\\u0A3E-\\u0A42\\u0A47\\u0A48\\u0A4B-\\u0A4D\\u0A51\\u0A70\\u0A71\\u0A75\\u0A81-\\u0A83\\u0ABC\\u0ABE-\\u0AC5\\u0AC7-\\u0AC9\\u0ACB-\\u0ACD\\u0AE2\\u0AE3\\u0B01-\\u0B03\\u0B3C\\u0B3E-\\u0B44\\u0B47\\u0B48\\u0B4B-\\u0B4D\\u0B56\\u0B57\\u0B62\\u0B63\\u0B82\\u0BBE-\\u0BC2\\u0BC6-\\u0BC8\\u0BCA-\\u0BCD\\u0BD7\\u0C00-\\u0C03\\u0C3E-\\u0C44\\u0C46-\\u0C48\\u0C4A-\\u0C4D\\u0C55\\u0C56\\u0C62\\u0C63\\u0C81-\\u0C83\\u0CBC\\u0CBE-\\u0CC4\\u0CC6-\\u0CC8\\u0CCA-\\u0CCD\\u0CD5\\u0CD6\\u0CE2\\u0CE3\\u0D01-\\u0D03\\u0D3E-\\u0D44\\u0D46-\\u0D48\\u0D4A-\\u0D4D\\u0D57\\u0D62\\u0D63\\u0D82\\u0D83\\u0DCA\\u0DCF-\\u0DD4\\u0DD6\\u0DD8-\\u0DDF\\u0DF2\\u0DF3\\u0E31\\u0E34-\\u0E3A\\u0E47-\\u0E4E\\u0EB1\\u0EB4-\\u0EB9\\u0EBB\\u0EBC\\u0EC8-\\u0ECD\\u0F18\\u0F19\\u0F35\\u0F37\\u0F39\\u0F3E\\u0F3F\\u0F71-\\u0F84\\u0F86\\u0F87\\u0F8D-\\u0F97\\u0F99-\\u0FBC\\u0FC6\\u102B-\\u103E\\u1056-\\u1059\\u105E-\\u1060\\u1062-\\u1064\\u1067-\\u106D\\u1071-\\u1074\\u1082-\\u108D\\u108F\\u109A-\\u109D\\u135D-\\u135F\\u1712-\\u1714\\u1732-\\u1734\\u1752\\u1753\\u1772\\u1773\\u17B4-\\u17D3\\u17DD\\u180B-\\u180D\\u18A9\\u1920-\\u192B\\u1930-\\u193B\\u19B0-\\u19C0\\u19C8\\u19C9\\u1A17-\\u1A1B\\u1A55-\\u1A5E\\u1A60-\\u1A7C\\u1A7F\\u1AB0-\\u1ABE\\u1B00-\\u1B04\\u1B34-\\u1B44\\u1B6B-\\u1B73\\u1B80-\\u1B82\\u1BA1-\\u1BAD\\u1BE6-\\u1BF3\\u1C24-\\u1C37\\u1CD0-\\u1CD2\\u1CD4-\\u1CE8\\u1CED\\u1CF2-\\u1CF4\\u1CF8\\u1CF9\\u1DC0-\\u1DF5\\u1DFC-\\u1DFF\\u20D0-\\u20F0\\u2CEF-\\u2CF1\\u2D7F\\u2DE0-\\u2DFF\\u302A-\\u302F\\u3099\\u309A\\uA66F-\\uA672\\uA674-\\uA67D\\uA69F\\uA6F0\\uA6F1\\uA802\\uA806\\uA80B\\uA823-\\uA827\\uA880\\uA881\\uA8B4-\\uA8C4\\uA8E0-\\uA8F1\\uA926-\\uA92D\\uA947-\\uA953\\uA980-\\uA983\\uA9B3-\\uA9C0\\uA9E5\\uAA29-\\uAA36\\uAA43\\uAA4C\\uAA4D\\uAA7B-\\uAA7D\\uAAB0\\uAAB2-\\uAAB4\\uAAB7\\uAAB8\\uAABE\\uAABF\\uAAC1\\uAAEB-\\uAAEF\\uAAF5\\uAAF6\\uABE3-\\uABEA\\uABEC\\uABED\\uFB1E\\uFE00-\\uFE0F\\uFE20-\\uFE2D]|\\uD800[\\uDDFD\\uDEE0\\uDF76-\\uDF7A]|\\uD802[\\uDE01-\\uDE03\\uDE05\\uDE06\\uDE0C-\\uDE0F\\uDE38-\\uDE3A\\uDE3F\\uDEE5\\uDEE6]|\\uD804[\\uDC00-\\uDC02\\uDC38-\\uDC46\\uDC7F-\\uDC82\\uDCB0-\\uDCBA\\uDD00-\\uDD02\\uDD27-\\uDD34\\uDD73\\uDD80-\\uDD82\\uDDB3-\\uDDC0\\uDE2C-\\uDE37\\uDEDF-\\uDEEA\\uDF01-\\uDF03\\uDF3C\\uDF3E-\\uDF44\\uDF47\\uDF48\\uDF4B-\\uDF4D\\uDF57\\uDF62\\uDF63\\uDF66-\\uDF6C\\uDF70-\\uDF74]|\\uD805[\\uDCB0-\\uDCC3\\uDDAF-\\uDDB5\\uDDB8-\\uDDC0\\uDE30-\\uDE40\\uDEAB-\\uDEB7]|\\uD81A[\\uDEF0-\\uDEF4\\uDF30-\\uDF36]|\\uD81B[\\uDF51-\\uDF7E\\uDF8F-\\uDF92]|\\uD82F[\\uDC9D\\uDC9E]|\\uD834[\\uDD65-\\uDD69\\uDD6D-\\uDD72\\uDD7B-\\uDD82\\uDD85-\\uDD8B\\uDDAA-\\uDDAD\\uDE42-\\uDE44]|\\uD83A[\\uDCD0-\\uDCD6]|\\uDB40[\\uDD00-\\uDDEF]/;\n\nfunction validateLabel(label, processing_option) {\n if (label.substr(0, 4) === \"xn--\") {\n label = punycode.toUnicode(label);\n processing_option = PROCESSING_OPTIONS.NONTRANSITIONAL;\n }\n\n var error = false;\n\n if (normalize(label) !== label ||\n (label[3] === \"-\" && label[4] === \"-\") ||\n label[0] === \"-\" || label[label.length - 1] === \"-\" ||\n label.indexOf(\".\") !== -1 ||\n 
label.search(combiningMarksRegex) === 0) {\n error = true;\n }\n\n var len = countSymbols(label);\n for (var i = 0; i < len; ++i) {\n var status = findStatus(label.codePointAt(i));\n if ((processing === PROCESSING_OPTIONS.TRANSITIONAL && status[1] !== \"valid\") ||\n (processing === PROCESSING_OPTIONS.NONTRANSITIONAL &&\n status[1] !== \"valid\" && status[1] !== \"deviation\")) {\n error = true;\n break;\n }\n }\n\n return {\n label: label,\n error: error\n };\n}\n\nfunction processing(domain_name, useSTD3, processing_option) {\n var result = mapChars(domain_name, useSTD3, processing_option);\n result.string = normalize(result.string);\n\n var labels = result.string.split(\".\");\n for (var i = 0; i < labels.length; ++i) {\n try {\n var validation = validateLabel(labels[i]);\n labels[i] = validation.label;\n result.error = result.error || validation.error;\n } catch(e) {\n result.error = true;\n }\n }\n\n return {\n string: labels.join(\".\"),\n error: result.error\n };\n}\n\nmodule.exports.toASCII = function(domain_name, useSTD3, processing_option, verifyDnsLength) {\n var result = processing(domain_name, useSTD3, processing_option);\n var labels = result.string.split(\".\");\n labels = labels.map(function(l) {\n try {\n return punycode.toASCII(l);\n } catch(e) {\n result.error = true;\n return l;\n }\n });\n\n if (verifyDnsLength) {\n var total = labels.slice(0, labels.length - 1).join(\".\").length;\n if (total.length > 253 || total.length === 0) {\n result.error = true;\n }\n\n for (var i=0; i < labels.length; ++i) {\n if (labels.length > 63 || labels.length === 0) {\n result.error = true;\n break;\n }\n }\n }\n\n if (result.error) return null;\n return labels.join(\".\");\n};\n\nmodule.exports.toUnicode = function(domain_name, useSTD3) {\n var result = processing(domain_name, useSTD3, PROCESSING_OPTIONS.NONTRANSITIONAL);\n\n return {\n domain: result.string,\n error: result.error\n };\n};\n\nmodule.exports.PROCESSING_OPTIONS = PROCESSING_OPTIONS;\n","module.exports = require('./lib/tunnel');\n","'use strict';\n\nvar net = require('net');\nvar tls = require('tls');\nvar http = require('http');\nvar https = require('https');\nvar events = require('events');\nvar assert = require('assert');\nvar util = require('util');\n\n\nexports.httpOverHttp = httpOverHttp;\nexports.httpsOverHttp = httpsOverHttp;\nexports.httpOverHttps = httpOverHttps;\nexports.httpsOverHttps = httpsOverHttps;\n\n\nfunction httpOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n return agent;\n}\n\nfunction httpsOverHttp(options) {\n var agent = new TunnelingAgent(options);\n agent.request = http.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\nfunction httpOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n return agent;\n}\n\nfunction httpsOverHttps(options) {\n var agent = new TunnelingAgent(options);\n agent.request = https.request;\n agent.createSocket = createSecureSocket;\n agent.defaultPort = 443;\n return agent;\n}\n\n\nfunction TunnelingAgent(options) {\n var self = this;\n self.options = options || {};\n self.proxyOptions = self.options.proxy || {};\n self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets;\n self.requests = [];\n self.sockets = [];\n\n self.on('free', function onFree(socket, host, port, localAddress) {\n var options = toOptions(host, port, localAddress);\n for (var i = 0, len = self.requests.length; i < len; ++i) {\n var pending = 
self.requests[i];\n if (pending.host === options.host && pending.port === options.port) {\n // Detect the request to connect same origin server,\n // reuse the connection.\n self.requests.splice(i, 1);\n pending.request.onSocket(socket);\n return;\n }\n }\n socket.destroy();\n self.removeSocket(socket);\n });\n}\nutil.inherits(TunnelingAgent, events.EventEmitter);\n\nTunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) {\n var self = this;\n var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress));\n\n if (self.sockets.length >= this.maxSockets) {\n // We are over limit so we'll add it to the queue.\n self.requests.push(options);\n return;\n }\n\n // If we are under maxSockets create a new one.\n self.createSocket(options, function(socket) {\n socket.on('free', onFree);\n socket.on('close', onCloseOrRemove);\n socket.on('agentRemove', onCloseOrRemove);\n req.onSocket(socket);\n\n function onFree() {\n self.emit('free', socket, options);\n }\n\n function onCloseOrRemove(err) {\n self.removeSocket(socket);\n socket.removeListener('free', onFree);\n socket.removeListener('close', onCloseOrRemove);\n socket.removeListener('agentRemove', onCloseOrRemove);\n }\n });\n};\n\nTunnelingAgent.prototype.createSocket = function createSocket(options, cb) {\n var self = this;\n var placeholder = {};\n self.sockets.push(placeholder);\n\n var connectOptions = mergeOptions({}, self.proxyOptions, {\n method: 'CONNECT',\n path: options.host + ':' + options.port,\n agent: false,\n headers: {\n host: options.host + ':' + options.port\n }\n });\n if (options.localAddress) {\n connectOptions.localAddress = options.localAddress;\n }\n if (connectOptions.proxyAuth) {\n connectOptions.headers = connectOptions.headers || {};\n connectOptions.headers['Proxy-Authorization'] = 'Basic ' +\n new Buffer(connectOptions.proxyAuth).toString('base64');\n }\n\n debug('making CONNECT request');\n var connectReq = self.request(connectOptions);\n connectReq.useChunkedEncodingByDefault = false; // for v0.6\n connectReq.once('response', onResponse); // for v0.6\n connectReq.once('upgrade', onUpgrade); // for v0.6\n connectReq.once('connect', onConnect); // for v0.7 or later\n connectReq.once('error', onError);\n connectReq.end();\n\n function onResponse(res) {\n // Very hacky. 
This is necessary to avoid http-parser leaks.\n res.upgrade = true;\n }\n\n function onUpgrade(res, socket, head) {\n // Hacky.\n process.nextTick(function() {\n onConnect(res, socket, head);\n });\n }\n\n function onConnect(res, socket, head) {\n connectReq.removeAllListeners();\n socket.removeAllListeners();\n\n if (res.statusCode !== 200) {\n debug('tunneling socket could not be established, statusCode=%d',\n res.statusCode);\n socket.destroy();\n var error = new Error('tunneling socket could not be established, ' +\n 'statusCode=' + res.statusCode);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n if (head.length > 0) {\n debug('got illegal response body from proxy');\n socket.destroy();\n var error = new Error('got illegal response body from proxy');\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n return;\n }\n debug('tunneling connection has established');\n self.sockets[self.sockets.indexOf(placeholder)] = socket;\n return cb(socket);\n }\n\n function onError(cause) {\n connectReq.removeAllListeners();\n\n debug('tunneling socket could not be established, cause=%s\\n',\n cause.message, cause.stack);\n var error = new Error('tunneling socket could not be established, ' +\n 'cause=' + cause.message);\n error.code = 'ECONNRESET';\n options.request.emit('error', error);\n self.removeSocket(placeholder);\n }\n};\n\nTunnelingAgent.prototype.removeSocket = function removeSocket(socket) {\n var pos = this.sockets.indexOf(socket)\n if (pos === -1) {\n return;\n }\n this.sockets.splice(pos, 1);\n\n var pending = this.requests.shift();\n if (pending) {\n // If we have pending requests and a socket gets closed a new one\n // needs to be created to take over in the pool for the one that closed.\n this.createSocket(pending, function(socket) {\n pending.request.onSocket(socket);\n });\n }\n};\n\nfunction createSecureSocket(options, cb) {\n var self = this;\n TunnelingAgent.prototype.createSocket.call(self, options, function(socket) {\n var hostHeader = options.request.getHeader('host');\n var tlsOptions = mergeOptions({}, self.options, {\n socket: socket,\n servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host\n });\n\n // 0 is dummy port for v0.6\n var secureSocket = tls.connect(0, tlsOptions);\n self.sockets[self.sockets.indexOf(socket)] = secureSocket;\n cb(secureSocket);\n });\n}\n\n\nfunction toOptions(host, port, localAddress) {\n if (typeof host === 'string') { // since v0.10\n return {\n host: host,\n port: port,\n localAddress: localAddress\n };\n }\n return host; // for v0.11 or later\n}\n\nfunction mergeOptions(target) {\n for (var i = 1, len = arguments.length; i < len; ++i) {\n var overrides = arguments[i];\n if (typeof overrides === 'object') {\n var keys = Object.keys(overrides);\n for (var j = 0, keyLen = keys.length; j < keyLen; ++j) {\n var k = keys[j];\n if (overrides[k] !== undefined) {\n target[k] = overrides[k];\n }\n }\n }\n }\n return target;\n}\n\n\nvar debug;\nif (process.env.NODE_DEBUG && /\\btunnel\\b/.test(process.env.NODE_DEBUG)) {\n debug = function() {\n var args = Array.prototype.slice.call(arguments);\n if (typeof args[0] === 'string') {\n args[0] = 'TUNNEL: ' + args[0];\n } else {\n args.unshift('TUNNEL:');\n }\n console.error.apply(console, args);\n }\n} else {\n debug = function() {};\n}\nexports.debug = debug; // for test\n","'use strict';\n\nObject.defineProperty(exports, '__esModule', { value: true });\n\nfunction getUserAgent() {\n if (typeof navigator === \"object\" && \"userAgent\" in navigator) {\n return navigator.userAgent;\n }\n\n if (typeof process === \"object\" && \"version\" in process) {\n return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`;\n }\n\n return \"\";\n}\n\nexports.getUserAgent = getUserAgent;\n//# sourceMappingURL=index.js.map\n","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nObject.defineProperty(exports, \"v1\", {\n enumerable: true,\n get: function () {\n return _v.default;\n }\n});\nObject.defineProperty(exports, \"v3\", {\n enumerable: true,\n get: function () {\n return _v2.default;\n }\n});\nObject.defineProperty(exports, \"v4\", {\n enumerable: true,\n get: function () {\n return _v3.default;\n }\n});\nObject.defineProperty(exports, \"v5\", {\n enumerable: true,\n get: function () {\n return _v4.default;\n }\n});\nObject.defineProperty(exports, \"NIL\", {\n enumerable: true,\n get: function () {\n return _nil.default;\n }\n});\nObject.defineProperty(exports, \"version\", {\n enumerable: true,\n get: function () {\n return _version.default;\n }\n});\nObject.defineProperty(exports, \"validate\", {\n enumerable: true,\n get: function () {\n return _validate.default;\n }\n});\nObject.defineProperty(exports, \"stringify\", {\n enumerable: true,\n get: function () {\n return _stringify.default;\n }\n});\nObject.defineProperty(exports, \"parse\", {\n enumerable: true,\n get: function () {\n return _parse.default;\n }\n});\n\nvar _v = _interopRequireDefault(require(\"./v1.js\"));\n\nvar _v2 = _interopRequireDefault(require(\"./v3.js\"));\n\nvar _v3 = _interopRequireDefault(require(\"./v4.js\"));\n\nvar _v4 = _interopRequireDefault(require(\"./v5.js\"));\n\nvar _nil = _interopRequireDefault(require(\"./nil.js\"));\n\nvar _version = _interopRequireDefault(require(\"./version.js\"));\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction md5(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('md5').update(bytes).digest();\n}\n\nvar _default = md5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = '00000000-0000-0000-0000-000000000000';\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction parse(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n let v;\n const arr = new Uint8Array(16); // Parse ########-....-....-....-............\n\n arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24;\n arr[1] = v >>> 16 & 0xff;\n arr[2] = v >>> 8 & 0xff;\n arr[3] = v & 0xff; // Parse ........-####-....-....-............\n\n arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8;\n arr[5] = v & 0xff; // Parse ........-....-####-....-............\n\n arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8;\n arr[7] = v & 0xff; // Parse ........-....-....-####-............\n\n arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8;\n arr[9] = v & 0xff; // Parse ........-....-....-....-############\n // (Use \"/\" to avoid 32-bit truncation when bit-shifting high-order bytes)\n\n arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff;\n arr[11] = v / 0x100000000 & 0xff;\n arr[12] = v >>> 24 & 0xff;\n arr[13] = v >>> 16 & 0xff;\n arr[14] = v >>> 8 & 0xff;\n arr[15] = v & 0xff;\n return arr;\n}\n\nvar _default = parse;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\nvar _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = rng;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate\n\nlet poolPtr = rnds8Pool.length;\n\nfunction rng() {\n if (poolPtr > rnds8Pool.length - 16) {\n _crypto.default.randomFillSync(rnds8Pool);\n\n poolPtr = 0;\n }\n\n return rnds8Pool.slice(poolPtr, poolPtr += 16);\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _crypto = _interopRequireDefault(require(\"crypto\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction sha1(bytes) {\n if (Array.isArray(bytes)) {\n bytes = Buffer.from(bytes);\n } else if (typeof bytes === 'string') {\n bytes = Buffer.from(bytes, 'utf8');\n }\n\n return _crypto.default.createHash('sha1').update(bytes).digest();\n}\n\nvar _default = sha1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n/**\n * Convert array of 16 byte values to UUID string format of the form:\n * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX\n */\nconst byteToHex = [];\n\nfor (let i = 0; i < 256; ++i) {\n byteToHex.push((i + 0x100).toString(16).substr(1));\n}\n\nfunction stringify(arr, offset = 0) {\n // Note: Be careful editing this code! It's been tuned for performance\n // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434\n const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one\n // of the following:\n // - One or more input array values don't map to a hex octet (leading to\n // \"undefined\" in the uuid)\n // - Invalid input values for the RFC `version` or `variant` fields\n\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Stringified UUID is invalid');\n }\n\n return uuid;\n}\n\nvar _default = stringify;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\n// **`v1()` - Generate time-based UUID**\n//\n// Inspired by https://github.com/LiosK/UUID.js\n// and http://docs.python.org/library/uuid.html\nlet _nodeId;\n\nlet _clockseq; // Previous uuid creation time\n\n\nlet _lastMSecs = 0;\nlet _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details\n\nfunction v1(options, buf, offset) {\n let i = buf && offset || 0;\n const b = buf || new Array(16);\n options = options || {};\n let node = options.node || _nodeId;\n let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not\n // specified. We do this lazily to minimize issues related to insufficient\n // system entropy. 
See #189\n\n if (node == null || clockseq == null) {\n const seedBytes = options.random || (options.rng || _rng.default)();\n\n if (node == null) {\n // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1)\n node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]];\n }\n\n if (clockseq == null) {\n // Per 4.2.2, randomize (14 bit) clockseq\n clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff;\n }\n } // UUID timestamps are 100 nano-second units since the Gregorian epoch,\n // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so\n // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs'\n // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00.\n\n\n let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock\n // cycle to simulate higher resolution clock\n\n let nsecs = options.nsecs !== undefined ? options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs)\n\n const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression\n\n if (dt < 0 && options.clockseq === undefined) {\n clockseq = clockseq + 1 & 0x3fff;\n } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new\n // time interval\n\n\n if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) {\n nsecs = 0;\n } // Per 4.2.1.2 Throw error if too many uuids are requested\n\n\n if (nsecs >= 10000) {\n throw new Error(\"uuid.v1(): Can't create more than 10M uuids/sec\");\n }\n\n _lastMSecs = msecs;\n _lastNSecs = nsecs;\n _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch\n\n msecs += 12219292800000; // `time_low`\n\n const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000;\n b[i++] = tl >>> 24 & 0xff;\n b[i++] = tl >>> 16 & 0xff;\n b[i++] = tl >>> 8 & 0xff;\n b[i++] = tl & 0xff; // `time_mid`\n\n const tmh = msecs / 0x100000000 * 10000 & 0xfffffff;\n b[i++] = tmh >>> 8 & 0xff;\n b[i++] = tmh & 0xff; // `time_high_and_version`\n\n b[i++] = tmh >>> 24 & 0xf | 0x10; // include version\n\n b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant)\n\n b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low`\n\n b[i++] = clockseq & 0xff; // `node`\n\n for (let n = 0; n < 6; ++n) {\n b[i + n] = node[n];\n }\n\n return buf || (0, _stringify.default)(b);\n}\n\nvar _default = v1;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _md = _interopRequireDefault(require(\"./md5.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v3 = (0, _v.default)('v3', 0x30, _md.default);\nvar _default = v3;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = _default;\nexports.URL = exports.DNS = void 0;\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nvar _parse = _interopRequireDefault(require(\"./parse.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction stringToBytes(str) {\n str = unescape(encodeURIComponent(str)); // UTF8 escape\n\n const bytes = [];\n\n for (let i = 0; i < str.length; ++i) {\n bytes.push(str.charCodeAt(i));\n }\n\n return bytes;\n}\n\nconst DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8';\nexports.DNS = DNS;\nconst URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8';\nexports.URL = URL;\n\nfunction _default(name, version, hashfunc) {\n function generateUUID(value, namespace, buf, offset) {\n if (typeof value === 'string') {\n value = stringToBytes(value);\n }\n\n if (typeof namespace === 'string') {\n namespace = (0, _parse.default)(namespace);\n }\n\n if (namespace.length !== 16) {\n throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)');\n } // Compute hash of namespace and value, Per 4.3\n // Future: Use spread syntax when supported on all platforms, e.g. `bytes =\n // hashfunc([...namespace, ... value])`\n\n\n let bytes = new Uint8Array(16 + value.length);\n bytes.set(namespace);\n bytes.set(value, namespace.length);\n bytes = hashfunc(bytes);\n bytes[6] = bytes[6] & 0x0f | version;\n bytes[8] = bytes[8] & 0x3f | 0x80;\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = bytes[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(bytes);\n } // Function#name is not settable on some platforms (#270)\n\n\n try {\n generateUUID.name = name; // eslint-disable-next-line no-empty\n } catch (err) {} // For CommonJS default export support\n\n\n generateUUID.DNS = DNS;\n generateUUID.URL = URL;\n return generateUUID;\n}","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _rng = _interopRequireDefault(require(\"./rng.js\"));\n\nvar _stringify = _interopRequireDefault(require(\"./stringify.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction v4(options, buf, offset) {\n options = options || {};\n\n const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved`\n\n\n rnds[6] = rnds[6] & 0x0f | 0x40;\n rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided\n\n if (buf) {\n offset = offset || 0;\n\n for (let i = 0; i < 16; ++i) {\n buf[offset + i] = rnds[i];\n }\n\n return buf;\n }\n\n return (0, _stringify.default)(rnds);\n}\n\nvar _default = v4;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _v = _interopRequireDefault(require(\"./v35.js\"));\n\nvar _sha = _interopRequireDefault(require(\"./sha1.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nconst v5 = (0, _v.default)('v5', 0x50, _sha.default);\nvar _default = v5;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _regex = _interopRequireDefault(require(\"./regex.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; }\n\nfunction validate(uuid) {\n return typeof uuid === 'string' && _regex.default.test(uuid);\n}\n\nvar _default = validate;\nexports.default = _default;","\"use strict\";\n\nObject.defineProperty(exports, \"__esModule\", {\n value: true\n});\nexports.default = void 0;\n\nvar _validate = _interopRequireDefault(require(\"./validate.js\"));\n\nfunction _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }\n\nfunction version(uuid) {\n if (!(0, _validate.default)(uuid)) {\n throw TypeError('Invalid UUID');\n }\n\n return parseInt(uuid.substr(14, 1), 16);\n}\n\nvar _default = version;\nexports.default = _default;","/**\n * web-streams-polyfill v3.2.1\n */\n(function (global, factory) {\n typeof exports === 'object' && typeof module !== 'undefined' ? factory(exports) :\n typeof define === 'function' && define.amd ? define(['exports'], factory) :\n (global = typeof globalThis !== 'undefined' ? globalThis : global || self, factory(global.WebStreamsPolyfill = {}));\n}(this, (function (exports) { 'use strict';\n\n /// \n const SymbolPolyfill = typeof Symbol === 'function' && typeof Symbol.iterator === 'symbol' ?\n Symbol :\n description => `Symbol(${description})`;\n\n /// \n function noop() {\n return undefined;\n }\n function getGlobals() {\n if (typeof self !== 'undefined') {\n return self;\n }\n else if (typeof window !== 'undefined') {\n return window;\n }\n else if (typeof global !== 'undefined') {\n return global;\n }\n return undefined;\n }\n const globals = getGlobals();\n\n function typeIsObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n const rethrowAssertionErrorRejection = noop;\n\n const originalPromise = Promise;\n const originalPromiseThen = Promise.prototype.then;\n const originalPromiseResolve = Promise.resolve.bind(originalPromise);\n const originalPromiseReject = Promise.reject.bind(originalPromise);\n function newPromise(executor) {\n return new originalPromise(executor);\n }\n function promiseResolvedWith(value) {\n return originalPromiseResolve(value);\n }\n function promiseRejectedWith(reason) {\n return originalPromiseReject(reason);\n }\n function PerformPromiseThen(promise, onFulfilled, onRejected) {\n // There doesn't appear to be any way to correctly emulate the behaviour from JavaScript, so this is just an\n // approximation.\n return originalPromiseThen.call(promise, onFulfilled, onRejected);\n }\n function uponPromise(promise, onFulfilled, onRejected) {\n PerformPromiseThen(PerformPromiseThen(promise, onFulfilled, onRejected), undefined, rethrowAssertionErrorRejection);\n }\n function uponFulfillment(promise, onFulfilled) {\n uponPromise(promise, onFulfilled);\n }\n function uponRejection(promise, onRejected) {\n uponPromise(promise, undefined, onRejected);\n }\n function transformPromiseWith(promise, fulfillmentHandler, rejectionHandler) {\n return PerformPromiseThen(promise, fulfillmentHandler, rejectionHandler);\n }\n function setPromiseIsHandledToTrue(promise) {\n PerformPromiseThen(promise, undefined, rethrowAssertionErrorRejection);\n }\n const queueMicrotask = (() => {\n const globalQueueMicrotask = globals && globals.queueMicrotask;\n if (typeof globalQueueMicrotask === 'function') {\n return globalQueueMicrotask;\n }\n const resolvedPromise = promiseResolvedWith(undefined);\n return (fn) => PerformPromiseThen(resolvedPromise, fn);\n })();\n function reflectCall(F, V, args) {\n if (typeof F !== 'function') {\n throw new TypeError('Argument is not a 
function');\n }\n return Function.prototype.apply.call(F, V, args);\n }\n function promiseCall(F, V, args) {\n try {\n return promiseResolvedWith(reflectCall(F, V, args));\n }\n catch (value) {\n return promiseRejectedWith(value);\n }\n }\n\n // Original from Chromium\n // https://chromium.googlesource.com/chromium/src/+/0aee4434a4dba42a42abaea9bfbc0cd196a63bc1/third_party/blink/renderer/core/streams/SimpleQueue.js\n const QUEUE_MAX_ARRAY_SIZE = 16384;\n /**\n * Simple queue structure.\n *\n * Avoids scalability issues with using a packed array directly by using\n * multiple arrays in a linked list and keeping the array size bounded.\n */\n class SimpleQueue {\n constructor() {\n this._cursor = 0;\n this._size = 0;\n // _front and _back are always defined.\n this._front = {\n _elements: [],\n _next: undefined\n };\n this._back = this._front;\n // The cursor is used to avoid calling Array.shift().\n // It contains the index of the front element of the array inside the\n // front-most node. It is always in the range [0, QUEUE_MAX_ARRAY_SIZE).\n this._cursor = 0;\n // When there is only one node, size === elements.length - cursor.\n this._size = 0;\n }\n get length() {\n return this._size;\n }\n // For exception safety, this method is structured in order:\n // 1. Read state\n // 2. Calculate required state mutations\n // 3. Perform state mutations\n push(element) {\n const oldBack = this._back;\n let newBack = oldBack;\n if (oldBack._elements.length === QUEUE_MAX_ARRAY_SIZE - 1) {\n newBack = {\n _elements: [],\n _next: undefined\n };\n }\n // push() is the mutation most likely to throw an exception, so it\n // goes first.\n oldBack._elements.push(element);\n if (newBack !== oldBack) {\n this._back = newBack;\n oldBack._next = newBack;\n }\n ++this._size;\n }\n // Like push(), shift() follows the read -> calculate -> mutate pattern for\n // exception safety.\n shift() { // must not be called on an empty queue\n const oldFront = this._front;\n let newFront = oldFront;\n const oldCursor = this._cursor;\n let newCursor = oldCursor + 1;\n const elements = oldFront._elements;\n const element = elements[oldCursor];\n if (newCursor === QUEUE_MAX_ARRAY_SIZE) {\n newFront = oldFront._next;\n newCursor = 0;\n }\n // No mutations before this point.\n --this._size;\n this._cursor = newCursor;\n if (oldFront !== newFront) {\n this._front = newFront;\n }\n // Permit shifted element to be garbage collected.\n elements[oldCursor] = undefined;\n return element;\n }\n // The tricky thing about forEach() is that it can be called\n // re-entrantly. The queue may be mutated inside the callback. It is easy to\n // see that push() within the callback has no negative effects since the end\n // of the queue is checked for on every iteration. If shift() is called\n // repeatedly within the callback then the next iteration may return an\n // element that has been removed. 
In this case the callback will be called\n // with undefined values until we either \"catch up\" with elements that still\n // exist or reach the back of the queue.\n forEach(callback) {\n let i = this._cursor;\n let node = this._front;\n let elements = node._elements;\n while (i !== elements.length || node._next !== undefined) {\n if (i === elements.length) {\n node = node._next;\n elements = node._elements;\n i = 0;\n if (elements.length === 0) {\n break;\n }\n }\n callback(elements[i]);\n ++i;\n }\n }\n // Return the element that would be returned if shift() was called now,\n // without modifying the queue.\n peek() { // must not be called on an empty queue\n const front = this._front;\n const cursor = this._cursor;\n return front._elements[cursor];\n }\n }\n\n function ReadableStreamReaderGenericInitialize(reader, stream) {\n reader._ownerReadableStream = stream;\n stream._reader = reader;\n if (stream._state === 'readable') {\n defaultReaderClosedPromiseInitialize(reader);\n }\n else if (stream._state === 'closed') {\n defaultReaderClosedPromiseInitializeAsResolved(reader);\n }\n else {\n defaultReaderClosedPromiseInitializeAsRejected(reader, stream._storedError);\n }\n }\n // A client of ReadableStreamDefaultReader and ReadableStreamBYOBReader may use these functions directly to bypass state\n // check.\n function ReadableStreamReaderGenericCancel(reader, reason) {\n const stream = reader._ownerReadableStream;\n return ReadableStreamCancel(stream, reason);\n }\n function ReadableStreamReaderGenericRelease(reader) {\n if (reader._ownerReadableStream._state === 'readable') {\n defaultReaderClosedPromiseReject(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n else {\n defaultReaderClosedPromiseResetToRejected(reader, new TypeError(`Reader was released and can no longer be used to monitor the stream's closedness`));\n }\n reader._ownerReadableStream._reader = undefined;\n reader._ownerReadableStream = undefined;\n }\n // Helper functions for the readers.\n function readerLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released reader');\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderClosedPromiseInitialize(reader) {\n reader._closedPromise = newPromise((resolve, reject) => {\n reader._closedPromise_resolve = resolve;\n reader._closedPromise_reject = reject;\n });\n }\n function defaultReaderClosedPromiseInitializeAsRejected(reader, reason) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseReject(reader, reason);\n }\n function defaultReaderClosedPromiseInitializeAsResolved(reader) {\n defaultReaderClosedPromiseInitialize(reader);\n defaultReaderClosedPromiseResolve(reader);\n }\n function defaultReaderClosedPromiseReject(reader, reason) {\n if (reader._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(reader._closedPromise);\n reader._closedPromise_reject(reason);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n function defaultReaderClosedPromiseResetToRejected(reader, reason) {\n defaultReaderClosedPromiseInitializeAsRejected(reader, reason);\n }\n function defaultReaderClosedPromiseResolve(reader) {\n if (reader._closedPromise_resolve === undefined) {\n return;\n }\n reader._closedPromise_resolve(undefined);\n reader._closedPromise_resolve = undefined;\n reader._closedPromise_reject = undefined;\n }\n\n const AbortSteps = 
SymbolPolyfill('[[AbortSteps]]');\n const ErrorSteps = SymbolPolyfill('[[ErrorSteps]]');\n const CancelSteps = SymbolPolyfill('[[CancelSteps]]');\n const PullSteps = SymbolPolyfill('[[PullSteps]]');\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isFinite#Polyfill\n const NumberIsFinite = Number.isFinite || function (x) {\n return typeof x === 'number' && isFinite(x);\n };\n\n /// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Math/trunc#Polyfill\n const MathTrunc = Math.trunc || function (v) {\n return v < 0 ? Math.ceil(v) : Math.floor(v);\n };\n\n // https://heycam.github.io/webidl/#idl-dictionaries\n function isDictionary(x) {\n return typeof x === 'object' || typeof x === 'function';\n }\n function assertDictionary(obj, context) {\n if (obj !== undefined && !isDictionary(obj)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-callback-functions\n function assertFunction(x, context) {\n if (typeof x !== 'function') {\n throw new TypeError(`${context} is not a function.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-object\n function isObject(x) {\n return (typeof x === 'object' && x !== null) || typeof x === 'function';\n }\n function assertObject(x, context) {\n if (!isObject(x)) {\n throw new TypeError(`${context} is not an object.`);\n }\n }\n function assertRequiredArgument(x, position, context) {\n if (x === undefined) {\n throw new TypeError(`Parameter ${position} is required in '${context}'.`);\n }\n }\n function assertRequiredField(x, field, context) {\n if (x === undefined) {\n throw new TypeError(`${field} is required in '${context}'.`);\n }\n }\n // https://heycam.github.io/webidl/#idl-unrestricted-double\n function convertUnrestrictedDouble(value) {\n return Number(value);\n }\n function censorNegativeZero(x) {\n return x === 0 ? 
0 : x;\n }\n function integerPart(x) {\n return censorNegativeZero(MathTrunc(x));\n }\n // https://heycam.github.io/webidl/#idl-unsigned-long-long\n function convertUnsignedLongLongWithEnforceRange(value, context) {\n const lowerBound = 0;\n const upperBound = Number.MAX_SAFE_INTEGER;\n let x = Number(value);\n x = censorNegativeZero(x);\n if (!NumberIsFinite(x)) {\n throw new TypeError(`${context} is not a finite number`);\n }\n x = integerPart(x);\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(`${context} is outside the accepted range of ${lowerBound} to ${upperBound}, inclusive`);\n }\n if (!NumberIsFinite(x) || x === 0) {\n return 0;\n }\n // TODO Use BigInt if supported?\n // let xBigInt = BigInt(integerPart(x));\n // xBigInt = BigInt.asUintN(64, xBigInt);\n // return Number(xBigInt);\n return x;\n }\n\n function assertReadableStream(x, context) {\n if (!IsReadableStream(x)) {\n throw new TypeError(`${context} is not a ReadableStream.`);\n }\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamDefaultReader(stream) {\n return new ReadableStreamDefaultReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadRequest(stream, readRequest) {\n stream._reader._readRequests.push(readRequest);\n }\n function ReadableStreamFulfillReadRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readRequest = reader._readRequests.shift();\n if (done) {\n readRequest._closeSteps();\n }\n else {\n readRequest._chunkSteps(chunk);\n }\n }\n function ReadableStreamGetNumReadRequests(stream) {\n return stream._reader._readRequests.length;\n }\n function ReadableStreamHasDefaultReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamDefaultReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A default reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamDefaultReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamDefaultReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed,\n * or rejected if the stream ever errors or the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamDefaultReader(this)) {\n return promiseRejectedWith(defaultReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Returns a promise that allows access to the next chunk from the stream's internal queue, if available.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read() {\n if (!IsReadableStreamDefaultReader(this)) {\n return 
promiseRejectedWith(defaultReaderBrandCheckException('read'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: () => resolvePromise({ value: undefined, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamDefaultReaderRead(this, readRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamDefaultReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamDefaultReader(this)) {\n throw defaultReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamDefaultReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamDefaultReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readRequests')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultReader;\n }\n function ReadableStreamDefaultReaderRead(reader, readRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'closed') {\n readRequest._closeSteps();\n }\n else if (stream._state === 'errored') {\n readRequest._errorSteps(stream._storedError);\n }\n else {\n stream._readableStreamController[PullSteps](readRequest);\n }\n }\n // Helper functions for the ReadableStreamDefaultReader.\n function defaultReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamDefaultReader.prototype.${name} can only be used on a ReadableStreamDefaultReader`);\n }\n\n /// \n /* eslint-disable @typescript-eslint/no-empty-function */\n const AsyncIteratorPrototype = Object.getPrototypeOf(Object.getPrototypeOf(async function* () { }).prototype);\n\n /// \n class ReadableStreamAsyncIteratorImpl {\n constructor(reader, preventCancel) {\n this._ongoingPromise = undefined;\n this._isFinished = false;\n this._reader = reader;\n this._preventCancel = preventCancel;\n }\n next() {\n const nextSteps = () => this._nextSteps();\n this._ongoingPromise = this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, nextSteps, nextSteps) :\n nextSteps();\n return 
this._ongoingPromise;\n }\n return(value) {\n const returnSteps = () => this._returnSteps(value);\n return this._ongoingPromise ?\n transformPromiseWith(this._ongoingPromise, returnSteps, returnSteps) :\n returnSteps();\n }\n _nextSteps() {\n if (this._isFinished) {\n return Promise.resolve({ value: undefined, done: true });\n }\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('iterate'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readRequest = {\n _chunkSteps: chunk => {\n this._ongoingPromise = undefined;\n // This needs to be delayed by one microtask, otherwise we stop pulling too early which breaks a test.\n // FIXME Is this a bug in the specification, or in the test?\n queueMicrotask(() => resolvePromise({ value: chunk, done: false }));\n },\n _closeSteps: () => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n resolvePromise({ value: undefined, done: true });\n },\n _errorSteps: reason => {\n this._ongoingPromise = undefined;\n this._isFinished = true;\n ReadableStreamReaderGenericRelease(reader);\n rejectPromise(reason);\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promise;\n }\n _returnSteps(value) {\n if (this._isFinished) {\n return Promise.resolve({ value, done: true });\n }\n this._isFinished = true;\n const reader = this._reader;\n if (reader._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('finish iterating'));\n }\n if (!this._preventCancel) {\n const result = ReadableStreamReaderGenericCancel(reader, value);\n ReadableStreamReaderGenericRelease(reader);\n return transformPromiseWith(result, () => ({ value, done: true }));\n }\n ReadableStreamReaderGenericRelease(reader);\n return promiseResolvedWith({ value, done: true });\n }\n }\n const ReadableStreamAsyncIteratorPrototype = {\n next() {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('next'));\n }\n return this._asyncIteratorImpl.next();\n },\n return(value) {\n if (!IsReadableStreamAsyncIterator(this)) {\n return promiseRejectedWith(streamAsyncIteratorBrandCheckException('return'));\n }\n return this._asyncIteratorImpl.return(value);\n }\n };\n if (AsyncIteratorPrototype !== undefined) {\n Object.setPrototypeOf(ReadableStreamAsyncIteratorPrototype, AsyncIteratorPrototype);\n }\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamAsyncIterator(stream, preventCancel) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n const impl = new ReadableStreamAsyncIteratorImpl(reader, preventCancel);\n const iterator = Object.create(ReadableStreamAsyncIteratorPrototype);\n iterator._asyncIteratorImpl = impl;\n return iterator;\n }\n function IsReadableStreamAsyncIterator(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_asyncIteratorImpl')) {\n return false;\n }\n try {\n // noinspection SuspiciousTypeOfGuard\n return x._asyncIteratorImpl instanceof\n ReadableStreamAsyncIteratorImpl;\n }\n catch (_a) {\n return false;\n }\n }\n // Helper functions for the ReadableStream.\n function streamAsyncIteratorBrandCheckException(name) {\n return new TypeError(`ReadableStreamAsyncIterator.${name} can only be used on a ReadableSteamAsyncIterator`);\n }\n\n 
/// \n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Number/isNaN#Polyfill\n const NumberIsNaN = Number.isNaN || function (x) {\n // eslint-disable-next-line no-self-compare\n return x !== x;\n };\n\n function CreateArrayFromList(elements) {\n // We use arrays to represent lists, so this is basically a no-op.\n // Do a slice though just in case we happen to depend on the unique-ness.\n return elements.slice();\n }\n function CopyDataBlockBytes(dest, destOffset, src, srcOffset, n) {\n new Uint8Array(dest).set(new Uint8Array(src, srcOffset, n), destOffset);\n }\n // Not implemented correctly\n function TransferArrayBuffer(O) {\n return O;\n }\n // Not implemented correctly\n // eslint-disable-next-line @typescript-eslint/no-unused-vars\n function IsDetachedBuffer(O) {\n return false;\n }\n function ArrayBufferSlice(buffer, begin, end) {\n // ArrayBuffer.prototype.slice is not available on IE10\n // https://www.caniuse.com/mdn-javascript_builtins_arraybuffer_slice\n if (buffer.slice) {\n return buffer.slice(begin, end);\n }\n const length = end - begin;\n const slice = new ArrayBuffer(length);\n CopyDataBlockBytes(slice, 0, buffer, begin, length);\n return slice;\n }\n\n function IsNonNegativeNumber(v) {\n if (typeof v !== 'number') {\n return false;\n }\n if (NumberIsNaN(v)) {\n return false;\n }\n if (v < 0) {\n return false;\n }\n return true;\n }\n function CloneAsUint8Array(O) {\n const buffer = ArrayBufferSlice(O.buffer, O.byteOffset, O.byteOffset + O.byteLength);\n return new Uint8Array(buffer);\n }\n\n function DequeueValue(container) {\n const pair = container._queue.shift();\n container._queueTotalSize -= pair.size;\n if (container._queueTotalSize < 0) {\n container._queueTotalSize = 0;\n }\n return pair.value;\n }\n function EnqueueValueWithSize(container, value, size) {\n if (!IsNonNegativeNumber(size) || size === Infinity) {\n throw new RangeError('Size must be a finite, non-NaN, non-negative number.');\n }\n container._queue.push({ value, size });\n container._queueTotalSize += size;\n }\n function PeekQueueValue(container) {\n const pair = container._queue.peek();\n return pair.value;\n }\n function ResetQueue(container) {\n container._queue = new SimpleQueue();\n container._queueTotalSize = 0;\n }\n\n /**\n * A pull-into request in a {@link ReadableByteStreamController}.\n *\n * @public\n */\n class ReadableStreamBYOBRequest {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the view for writing in to, or `null` if the BYOB request has already been responded to.\n */\n get view() {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('view');\n }\n return this._view;\n }\n respond(bytesWritten) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respond');\n }\n assertRequiredArgument(bytesWritten, 1, 'respond');\n bytesWritten = convertUnsignedLongLongWithEnforceRange(bytesWritten, 'First parameter');\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(this._view.buffer)) ;\n ReadableByteStreamControllerRespond(this._associatedReadableByteStreamController, bytesWritten);\n }\n respondWithNewView(view) {\n if (!IsReadableStreamBYOBRequest(this)) {\n throw byobRequestBrandCheckException('respondWithNewView');\n }\n assertRequiredArgument(view, 1, 'respondWithNewView');\n if (!ArrayBuffer.isView(view)) {\n throw new TypeError('You can 
only respond with array buffer views');\n }\n if (this._associatedReadableByteStreamController === undefined) {\n throw new TypeError('This BYOB request has been invalidated');\n }\n if (IsDetachedBuffer(view.buffer)) ;\n ReadableByteStreamControllerRespondWithNewView(this._associatedReadableByteStreamController, view);\n }\n }\n Object.defineProperties(ReadableStreamBYOBRequest.prototype, {\n respond: { enumerable: true },\n respondWithNewView: { enumerable: true },\n view: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBRequest.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBRequest',\n configurable: true\n });\n }\n /**\n * Allows control of a {@link ReadableStream | readable byte stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableByteStreamController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the current BYOB pull request, or `null` if there isn't one.\n */\n get byobRequest() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('byobRequest');\n }\n return ReadableByteStreamControllerGetBYOBRequest(this);\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying byte source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('desiredSize');\n }\n return ReadableByteStreamControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('close');\n }\n if (this._closeRequested) {\n throw new TypeError('The stream has already been closed; do not close it again!');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be closed`);\n }\n ReadableByteStreamControllerClose(this);\n }\n enqueue(chunk) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('enqueue');\n }\n assertRequiredArgument(chunk, 1, 'enqueue');\n if (!ArrayBuffer.isView(chunk)) {\n throw new TypeError('chunk must be an array buffer view');\n }\n if (chunk.byteLength === 0) {\n throw new TypeError('chunk must have non-zero byteLength');\n }\n if (chunk.buffer.byteLength === 0) {\n throw new TypeError(`chunk's buffer must have non-zero byteLength`);\n }\n if (this._closeRequested) {\n throw new TypeError('stream is closed or draining');\n }\n const state = this._controlledReadableByteStream._state;\n if (state !== 'readable') {\n throw new TypeError(`The stream (in ${state} state) is not in the readable state and cannot be enqueued to`);\n }\n ReadableByteStreamControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableByteStreamController(this)) {\n throw byteStreamControllerBrandCheckException('error');\n }\n ReadableByteStreamControllerError(this, e);\n }\n /** @internal */\n 
[CancelSteps](reason) {\n ReadableByteStreamControllerClearPendingPullIntos(this);\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableByteStreamControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableByteStream;\n if (this._queueTotalSize > 0) {\n const entry = this._queue.shift();\n this._queueTotalSize -= entry.byteLength;\n ReadableByteStreamControllerHandleQueueDrain(this);\n const view = new Uint8Array(entry.buffer, entry.byteOffset, entry.byteLength);\n readRequest._chunkSteps(view);\n return;\n }\n const autoAllocateChunkSize = this._autoAllocateChunkSize;\n if (autoAllocateChunkSize !== undefined) {\n let buffer;\n try {\n buffer = new ArrayBuffer(autoAllocateChunkSize);\n }\n catch (bufferE) {\n readRequest._errorSteps(bufferE);\n return;\n }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: autoAllocateChunkSize,\n byteOffset: 0,\n byteLength: autoAllocateChunkSize,\n bytesFilled: 0,\n elementSize: 1,\n viewConstructor: Uint8Array,\n readerType: 'default'\n };\n this._pendingPullIntos.push(pullIntoDescriptor);\n }\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableByteStreamControllerCallPullIfNeeded(this);\n }\n }\n Object.defineProperties(ReadableByteStreamController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n byobRequest: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableByteStreamController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableByteStreamController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableByteStreamController.\n function IsReadableByteStreamController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableByteStream')) {\n return false;\n }\n return x instanceof ReadableByteStreamController;\n }\n function IsReadableStreamBYOBRequest(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_associatedReadableByteStreamController')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBRequest;\n }\n function ReadableByteStreamControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableByteStreamControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n // TODO: Test controller argument\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableByteStreamControllerError(controller, e);\n });\n }\n function ReadableByteStreamControllerClearPendingPullIntos(controller) {\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n controller._pendingPullIntos = new SimpleQueue();\n }\n function ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor) {\n let done = false;\n if (stream._state === 'closed') {\n done = true;\n }\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n if (pullIntoDescriptor.readerType === 'default') {\n ReadableStreamFulfillReadRequest(stream, filledView, done);\n }\n else {\n 
ReadableStreamFulfillReadIntoRequest(stream, filledView, done);\n }\n }\n function ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor) {\n const bytesFilled = pullIntoDescriptor.bytesFilled;\n const elementSize = pullIntoDescriptor.elementSize;\n return new pullIntoDescriptor.viewConstructor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, bytesFilled / elementSize);\n }\n function ReadableByteStreamControllerEnqueueChunkToQueue(controller, buffer, byteOffset, byteLength) {\n controller._queue.push({ buffer, byteOffset, byteLength });\n controller._queueTotalSize += byteLength;\n }\n function ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor) {\n const elementSize = pullIntoDescriptor.elementSize;\n const currentAlignedBytes = pullIntoDescriptor.bytesFilled - pullIntoDescriptor.bytesFilled % elementSize;\n const maxBytesToCopy = Math.min(controller._queueTotalSize, pullIntoDescriptor.byteLength - pullIntoDescriptor.bytesFilled);\n const maxBytesFilled = pullIntoDescriptor.bytesFilled + maxBytesToCopy;\n const maxAlignedBytes = maxBytesFilled - maxBytesFilled % elementSize;\n let totalBytesToCopyRemaining = maxBytesToCopy;\n let ready = false;\n if (maxAlignedBytes > currentAlignedBytes) {\n totalBytesToCopyRemaining = maxAlignedBytes - pullIntoDescriptor.bytesFilled;\n ready = true;\n }\n const queue = controller._queue;\n while (totalBytesToCopyRemaining > 0) {\n const headOfQueue = queue.peek();\n const bytesToCopy = Math.min(totalBytesToCopyRemaining, headOfQueue.byteLength);\n const destStart = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n CopyDataBlockBytes(pullIntoDescriptor.buffer, destStart, headOfQueue.buffer, headOfQueue.byteOffset, bytesToCopy);\n if (headOfQueue.byteLength === bytesToCopy) {\n queue.shift();\n }\n else {\n headOfQueue.byteOffset += bytesToCopy;\n headOfQueue.byteLength -= bytesToCopy;\n }\n controller._queueTotalSize -= bytesToCopy;\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesToCopy, pullIntoDescriptor);\n totalBytesToCopyRemaining -= bytesToCopy;\n }\n return ready;\n }\n function ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, size, pullIntoDescriptor) {\n pullIntoDescriptor.bytesFilled += size;\n }\n function ReadableByteStreamControllerHandleQueueDrain(controller) {\n if (controller._queueTotalSize === 0 && controller._closeRequested) {\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(controller._controlledReadableByteStream);\n }\n else {\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n }\n function ReadableByteStreamControllerInvalidateBYOBRequest(controller) {\n if (controller._byobRequest === null) {\n return;\n }\n controller._byobRequest._associatedReadableByteStreamController = undefined;\n controller._byobRequest._view = null;\n controller._byobRequest = null;\n }\n function ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller) {\n while (controller._pendingPullIntos.length > 0) {\n if (controller._queueTotalSize === 0) {\n return;\n }\n const pullIntoDescriptor = controller._pendingPullIntos.peek();\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n }\n }\n }\n function 
ReadableByteStreamControllerPullInto(controller, view, readIntoRequest) {\n const stream = controller._controlledReadableByteStream;\n let elementSize = 1;\n if (view.constructor !== DataView) {\n elementSize = view.constructor.BYTES_PER_ELEMENT;\n }\n const ctor = view.constructor;\n // try {\n const buffer = TransferArrayBuffer(view.buffer);\n // } catch (e) {\n // readIntoRequest._errorSteps(e);\n // return;\n // }\n const pullIntoDescriptor = {\n buffer,\n bufferByteLength: buffer.byteLength,\n byteOffset: view.byteOffset,\n byteLength: view.byteLength,\n bytesFilled: 0,\n elementSize,\n viewConstructor: ctor,\n readerType: 'byob'\n };\n if (controller._pendingPullIntos.length > 0) {\n controller._pendingPullIntos.push(pullIntoDescriptor);\n // No ReadableByteStreamControllerCallPullIfNeeded() call since:\n // - No change happens on desiredSize\n // - The source has already been notified of that there's at least 1 pending read(view)\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n return;\n }\n if (stream._state === 'closed') {\n const emptyView = new ctor(pullIntoDescriptor.buffer, pullIntoDescriptor.byteOffset, 0);\n readIntoRequest._closeSteps(emptyView);\n return;\n }\n if (controller._queueTotalSize > 0) {\n if (ReadableByteStreamControllerFillPullIntoDescriptorFromQueue(controller, pullIntoDescriptor)) {\n const filledView = ReadableByteStreamControllerConvertPullIntoDescriptor(pullIntoDescriptor);\n ReadableByteStreamControllerHandleQueueDrain(controller);\n readIntoRequest._chunkSteps(filledView);\n return;\n }\n if (controller._closeRequested) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n readIntoRequest._errorSteps(e);\n return;\n }\n }\n controller._pendingPullIntos.push(pullIntoDescriptor);\n ReadableStreamAddReadIntoRequest(stream, readIntoRequest);\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerRespondInClosedState(controller, firstDescriptor) {\n const stream = controller._controlledReadableByteStream;\n if (ReadableStreamHasBYOBReader(stream)) {\n while (ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n const pullIntoDescriptor = ReadableByteStreamControllerShiftPendingPullInto(controller);\n ReadableByteStreamControllerCommitPullIntoDescriptor(stream, pullIntoDescriptor);\n }\n }\n }\n function ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, pullIntoDescriptor) {\n ReadableByteStreamControllerFillHeadPullIntoDescriptor(controller, bytesWritten, pullIntoDescriptor);\n if (pullIntoDescriptor.bytesFilled < pullIntoDescriptor.elementSize) {\n return;\n }\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n const remainderSize = pullIntoDescriptor.bytesFilled % pullIntoDescriptor.elementSize;\n if (remainderSize > 0) {\n const end = pullIntoDescriptor.byteOffset + pullIntoDescriptor.bytesFilled;\n const remainder = ArrayBufferSlice(pullIntoDescriptor.buffer, end - remainderSize, end);\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, remainder, 0, remainder.byteLength);\n }\n pullIntoDescriptor.bytesFilled -= remainderSize;\n ReadableByteStreamControllerCommitPullIntoDescriptor(controller._controlledReadableByteStream, pullIntoDescriptor);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n function ReadableByteStreamControllerRespondInternal(controller, bytesWritten) {\n const firstDescriptor = 
controller._pendingPullIntos.peek();\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n ReadableByteStreamControllerRespondInClosedState(controller);\n }\n else {\n ReadableByteStreamControllerRespondInReadableState(controller, bytesWritten, firstDescriptor);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerShiftPendingPullInto(controller) {\n const descriptor = controller._pendingPullIntos.shift();\n return descriptor;\n }\n function ReadableByteStreamControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return false;\n }\n if (controller._closeRequested) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (ReadableStreamHasDefaultReader(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n if (ReadableStreamHasBYOBReader(stream) && ReadableStreamGetNumReadIntoRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableByteStreamControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableByteStreamControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n }\n // A client of ReadableByteStreamController may use these functions directly to bypass state check.\n function ReadableByteStreamControllerClose(controller) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n if (controller._queueTotalSize > 0) {\n controller._closeRequested = true;\n return;\n }\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (firstPendingPullInto.bytesFilled > 0) {\n const e = new TypeError('Insufficient bytes to fill elements in the given buffer');\n ReadableByteStreamControllerError(controller, e);\n throw e;\n }\n }\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n function ReadableByteStreamControllerEnqueue(controller, chunk) {\n const stream = controller._controlledReadableByteStream;\n if (controller._closeRequested || stream._state !== 'readable') {\n return;\n }\n const buffer = chunk.buffer;\n const byteOffset = chunk.byteOffset;\n const byteLength = chunk.byteLength;\n const transferredBuffer = TransferArrayBuffer(buffer);\n if (controller._pendingPullIntos.length > 0) {\n const firstPendingPullInto = controller._pendingPullIntos.peek();\n if (IsDetachedBuffer(firstPendingPullInto.buffer)) ;\n firstPendingPullInto.buffer = TransferArrayBuffer(firstPendingPullInto.buffer);\n }\n ReadableByteStreamControllerInvalidateBYOBRequest(controller);\n if (ReadableStreamHasDefaultReader(stream)) {\n if (ReadableStreamGetNumReadRequests(stream) === 0) {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n else {\n if (controller._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerShiftPendingPullInto(controller);\n }\n const transferredView = new Uint8Array(transferredBuffer, byteOffset, byteLength);\n ReadableStreamFulfillReadRequest(stream, transferredView, false);\n }\n }\n else if (ReadableStreamHasBYOBReader(stream)) {\n // TODO: Ideally in this branch detaching should happen only if the buffer is 
not consumed fully.\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n ReadableByteStreamControllerProcessPullIntoDescriptorsUsingQueue(controller);\n }\n else {\n ReadableByteStreamControllerEnqueueChunkToQueue(controller, transferredBuffer, byteOffset, byteLength);\n }\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }\n function ReadableByteStreamControllerError(controller, e) {\n const stream = controller._controlledReadableByteStream;\n if (stream._state !== 'readable') {\n return;\n }\n ReadableByteStreamControllerClearPendingPullIntos(controller);\n ResetQueue(controller);\n ReadableByteStreamControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableByteStreamControllerGetBYOBRequest(controller) {\n if (controller._byobRequest === null && controller._pendingPullIntos.length > 0) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const view = new Uint8Array(firstDescriptor.buffer, firstDescriptor.byteOffset + firstDescriptor.bytesFilled, firstDescriptor.byteLength - firstDescriptor.bytesFilled);\n const byobRequest = Object.create(ReadableStreamBYOBRequest.prototype);\n SetUpReadableStreamBYOBRequest(byobRequest, controller, view);\n controller._byobRequest = byobRequest;\n }\n return controller._byobRequest;\n }\n function ReadableByteStreamControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableByteStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function ReadableByteStreamControllerRespond(controller, bytesWritten) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (bytesWritten !== 0) {\n throw new TypeError('bytesWritten must be 0 when calling respond() on a closed stream');\n }\n }\n else {\n if (bytesWritten === 0) {\n throw new TypeError('bytesWritten must be greater than 0 when calling respond() on a readable stream');\n }\n if (firstDescriptor.bytesFilled + bytesWritten > firstDescriptor.byteLength) {\n throw new RangeError('bytesWritten out of range');\n }\n }\n firstDescriptor.buffer = TransferArrayBuffer(firstDescriptor.buffer);\n ReadableByteStreamControllerRespondInternal(controller, bytesWritten);\n }\n function ReadableByteStreamControllerRespondWithNewView(controller, view) {\n const firstDescriptor = controller._pendingPullIntos.peek();\n const state = controller._controlledReadableByteStream._state;\n if (state === 'closed') {\n if (view.byteLength !== 0) {\n throw new TypeError('The view\\'s length must be 0 when calling respondWithNewView() on a closed stream');\n }\n }\n else {\n if (view.byteLength === 0) {\n throw new TypeError('The view\\'s length must be greater than 0 when calling respondWithNewView() on a readable stream');\n }\n }\n if (firstDescriptor.byteOffset + firstDescriptor.bytesFilled !== view.byteOffset) {\n throw new RangeError('The region specified by view does not match byobRequest');\n }\n if (firstDescriptor.bufferByteLength !== view.buffer.byteLength) {\n throw new RangeError('The buffer of view has different capacity than byobRequest');\n }\n if (firstDescriptor.bytesFilled + view.byteLength > firstDescriptor.byteLength) {\n throw new RangeError('The region specified by view is larger than byobRequest');\n }\n const viewByteLength = view.byteLength;\n 
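/*
 * ReadableByteStreamControllerRespond / RespondWithNewView above back the public
 * byobRequest.respond() and respondWithNewView() methods. A minimal sketch of a
 * pull source writing straight into the consumer-supplied buffer, assuming the
 * stream is read with a BYOB reader and that readInto() is a hypothetical helper
 * which fills a Uint8Array and resolves with the number of bytes written:
 *
 *   const byobSource = new ReadableStream({
 *     type: 'bytes',
 *     async pull(controller) {
 *       const request = controller.byobRequest;
 *       if (!request) return;
 *       const target = new Uint8Array(request.view.buffer,
 *                                     request.view.byteOffset,
 *                                     request.view.byteLength);
 *       const written = await readInto(target);
 *       if (written === 0) {
 *         controller.close();
 *         request.respond(0); // a zero-byte respond() is only valid once closed
 *       } else {
 *         request.respond(written);
 *       }
 *     }
 *   });
 */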
firstDescriptor.buffer = TransferArrayBuffer(view.buffer);\n ReadableByteStreamControllerRespondInternal(controller, viewByteLength);\n }\n function SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize) {\n controller._controlledReadableByteStream = stream;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._byobRequest = null;\n // Need to set the slots so that the assert doesn't fire. In the spec the slots already exist implicitly.\n controller._queue = controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._closeRequested = false;\n controller._started = false;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n controller._autoAllocateChunkSize = autoAllocateChunkSize;\n controller._pendingPullIntos = new SimpleQueue();\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n controller._started = true;\n ReadableByteStreamControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableByteStreamControllerError(controller, r);\n });\n }\n function SetUpReadableByteStreamControllerFromUnderlyingSource(stream, underlyingByteSource, highWaterMark) {\n const controller = Object.create(ReadableByteStreamController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingByteSource.start !== undefined) {\n startAlgorithm = () => underlyingByteSource.start(controller);\n }\n if (underlyingByteSource.pull !== undefined) {\n pullAlgorithm = () => underlyingByteSource.pull(controller);\n }\n if (underlyingByteSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingByteSource.cancel(reason);\n }\n const autoAllocateChunkSize = underlyingByteSource.autoAllocateChunkSize;\n if (autoAllocateChunkSize === 0) {\n throw new TypeError('autoAllocateChunkSize must be greater than 0');\n }\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, autoAllocateChunkSize);\n }\n function SetUpReadableStreamBYOBRequest(request, controller, view) {\n request._associatedReadableByteStreamController = controller;\n request._view = view;\n }\n // Helper functions for the ReadableStreamBYOBRequest.\n function byobRequestBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBRequest.prototype.${name} can only be used on a ReadableStreamBYOBRequest`);\n }\n // Helper functions for the ReadableByteStreamController.\n function byteStreamControllerBrandCheckException(name) {\n return new TypeError(`ReadableByteStreamController.prototype.${name} can only be used on a ReadableByteStreamController`);\n }\n\n // Abstract operations for the ReadableStream.\n function AcquireReadableStreamBYOBReader(stream) {\n return new ReadableStreamBYOBReader(stream);\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamAddReadIntoRequest(stream, readIntoRequest) {\n stream._reader._readIntoRequests.push(readIntoRequest);\n }\n function ReadableStreamFulfillReadIntoRequest(stream, chunk, done) {\n const reader = stream._reader;\n const readIntoRequest = reader._readIntoRequests.shift();\n if (done) {\n readIntoRequest._closeSteps(chunk);\n }\n else {\n readIntoRequest._chunkSteps(chunk);\n 
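/*
 * SetUpReadableByteStreamControllerFromUnderlyingSource above validates
 * autoAllocateChunkSize, which lets default readers consume a byte stream
 * without supplying buffers; a BYOB reader brings its own instead. A minimal
 * consumption sketch, assuming byteStream is any readable byte stream:
 *
 *   async function drainByteStream(byteStream) {
 *     const reader = byteStream.getReader({ mode: 'byob' });
 *     let buffer = new ArrayBuffer(1024);
 *     while (true) {
 *       const { value, done } = await reader.read(new Uint8Array(buffer));
 *       if (done) break;
 *       // value views the bytes that were filled; reuse its transferred buffer
 *       buffer = value.buffer;
 *     }
 *     reader.releaseLock();
 *   }
 */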
}\n }\n function ReadableStreamGetNumReadIntoRequests(stream) {\n return stream._reader._readIntoRequests.length;\n }\n function ReadableStreamHasBYOBReader(stream) {\n const reader = stream._reader;\n if (reader === undefined) {\n return false;\n }\n if (!IsReadableStreamBYOBReader(reader)) {\n return false;\n }\n return true;\n }\n /**\n * A BYOB reader vended by a {@link ReadableStream}.\n *\n * @public\n */\n class ReadableStreamBYOBReader {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'ReadableStreamBYOBReader');\n assertReadableStream(stream, 'First parameter');\n if (IsReadableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive reading by another reader');\n }\n if (!IsReadableByteStreamController(stream._readableStreamController)) {\n throw new TypeError('Cannot construct a ReadableStreamBYOBReader for a stream not constructed with a byte ' +\n 'source');\n }\n ReadableStreamReaderGenericInitialize(this, stream);\n this._readIntoRequests = new SimpleQueue();\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the reader's lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link ReadableStream.cancel | stream.cancel(reason)}.\n */\n cancel(reason = undefined) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('cancel'));\n }\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('cancel'));\n }\n return ReadableStreamReaderGenericCancel(this, reason);\n }\n /**\n * Attempts to reads bytes into view, and returns a promise resolved with the result.\n *\n * If reading a chunk causes the queue to become empty, more data will be pulled from the underlying source.\n */\n read(view) {\n if (!IsReadableStreamBYOBReader(this)) {\n return promiseRejectedWith(byobReaderBrandCheckException('read'));\n }\n if (!ArrayBuffer.isView(view)) {\n return promiseRejectedWith(new TypeError('view must be an array buffer view'));\n }\n if (view.byteLength === 0) {\n return promiseRejectedWith(new TypeError('view must have non-zero byteLength'));\n }\n if (view.buffer.byteLength === 0) {\n return promiseRejectedWith(new TypeError(`view's buffer must have non-zero byteLength`));\n }\n if (IsDetachedBuffer(view.buffer)) ;\n if (this._ownerReadableStream === undefined) {\n return promiseRejectedWith(readerLockException('read from'));\n }\n let resolvePromise;\n let rejectPromise;\n const promise = newPromise((resolve, reject) => {\n resolvePromise = resolve;\n rejectPromise = reject;\n });\n const readIntoRequest = {\n _chunkSteps: chunk => resolvePromise({ value: chunk, done: false }),\n _closeSteps: chunk => resolvePromise({ value: chunk, done: true }),\n _errorSteps: e => rejectPromise(e)\n };\n ReadableStreamBYOBReaderRead(this, view, readIntoRequest);\n return promise;\n }\n /**\n * Releases the reader's lock on the corresponding stream. 
After the lock is released, the reader is no longer active.\n * If the associated stream is errored when the lock is released, the reader will appear errored in the same way\n * from now on; otherwise, the reader will appear closed.\n *\n * A reader's lock cannot be released while it still has a pending read request, i.e., if a promise returned by\n * the reader's {@link ReadableStreamBYOBReader.read | read()} method has not yet been settled. Attempting to\n * do so will throw a `TypeError` and leave the reader locked to the stream.\n */\n releaseLock() {\n if (!IsReadableStreamBYOBReader(this)) {\n throw byobReaderBrandCheckException('releaseLock');\n }\n if (this._ownerReadableStream === undefined) {\n return;\n }\n if (this._readIntoRequests.length > 0) {\n throw new TypeError('Tried to release a reader lock when that reader has pending read() calls un-settled');\n }\n ReadableStreamReaderGenericRelease(this);\n }\n }\n Object.defineProperties(ReadableStreamBYOBReader.prototype, {\n cancel: { enumerable: true },\n read: { enumerable: true },\n releaseLock: { enumerable: true },\n closed: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamBYOBReader.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamBYOBReader',\n configurable: true\n });\n }\n // Abstract operations for the readers.\n function IsReadableStreamBYOBReader(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readIntoRequests')) {\n return false;\n }\n return x instanceof ReadableStreamBYOBReader;\n }\n function ReadableStreamBYOBReaderRead(reader, view, readIntoRequest) {\n const stream = reader._ownerReadableStream;\n stream._disturbed = true;\n if (stream._state === 'errored') {\n readIntoRequest._errorSteps(stream._storedError);\n }\n else {\n ReadableByteStreamControllerPullInto(stream._readableStreamController, view, readIntoRequest);\n }\n }\n // Helper functions for the ReadableStreamBYOBReader.\n function byobReaderBrandCheckException(name) {\n return new TypeError(`ReadableStreamBYOBReader.prototype.${name} can only be used on a ReadableStreamBYOBReader`);\n }\n\n function ExtractHighWaterMark(strategy, defaultHWM) {\n const { highWaterMark } = strategy;\n if (highWaterMark === undefined) {\n return defaultHWM;\n }\n if (NumberIsNaN(highWaterMark) || highWaterMark < 0) {\n throw new RangeError('Invalid highWaterMark');\n }\n return highWaterMark;\n }\n function ExtractSizeAlgorithm(strategy) {\n const { size } = strategy;\n if (!size) {\n return () => 1;\n }\n return size;\n }\n\n function convertQueuingStrategy(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n const size = init === null || init === void 0 ? void 0 : init.size;\n return {\n highWaterMark: highWaterMark === undefined ? undefined : convertUnrestrictedDouble(highWaterMark),\n size: size === undefined ? undefined : convertQueuingStrategySize(size, `${context} has member 'size' that`)\n };\n }\n function convertQueuingStrategySize(fn, context) {\n assertFunction(fn, context);\n return chunk => convertUnrestrictedDouble(fn(chunk));\n }\n\n function convertUnderlyingSink(original, context) {\n assertDictionary(original, context);\n const abort = original === null || original === void 0 ? void 0 : original.abort;\n const close = original === null || original === void 0 ? 
void 0 : original.close;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const type = original === null || original === void 0 ? void 0 : original.type;\n const write = original === null || original === void 0 ? void 0 : original.write;\n return {\n abort: abort === undefined ?\n undefined :\n convertUnderlyingSinkAbortCallback(abort, original, `${context} has member 'abort' that`),\n close: close === undefined ?\n undefined :\n convertUnderlyingSinkCloseCallback(close, original, `${context} has member 'close' that`),\n start: start === undefined ?\n undefined :\n convertUnderlyingSinkStartCallback(start, original, `${context} has member 'start' that`),\n write: write === undefined ?\n undefined :\n convertUnderlyingSinkWriteCallback(write, original, `${context} has member 'write' that`),\n type\n };\n }\n function convertUnderlyingSinkAbortCallback(fn, original, context) {\n assertFunction(fn, context);\n return (reason) => promiseCall(fn, original, [reason]);\n }\n function convertUnderlyingSinkCloseCallback(fn, original, context) {\n assertFunction(fn, context);\n return () => promiseCall(fn, original, []);\n }\n function convertUnderlyingSinkStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertUnderlyingSinkWriteCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n function assertWritableStream(x, context) {\n if (!IsWritableStream(x)) {\n throw new TypeError(`${context} is not a WritableStream.`);\n }\n }\n\n function isAbortSignal(value) {\n if (typeof value !== 'object' || value === null) {\n return false;\n }\n try {\n return typeof value.aborted === 'boolean';\n }\n catch (_a) {\n // AbortSignal.prototype.aborted throws if its brand check fails\n return false;\n }\n }\n const supportsAbortController = typeof AbortController === 'function';\n /**\n * Construct a new AbortController, if supported by the platform.\n *\n * @internal\n */\n function createAbortController() {\n if (supportsAbortController) {\n return new AbortController();\n }\n return undefined;\n }\n\n /**\n * A writable stream represents a destination for data, into which you can write.\n *\n * @public\n */\n class WritableStream {\n constructor(rawUnderlyingSink = {}, rawStrategy = {}) {\n if (rawUnderlyingSink === undefined) {\n rawUnderlyingSink = null;\n }\n else {\n assertObject(rawUnderlyingSink, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSink = convertUnderlyingSink(rawUnderlyingSink, 'First parameter');\n InitializeWritableStream(this);\n const type = underlyingSink.type;\n if (type !== undefined) {\n throw new RangeError('Invalid type is specified');\n }\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpWritableStreamDefaultControllerFromUnderlyingSink(this, underlyingSink, highWaterMark, sizeAlgorithm);\n }\n /**\n * Returns whether or not the writable stream is locked to a writer.\n */\n get locked() {\n if (!IsWritableStream(this)) {\n throw streamBrandCheckException$2('locked');\n }\n return IsWritableStreamLocked(this);\n }\n /**\n * Aborts the stream, signaling that the producer can no longer successfully write to the stream and it is to be\n * immediately moved to an errored state, with any queued-up writes discarded. 
This will also execute any abort\n * mechanism of the underlying sink.\n *\n * The returned promise will fulfill if the stream shuts down successfully, or reject if the underlying sink signaled\n * that there was an error doing so. Additionally, it will reject with a `TypeError` (without attempting to cancel\n * the stream) if the stream is currently locked.\n */\n abort(reason = undefined) {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('abort'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot abort a stream that already has a writer'));\n }\n return WritableStreamAbort(this, reason);\n }\n /**\n * Closes the stream. The underlying sink will finish processing any previously-written chunks, before invoking its\n * close behavior. During this time any further attempts to write will fail (without erroring the stream).\n *\n * The method returns a promise that will fulfill if all remaining chunks are successfully written and the stream\n * successfully closes, or rejects if an error is encountered during this process. Additionally, it will reject with\n * a `TypeError` (without attempting to cancel the stream) if the stream is currently locked.\n */\n close() {\n if (!IsWritableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$2('close'));\n }\n if (IsWritableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot close a stream that already has a writer'));\n }\n if (WritableStreamCloseQueuedOrInFlight(this)) {\n return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));\n }\n return WritableStreamClose(this);\n }\n /**\n * Creates a {@link WritableStreamDefaultWriter | writer} and locks the stream to the new writer. While the stream\n * is locked, no other writer can be acquired until this one is released.\n *\n * This functionality is especially useful for creating abstractions that desire the ability to write to a stream\n * without interruption or interleaving. 
By getting a writer for the stream, you can ensure nobody else can write at\n * the same time, which would cause the resulting written data to be unpredictable and probably useless.\n */\n getWriter() {\n if (!IsWritableStream(this)) {\n throw streamBrandCheckException$2('getWriter');\n }\n return AcquireWritableStreamDefaultWriter(this);\n }\n }\n Object.defineProperties(WritableStream.prototype, {\n abort: { enumerable: true },\n close: { enumerable: true },\n getWriter: { enumerable: true },\n locked: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStream.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStream',\n configurable: true\n });\n }\n // Abstract operations for the WritableStream.\n function AcquireWritableStreamDefaultWriter(stream) {\n return new WritableStreamDefaultWriter(stream);\n }\n // Throws if and only if startAlgorithm throws.\n function CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {\n const stream = Object.create(WritableStream.prototype);\n InitializeWritableStream(stream);\n const controller = Object.create(WritableStreamDefaultController.prototype);\n SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);\n return stream;\n }\n function InitializeWritableStream(stream) {\n stream._state = 'writable';\n // The error that will be reported by new method calls once the state becomes errored. Only set when [[state]] is\n // 'erroring' or 'errored'. May be set to an undefined value.\n stream._storedError = undefined;\n stream._writer = undefined;\n // Initialize to undefined first because the constructor of the controller checks this\n // variable to validate the caller.\n stream._writableStreamController = undefined;\n // This queue is placed here instead of the writer class in order to allow for passing a writer to the next data\n // producer without waiting for the queued writes to finish.\n stream._writeRequests = new SimpleQueue();\n // Write requests are removed from _writeRequests when write() is called on the underlying sink. This prevents\n // them from being erroneously rejected on error. If a write() call is in-flight, the request is stored here.\n stream._inFlightWriteRequest = undefined;\n // The promise that was returned from writer.close(). Stored here because it may be fulfilled after the writer\n // has been detached.\n stream._closeRequest = undefined;\n // Close request is removed from _closeRequest when close() is called on the underlying sink. This prevents it\n // from being erroneously rejected on error. If a close() call is in-flight, the request is stored here.\n stream._inFlightCloseRequest = undefined;\n // The promise that was returned from writer.abort(). 
This may also be fulfilled after the writer has detached.\n stream._pendingAbortRequest = undefined;\n // The backpressure signal set by the controller.\n stream._backpressure = false;\n }\n function IsWritableStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_writableStreamController')) {\n return false;\n }\n return x instanceof WritableStream;\n }\n function IsWritableStreamLocked(stream) {\n if (stream._writer === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamAbort(stream, reason) {\n var _a;\n if (stream._state === 'closed' || stream._state === 'errored') {\n return promiseResolvedWith(undefined);\n }\n stream._writableStreamController._abortReason = reason;\n (_a = stream._writableStreamController._abortController) === null || _a === void 0 ? void 0 : _a.abort();\n // TypeScript narrows the type of `stream._state` down to 'writable' | 'erroring',\n // but it doesn't know that signaling abort runs author code that might have changed the state.\n // Widen the type again by casting to WritableStreamState.\n const state = stream._state;\n if (state === 'closed' || state === 'errored') {\n return promiseResolvedWith(undefined);\n }\n if (stream._pendingAbortRequest !== undefined) {\n return stream._pendingAbortRequest._promise;\n }\n let wasAlreadyErroring = false;\n if (state === 'erroring') {\n wasAlreadyErroring = true;\n // reason will not be used, so don't keep a reference to it.\n reason = undefined;\n }\n const promise = newPromise((resolve, reject) => {\n stream._pendingAbortRequest = {\n _promise: undefined,\n _resolve: resolve,\n _reject: reject,\n _reason: reason,\n _wasAlreadyErroring: wasAlreadyErroring\n };\n });\n stream._pendingAbortRequest._promise = promise;\n if (!wasAlreadyErroring) {\n WritableStreamStartErroring(stream, reason);\n }\n return promise;\n }\n function WritableStreamClose(stream) {\n const state = stream._state;\n if (state === 'closed' || state === 'errored') {\n return promiseRejectedWith(new TypeError(`The stream (in ${state} state) is not in the writable state and cannot be closed`));\n }\n const promise = newPromise((resolve, reject) => {\n const closeRequest = {\n _resolve: resolve,\n _reject: reject\n };\n stream._closeRequest = closeRequest;\n });\n const writer = stream._writer;\n if (writer !== undefined && stream._backpressure && state === 'writable') {\n defaultWriterReadyPromiseResolve(writer);\n }\n WritableStreamDefaultControllerClose(stream._writableStreamController);\n return promise;\n }\n // WritableStream API exposed for controllers.\n function WritableStreamAddWriteRequest(stream) {\n const promise = newPromise((resolve, reject) => {\n const writeRequest = {\n _resolve: resolve,\n _reject: reject\n };\n stream._writeRequests.push(writeRequest);\n });\n return promise;\n }\n function WritableStreamDealWithRejection(stream, error) {\n const state = stream._state;\n if (state === 'writable') {\n WritableStreamStartErroring(stream, error);\n return;\n }\n WritableStreamFinishErroring(stream);\n }\n function WritableStreamStartErroring(stream, reason) {\n const controller = stream._writableStreamController;\n stream._state = 'erroring';\n stream._storedError = reason;\n const writer = stream._writer;\n if (writer !== undefined) {\n WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, reason);\n }\n if (!WritableStreamHasOperationMarkedInFlight(stream) && controller._started) {\n WritableStreamFinishErroring(stream);\n }\n }\n function 
WritableStreamFinishErroring(stream) {\n stream._state = 'errored';\n stream._writableStreamController[ErrorSteps]();\n const storedError = stream._storedError;\n stream._writeRequests.forEach(writeRequest => {\n writeRequest._reject(storedError);\n });\n stream._writeRequests = new SimpleQueue();\n if (stream._pendingAbortRequest === undefined) {\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n return;\n }\n const abortRequest = stream._pendingAbortRequest;\n stream._pendingAbortRequest = undefined;\n if (abortRequest._wasAlreadyErroring) {\n abortRequest._reject(storedError);\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n return;\n }\n const promise = stream._writableStreamController[AbortSteps](abortRequest._reason);\n uponPromise(promise, () => {\n abortRequest._resolve();\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n }, (reason) => {\n abortRequest._reject(reason);\n WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream);\n });\n }\n function WritableStreamFinishInFlightWrite(stream) {\n stream._inFlightWriteRequest._resolve(undefined);\n stream._inFlightWriteRequest = undefined;\n }\n function WritableStreamFinishInFlightWriteWithError(stream, error) {\n stream._inFlightWriteRequest._reject(error);\n stream._inFlightWriteRequest = undefined;\n WritableStreamDealWithRejection(stream, error);\n }\n function WritableStreamFinishInFlightClose(stream) {\n stream._inFlightCloseRequest._resolve(undefined);\n stream._inFlightCloseRequest = undefined;\n const state = stream._state;\n if (state === 'erroring') {\n // The error was too late to do anything, so it is ignored.\n stream._storedError = undefined;\n if (stream._pendingAbortRequest !== undefined) {\n stream._pendingAbortRequest._resolve();\n stream._pendingAbortRequest = undefined;\n }\n }\n stream._state = 'closed';\n const writer = stream._writer;\n if (writer !== undefined) {\n defaultWriterClosedPromiseResolve(writer);\n }\n }\n function WritableStreamFinishInFlightCloseWithError(stream, error) {\n stream._inFlightCloseRequest._reject(error);\n stream._inFlightCloseRequest = undefined;\n // Never execute sink abort() after sink close().\n if (stream._pendingAbortRequest !== undefined) {\n stream._pendingAbortRequest._reject(error);\n stream._pendingAbortRequest = undefined;\n }\n WritableStreamDealWithRejection(stream, error);\n }\n // TODO(ricea): Fix alphabetical order.\n function WritableStreamCloseQueuedOrInFlight(stream) {\n if (stream._closeRequest === undefined && stream._inFlightCloseRequest === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamHasOperationMarkedInFlight(stream) {\n if (stream._inFlightWriteRequest === undefined && stream._inFlightCloseRequest === undefined) {\n return false;\n }\n return true;\n }\n function WritableStreamMarkCloseRequestInFlight(stream) {\n stream._inFlightCloseRequest = stream._closeRequest;\n stream._closeRequest = undefined;\n }\n function WritableStreamMarkFirstWriteRequestInFlight(stream) {\n stream._inFlightWriteRequest = stream._writeRequests.shift();\n }\n function WritableStreamRejectCloseAndClosedPromiseIfNeeded(stream) {\n if (stream._closeRequest !== undefined) {\n stream._closeRequest._reject(stream._storedError);\n stream._closeRequest = undefined;\n }\n const writer = stream._writer;\n if (writer !== undefined) {\n defaultWriterClosedPromiseReject(writer, stream._storedError);\n }\n }\n function WritableStreamUpdateBackpressure(stream, backpressure) {\n const writer = stream._writer;\n if (writer 
!== undefined && backpressure !== stream._backpressure) {\n if (backpressure) {\n defaultWriterReadyPromiseReset(writer);\n }\n else {\n defaultWriterReadyPromiseResolve(writer);\n }\n }\n stream._backpressure = backpressure;\n }\n /**\n * A default writer vended by a {@link WritableStream}.\n *\n * @public\n */\n class WritableStreamDefaultWriter {\n constructor(stream) {\n assertRequiredArgument(stream, 1, 'WritableStreamDefaultWriter');\n assertWritableStream(stream, 'First parameter');\n if (IsWritableStreamLocked(stream)) {\n throw new TypeError('This stream has already been locked for exclusive writing by another writer');\n }\n this._ownerWritableStream = stream;\n stream._writer = this;\n const state = stream._state;\n if (state === 'writable') {\n if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._backpressure) {\n defaultWriterReadyPromiseInitialize(this);\n }\n else {\n defaultWriterReadyPromiseInitializeAsResolved(this);\n }\n defaultWriterClosedPromiseInitialize(this);\n }\n else if (state === 'erroring') {\n defaultWriterReadyPromiseInitializeAsRejected(this, stream._storedError);\n defaultWriterClosedPromiseInitialize(this);\n }\n else if (state === 'closed') {\n defaultWriterReadyPromiseInitializeAsResolved(this);\n defaultWriterClosedPromiseInitializeAsResolved(this);\n }\n else {\n const storedError = stream._storedError;\n defaultWriterReadyPromiseInitializeAsRejected(this, storedError);\n defaultWriterClosedPromiseInitializeAsRejected(this, storedError);\n }\n }\n /**\n * Returns a promise that will be fulfilled when the stream becomes closed, or rejected if the stream ever errors or\n * the writer’s lock is released before the stream finishes closing.\n */\n get closed() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('closed'));\n }\n return this._closedPromise;\n }\n /**\n * Returns the desired size to fill the stream’s internal queue. It can be negative, if the queue is over-full.\n * A producer can use this information to determine the right amount of data to write.\n *\n * It will be `null` if the stream cannot be successfully written to (due to either being errored, or having an abort\n * queued up). It will return zero if the stream is closed. And the getter will throw an exception if invoked when\n * the writer’s lock is released.\n */\n get desiredSize() {\n if (!IsWritableStreamDefaultWriter(this)) {\n throw defaultWriterBrandCheckException('desiredSize');\n }\n if (this._ownerWritableStream === undefined) {\n throw defaultWriterLockException('desiredSize');\n }\n return WritableStreamDefaultWriterGetDesiredSize(this);\n }\n /**\n * Returns a promise that will be fulfilled when the desired size to fill the stream’s internal queue transitions\n * from non-positive to positive, signaling that it is no longer applying backpressure. 
Once the desired size dips\n * back to zero or below, the getter will return a new promise that stays pending until the next transition.\n *\n * If the stream becomes errored or aborted, or the writer’s lock is released, the returned promise will become\n * rejected.\n */\n get ready() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('ready'));\n }\n return this._readyPromise;\n }\n /**\n * If the reader is active, behaves the same as {@link WritableStream.abort | stream.abort(reason)}.\n */\n abort(reason = undefined) {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('abort'));\n }\n if (this._ownerWritableStream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('abort'));\n }\n return WritableStreamDefaultWriterAbort(this, reason);\n }\n /**\n * If the reader is active, behaves the same as {@link WritableStream.close | stream.close()}.\n */\n close() {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('close'));\n }\n const stream = this._ownerWritableStream;\n if (stream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('close'));\n }\n if (WritableStreamCloseQueuedOrInFlight(stream)) {\n return promiseRejectedWith(new TypeError('Cannot close an already-closing stream'));\n }\n return WritableStreamDefaultWriterClose(this);\n }\n /**\n * Releases the writer’s lock on the corresponding stream. After the lock is released, the writer is no longer active.\n * If the associated stream is errored when the lock is released, the writer will appear errored in the same way from\n * now on; otherwise, the writer will appear closed.\n *\n * Note that the lock can still be released even if some ongoing writes have not yet finished (i.e. 
even if the\n * promises returned from previous calls to {@link WritableStreamDefaultWriter.write | write()} have not yet settled).\n * It’s not necessary to hold the lock on the writer for the duration of the write; the lock instead simply prevents\n * other producers from writing in an interleaved manner.\n */\n releaseLock() {\n if (!IsWritableStreamDefaultWriter(this)) {\n throw defaultWriterBrandCheckException('releaseLock');\n }\n const stream = this._ownerWritableStream;\n if (stream === undefined) {\n return;\n }\n WritableStreamDefaultWriterRelease(this);\n }\n write(chunk = undefined) {\n if (!IsWritableStreamDefaultWriter(this)) {\n return promiseRejectedWith(defaultWriterBrandCheckException('write'));\n }\n if (this._ownerWritableStream === undefined) {\n return promiseRejectedWith(defaultWriterLockException('write to'));\n }\n return WritableStreamDefaultWriterWrite(this, chunk);\n }\n }\n Object.defineProperties(WritableStreamDefaultWriter.prototype, {\n abort: { enumerable: true },\n close: { enumerable: true },\n releaseLock: { enumerable: true },\n write: { enumerable: true },\n closed: { enumerable: true },\n desiredSize: { enumerable: true },\n ready: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStreamDefaultWriter.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStreamDefaultWriter',\n configurable: true\n });\n }\n // Abstract operations for the WritableStreamDefaultWriter.\n function IsWritableStreamDefaultWriter(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_ownerWritableStream')) {\n return false;\n }\n return x instanceof WritableStreamDefaultWriter;\n }\n // A client of WritableStreamDefaultWriter may use these functions directly to bypass state check.\n function WritableStreamDefaultWriterAbort(writer, reason) {\n const stream = writer._ownerWritableStream;\n return WritableStreamAbort(stream, reason);\n }\n function WritableStreamDefaultWriterClose(writer) {\n const stream = writer._ownerWritableStream;\n return WritableStreamClose(stream);\n }\n function WritableStreamDefaultWriterCloseWithErrorPropagation(writer) {\n const stream = writer._ownerWritableStream;\n const state = stream._state;\n if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {\n return promiseResolvedWith(undefined);\n }\n if (state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n return WritableStreamDefaultWriterClose(writer);\n }\n function WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, error) {\n if (writer._closedPromiseState === 'pending') {\n defaultWriterClosedPromiseReject(writer, error);\n }\n else {\n defaultWriterClosedPromiseResetToRejected(writer, error);\n }\n }\n function WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, error) {\n if (writer._readyPromiseState === 'pending') {\n defaultWriterReadyPromiseReject(writer, error);\n }\n else {\n defaultWriterReadyPromiseResetToRejected(writer, error);\n }\n }\n function WritableStreamDefaultWriterGetDesiredSize(writer) {\n const stream = writer._ownerWritableStream;\n const state = stream._state;\n if (state === 'errored' || state === 'erroring') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return WritableStreamDefaultControllerGetDesiredSize(stream._writableStreamController);\n }\n function WritableStreamDefaultWriterRelease(writer) {\n const stream = writer._ownerWritableStream;\n const releasedError = 
new TypeError(`Writer was released and can no longer be used to monitor the stream's closedness`);\n WritableStreamDefaultWriterEnsureReadyPromiseRejected(writer, releasedError);\n // The state transitions to \"errored\" before the sink abort() method runs, but the writer.closed promise is not\n // rejected until afterwards. This means that simply testing state will not work.\n WritableStreamDefaultWriterEnsureClosedPromiseRejected(writer, releasedError);\n stream._writer = undefined;\n writer._ownerWritableStream = undefined;\n }\n function WritableStreamDefaultWriterWrite(writer, chunk) {\n const stream = writer._ownerWritableStream;\n const controller = stream._writableStreamController;\n const chunkSize = WritableStreamDefaultControllerGetChunkSize(controller, chunk);\n if (stream !== writer._ownerWritableStream) {\n return promiseRejectedWith(defaultWriterLockException('write to'));\n }\n const state = stream._state;\n if (state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n if (WritableStreamCloseQueuedOrInFlight(stream) || state === 'closed') {\n return promiseRejectedWith(new TypeError('The stream is closing or closed and cannot be written to'));\n }\n if (state === 'erroring') {\n return promiseRejectedWith(stream._storedError);\n }\n const promise = WritableStreamAddWriteRequest(stream);\n WritableStreamDefaultControllerWrite(controller, chunk, chunkSize);\n return promise;\n }\n const closeSentinel = {};\n /**\n * Allows control of a {@link WritableStream | writable stream}'s state and internal queue.\n *\n * @public\n */\n class WritableStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * The reason which was passed to `WritableStream.abort(reason)` when the stream was aborted.\n *\n * @deprecated\n * This property has been removed from the specification, see https://github.com/whatwg/streams/pull/1177.\n * Use {@link WritableStreamDefaultController.signal}'s `reason` instead.\n */\n get abortReason() {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('abortReason');\n }\n return this._abortReason;\n }\n /**\n * An `AbortSignal` that can be used to abort the pending write or close operation when the stream is aborted.\n */\n get signal() {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('signal');\n }\n if (this._abortController === undefined) {\n // Older browsers or older Node versions may not support `AbortController` or `AbortSignal`.\n // We don't want to bundle and ship an `AbortController` polyfill together with our polyfill,\n // so instead we only implement support for `signal` if we find a global `AbortController` constructor.\n throw new TypeError('WritableStreamDefaultController.prototype.signal is not supported');\n }\n return this._abortController.signal;\n }\n /**\n * Closes the controlled writable stream, making all future interactions with it fail with the given error `e`.\n *\n * This method is rarely used, since usually it suffices to return a rejected promise from one of the underlying\n * sink's methods. 
However, it can be useful for suddenly shutting down a stream in response to an event outside the\n * normal lifecycle of interactions with the underlying sink.\n */\n error(e = undefined) {\n if (!IsWritableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$2('error');\n }\n const state = this._controlledWritableStream._state;\n if (state !== 'writable') {\n // The stream is closed, errored or will be soon. The sink can't do anything useful if it gets an error here, so\n // just treat it as a no-op.\n return;\n }\n WritableStreamDefaultControllerError(this, e);\n }\n /** @internal */\n [AbortSteps](reason) {\n const result = this._abortAlgorithm(reason);\n WritableStreamDefaultControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [ErrorSteps]() {\n ResetQueue(this);\n }\n }\n Object.defineProperties(WritableStreamDefaultController.prototype, {\n abortReason: { enumerable: true },\n signal: { enumerable: true },\n error: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(WritableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'WritableStreamDefaultController',\n configurable: true\n });\n }\n // Abstract operations implementing interface required by the WritableStream.\n function IsWritableStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledWritableStream')) {\n return false;\n }\n return x instanceof WritableStreamDefaultController;\n }\n function SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm) {\n controller._controlledWritableStream = stream;\n stream._writableStreamController = controller;\n // Need to set the slots so that the assert doesn't fire. 
In the spec the slots already exist implicitly.\n controller._queue = undefined;\n controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._abortReason = undefined;\n controller._abortController = createAbortController();\n controller._started = false;\n controller._strategySizeAlgorithm = sizeAlgorithm;\n controller._strategyHWM = highWaterMark;\n controller._writeAlgorithm = writeAlgorithm;\n controller._closeAlgorithm = closeAlgorithm;\n controller._abortAlgorithm = abortAlgorithm;\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n const startResult = startAlgorithm();\n const startPromise = promiseResolvedWith(startResult);\n uponPromise(startPromise, () => {\n controller._started = true;\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }, r => {\n controller._started = true;\n WritableStreamDealWithRejection(stream, r);\n });\n }\n function SetUpWritableStreamDefaultControllerFromUnderlyingSink(stream, underlyingSink, highWaterMark, sizeAlgorithm) {\n const controller = Object.create(WritableStreamDefaultController.prototype);\n let startAlgorithm = () => undefined;\n let writeAlgorithm = () => promiseResolvedWith(undefined);\n let closeAlgorithm = () => promiseResolvedWith(undefined);\n let abortAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingSink.start !== undefined) {\n startAlgorithm = () => underlyingSink.start(controller);\n }\n if (underlyingSink.write !== undefined) {\n writeAlgorithm = chunk => underlyingSink.write(chunk, controller);\n }\n if (underlyingSink.close !== undefined) {\n closeAlgorithm = () => underlyingSink.close();\n }\n if (underlyingSink.abort !== undefined) {\n abortAlgorithm = reason => underlyingSink.abort(reason);\n }\n SetUpWritableStreamDefaultController(stream, controller, startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, highWaterMark, sizeAlgorithm);\n }\n // ClearAlgorithms may be called twice. 
Erroring the same stream in multiple ways will often result in redundant calls.\n function WritableStreamDefaultControllerClearAlgorithms(controller) {\n controller._writeAlgorithm = undefined;\n controller._closeAlgorithm = undefined;\n controller._abortAlgorithm = undefined;\n controller._strategySizeAlgorithm = undefined;\n }\n function WritableStreamDefaultControllerClose(controller) {\n EnqueueValueWithSize(controller, closeSentinel, 0);\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }\n function WritableStreamDefaultControllerGetChunkSize(controller, chunk) {\n try {\n return controller._strategySizeAlgorithm(chunk);\n }\n catch (chunkSizeE) {\n WritableStreamDefaultControllerErrorIfNeeded(controller, chunkSizeE);\n return 1;\n }\n }\n function WritableStreamDefaultControllerGetDesiredSize(controller) {\n return controller._strategyHWM - controller._queueTotalSize;\n }\n function WritableStreamDefaultControllerWrite(controller, chunk, chunkSize) {\n try {\n EnqueueValueWithSize(controller, chunk, chunkSize);\n }\n catch (enqueueE) {\n WritableStreamDefaultControllerErrorIfNeeded(controller, enqueueE);\n return;\n }\n const stream = controller._controlledWritableStream;\n if (!WritableStreamCloseQueuedOrInFlight(stream) && stream._state === 'writable') {\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n }\n WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }\n // Abstract operations for the WritableStreamDefaultController.\n function WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller) {\n const stream = controller._controlledWritableStream;\n if (!controller._started) {\n return;\n }\n if (stream._inFlightWriteRequest !== undefined) {\n return;\n }\n const state = stream._state;\n if (state === 'erroring') {\n WritableStreamFinishErroring(stream);\n return;\n }\n if (controller._queue.length === 0) {\n return;\n }\n const value = PeekQueueValue(controller);\n if (value === closeSentinel) {\n WritableStreamDefaultControllerProcessClose(controller);\n }\n else {\n WritableStreamDefaultControllerProcessWrite(controller, value);\n }\n }\n function WritableStreamDefaultControllerErrorIfNeeded(controller, error) {\n if (controller._controlledWritableStream._state === 'writable') {\n WritableStreamDefaultControllerError(controller, error);\n }\n }\n function WritableStreamDefaultControllerProcessClose(controller) {\n const stream = controller._controlledWritableStream;\n WritableStreamMarkCloseRequestInFlight(stream);\n DequeueValue(controller);\n const sinkClosePromise = controller._closeAlgorithm();\n WritableStreamDefaultControllerClearAlgorithms(controller);\n uponPromise(sinkClosePromise, () => {\n WritableStreamFinishInFlightClose(stream);\n }, reason => {\n WritableStreamFinishInFlightCloseWithError(stream, reason);\n });\n }\n function WritableStreamDefaultControllerProcessWrite(controller, chunk) {\n const stream = controller._controlledWritableStream;\n WritableStreamMarkFirstWriteRequestInFlight(stream);\n const sinkWritePromise = controller._writeAlgorithm(chunk);\n uponPromise(sinkWritePromise, () => {\n WritableStreamFinishInFlightWrite(stream);\n const state = stream._state;\n DequeueValue(controller);\n if (!WritableStreamCloseQueuedOrInFlight(stream) && state === 'writable') {\n const backpressure = WritableStreamDefaultControllerGetBackpressure(controller);\n WritableStreamUpdateBackpressure(stream, backpressure);\n }\n 
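/*
 * WritableStreamDefaultControllerProcessWrite/ProcessClose above are what
 * ultimately invoke an underlying sink's write() and close() callbacks. A
 * minimal sink sketch, assuming sendToServer(chunk, { signal }) is a
 * hypothetical helper that returns a promise and honours an AbortSignal:
 *
 *   const destination = new WritableStream({
 *     write(chunk, controller) {
 *       // controller.signal (where AbortController is available) cancels in-flight work
 *       return sendToServer(chunk, { signal: controller.signal });
 *     },
 *     close() {
 *       // flush or finalize the destination
 *     },
 *     abort(reason) {
 *       // discard pending work; reason came from stream.abort(reason)
 *     }
 *   }, { highWaterMark: 4 });
 */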
WritableStreamDefaultControllerAdvanceQueueIfNeeded(controller);\n }, reason => {\n if (stream._state === 'writable') {\n WritableStreamDefaultControllerClearAlgorithms(controller);\n }\n WritableStreamFinishInFlightWriteWithError(stream, reason);\n });\n }\n function WritableStreamDefaultControllerGetBackpressure(controller) {\n const desiredSize = WritableStreamDefaultControllerGetDesiredSize(controller);\n return desiredSize <= 0;\n }\n // A client of WritableStreamDefaultController may use these functions directly to bypass state check.\n function WritableStreamDefaultControllerError(controller, error) {\n const stream = controller._controlledWritableStream;\n WritableStreamDefaultControllerClearAlgorithms(controller);\n WritableStreamStartErroring(stream, error);\n }\n // Helper functions for the WritableStream.\n function streamBrandCheckException$2(name) {\n return new TypeError(`WritableStream.prototype.${name} can only be used on a WritableStream`);\n }\n // Helper functions for the WritableStreamDefaultController.\n function defaultControllerBrandCheckException$2(name) {\n return new TypeError(`WritableStreamDefaultController.prototype.${name} can only be used on a WritableStreamDefaultController`);\n }\n // Helper functions for the WritableStreamDefaultWriter.\n function defaultWriterBrandCheckException(name) {\n return new TypeError(`WritableStreamDefaultWriter.prototype.${name} can only be used on a WritableStreamDefaultWriter`);\n }\n function defaultWriterLockException(name) {\n return new TypeError('Cannot ' + name + ' a stream using a released writer');\n }\n function defaultWriterClosedPromiseInitialize(writer) {\n writer._closedPromise = newPromise((resolve, reject) => {\n writer._closedPromise_resolve = resolve;\n writer._closedPromise_reject = reject;\n writer._closedPromiseState = 'pending';\n });\n }\n function defaultWriterClosedPromiseInitializeAsRejected(writer, reason) {\n defaultWriterClosedPromiseInitialize(writer);\n defaultWriterClosedPromiseReject(writer, reason);\n }\n function defaultWriterClosedPromiseInitializeAsResolved(writer) {\n defaultWriterClosedPromiseInitialize(writer);\n defaultWriterClosedPromiseResolve(writer);\n }\n function defaultWriterClosedPromiseReject(writer, reason) {\n if (writer._closedPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(writer._closedPromise);\n writer._closedPromise_reject(reason);\n writer._closedPromise_resolve = undefined;\n writer._closedPromise_reject = undefined;\n writer._closedPromiseState = 'rejected';\n }\n function defaultWriterClosedPromiseResetToRejected(writer, reason) {\n defaultWriterClosedPromiseInitializeAsRejected(writer, reason);\n }\n function defaultWriterClosedPromiseResolve(writer) {\n if (writer._closedPromise_resolve === undefined) {\n return;\n }\n writer._closedPromise_resolve(undefined);\n writer._closedPromise_resolve = undefined;\n writer._closedPromise_reject = undefined;\n writer._closedPromiseState = 'resolved';\n }\n function defaultWriterReadyPromiseInitialize(writer) {\n writer._readyPromise = newPromise((resolve, reject) => {\n writer._readyPromise_resolve = resolve;\n writer._readyPromise_reject = reject;\n });\n writer._readyPromiseState = 'pending';\n }\n function defaultWriterReadyPromiseInitializeAsRejected(writer, reason) {\n defaultWriterReadyPromiseInitialize(writer);\n defaultWriterReadyPromiseReject(writer, reason);\n }\n function defaultWriterReadyPromiseInitializeAsResolved(writer) {\n defaultWriterReadyPromiseInitialize(writer);\n 
defaultWriterReadyPromiseResolve(writer);\n }\n function defaultWriterReadyPromiseReject(writer, reason) {\n if (writer._readyPromise_reject === undefined) {\n return;\n }\n setPromiseIsHandledToTrue(writer._readyPromise);\n writer._readyPromise_reject(reason);\n writer._readyPromise_resolve = undefined;\n writer._readyPromise_reject = undefined;\n writer._readyPromiseState = 'rejected';\n }\n function defaultWriterReadyPromiseReset(writer) {\n defaultWriterReadyPromiseInitialize(writer);\n }\n function defaultWriterReadyPromiseResetToRejected(writer, reason) {\n defaultWriterReadyPromiseInitializeAsRejected(writer, reason);\n }\n function defaultWriterReadyPromiseResolve(writer) {\n if (writer._readyPromise_resolve === undefined) {\n return;\n }\n writer._readyPromise_resolve(undefined);\n writer._readyPromise_resolve = undefined;\n writer._readyPromise_reject = undefined;\n writer._readyPromiseState = 'fulfilled';\n }\n\n /// \n const NativeDOMException = typeof DOMException !== 'undefined' ? DOMException : undefined;\n\n /// \n function isDOMExceptionConstructor(ctor) {\n if (!(typeof ctor === 'function' || typeof ctor === 'object')) {\n return false;\n }\n try {\n new ctor();\n return true;\n }\n catch (_a) {\n return false;\n }\n }\n function createDOMExceptionPolyfill() {\n // eslint-disable-next-line no-shadow\n const ctor = function DOMException(message, name) {\n this.message = message || '';\n this.name = name || 'Error';\n if (Error.captureStackTrace) {\n Error.captureStackTrace(this, this.constructor);\n }\n };\n ctor.prototype = Object.create(Error.prototype);\n Object.defineProperty(ctor.prototype, 'constructor', { value: ctor, writable: true, configurable: true });\n return ctor;\n }\n // eslint-disable-next-line no-redeclare\n const DOMException$1 = isDOMExceptionConstructor(NativeDOMException) ? 
NativeDOMException : createDOMExceptionPolyfill();\n\n function ReadableStreamPipeTo(source, dest, preventClose, preventAbort, preventCancel, signal) {\n const reader = AcquireReadableStreamDefaultReader(source);\n const writer = AcquireWritableStreamDefaultWriter(dest);\n source._disturbed = true;\n let shuttingDown = false;\n // This is used to keep track of the spec's requirement that we wait for ongoing writes during shutdown.\n let currentWrite = promiseResolvedWith(undefined);\n return newPromise((resolve, reject) => {\n let abortAlgorithm;\n if (signal !== undefined) {\n abortAlgorithm = () => {\n const error = new DOMException$1('Aborted', 'AbortError');\n const actions = [];\n if (!preventAbort) {\n actions.push(() => {\n if (dest._state === 'writable') {\n return WritableStreamAbort(dest, error);\n }\n return promiseResolvedWith(undefined);\n });\n }\n if (!preventCancel) {\n actions.push(() => {\n if (source._state === 'readable') {\n return ReadableStreamCancel(source, error);\n }\n return promiseResolvedWith(undefined);\n });\n }\n shutdownWithAction(() => Promise.all(actions.map(action => action())), true, error);\n };\n if (signal.aborted) {\n abortAlgorithm();\n return;\n }\n signal.addEventListener('abort', abortAlgorithm);\n }\n // Using reader and writer, read all chunks from this and write them to dest\n // - Backpressure must be enforced\n // - Shutdown must stop all activity\n function pipeLoop() {\n return newPromise((resolveLoop, rejectLoop) => {\n function next(done) {\n if (done) {\n resolveLoop();\n }\n else {\n // Use `PerformPromiseThen` instead of `uponPromise` to avoid\n // adding unnecessary `.catch(rethrowAssertionErrorRejection)` handlers\n PerformPromiseThen(pipeStep(), next, rejectLoop);\n }\n }\n next(false);\n });\n }\n function pipeStep() {\n if (shuttingDown) {\n return promiseResolvedWith(true);\n }\n return PerformPromiseThen(writer._readyPromise, () => {\n return newPromise((resolveRead, rejectRead) => {\n ReadableStreamDefaultReaderRead(reader, {\n _chunkSteps: chunk => {\n currentWrite = PerformPromiseThen(WritableStreamDefaultWriterWrite(writer, chunk), undefined, noop);\n resolveRead(false);\n },\n _closeSteps: () => resolveRead(true),\n _errorSteps: rejectRead\n });\n });\n });\n }\n // Errors must be propagated forward\n isOrBecomesErrored(source, reader._closedPromise, storedError => {\n if (!preventAbort) {\n shutdownWithAction(() => WritableStreamAbort(dest, storedError), true, storedError);\n }\n else {\n shutdown(true, storedError);\n }\n });\n // Errors must be propagated backward\n isOrBecomesErrored(dest, writer._closedPromise, storedError => {\n if (!preventCancel) {\n shutdownWithAction(() => ReadableStreamCancel(source, storedError), true, storedError);\n }\n else {\n shutdown(true, storedError);\n }\n });\n // Closing must be propagated forward\n isOrBecomesClosed(source, reader._closedPromise, () => {\n if (!preventClose) {\n shutdownWithAction(() => WritableStreamDefaultWriterCloseWithErrorPropagation(writer));\n }\n else {\n shutdown();\n }\n });\n // Closing must be propagated backward\n if (WritableStreamCloseQueuedOrInFlight(dest) || dest._state === 'closed') {\n const destClosed = new TypeError('the destination writable stream closed before all data could be piped to it');\n if (!preventCancel) {\n shutdownWithAction(() => ReadableStreamCancel(source, destClosed), true, destClosed);\n }\n else {\n shutdown(true, destClosed);\n }\n }\n setPromiseIsHandledToTrue(pipeLoop());\n function waitForWritesToFinish() {\n // Another 
write may have started while we were waiting on this currentWrite, so we have to be sure to wait\n // for that too.\n const oldCurrentWrite = currentWrite;\n return PerformPromiseThen(currentWrite, () => oldCurrentWrite !== currentWrite ? waitForWritesToFinish() : undefined);\n }\n function isOrBecomesErrored(stream, promise, action) {\n if (stream._state === 'errored') {\n action(stream._storedError);\n }\n else {\n uponRejection(promise, action);\n }\n }\n function isOrBecomesClosed(stream, promise, action) {\n if (stream._state === 'closed') {\n action();\n }\n else {\n uponFulfillment(promise, action);\n }\n }\n function shutdownWithAction(action, originalIsError, originalError) {\n if (shuttingDown) {\n return;\n }\n shuttingDown = true;\n if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {\n uponFulfillment(waitForWritesToFinish(), doTheRest);\n }\n else {\n doTheRest();\n }\n function doTheRest() {\n uponPromise(action(), () => finalize(originalIsError, originalError), newError => finalize(true, newError));\n }\n }\n function shutdown(isError, error) {\n if (shuttingDown) {\n return;\n }\n shuttingDown = true;\n if (dest._state === 'writable' && !WritableStreamCloseQueuedOrInFlight(dest)) {\n uponFulfillment(waitForWritesToFinish(), () => finalize(isError, error));\n }\n else {\n finalize(isError, error);\n }\n }\n function finalize(isError, error) {\n WritableStreamDefaultWriterRelease(writer);\n ReadableStreamReaderGenericRelease(reader);\n if (signal !== undefined) {\n signal.removeEventListener('abort', abortAlgorithm);\n }\n if (isError) {\n reject(error);\n }\n else {\n resolve(undefined);\n }\n }\n });\n }\n\n /**\n * Allows control of a {@link ReadableStream | readable stream}'s state and internal queue.\n *\n * @public\n */\n class ReadableStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the controlled stream's internal queue. It can be negative, if the queue is\n * over-full. An underlying source ought to use this information to determine when and how to apply backpressure.\n */\n get desiredSize() {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('desiredSize');\n }\n return ReadableStreamDefaultControllerGetDesiredSize(this);\n }\n /**\n * Closes the controlled readable stream. 
Consumers will still be able to read any previously-enqueued chunks from\n * the stream, but once those are read, the stream will become closed.\n */\n close() {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('close');\n }\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {\n throw new TypeError('The stream is not in a state that permits close');\n }\n ReadableStreamDefaultControllerClose(this);\n }\n enqueue(chunk = undefined) {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('enqueue');\n }\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(this)) {\n throw new TypeError('The stream is not in a state that permits enqueue');\n }\n return ReadableStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors the controlled readable stream, making all future interactions with it fail with the given error `e`.\n */\n error(e = undefined) {\n if (!IsReadableStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException$1('error');\n }\n ReadableStreamDefaultControllerError(this, e);\n }\n /** @internal */\n [CancelSteps](reason) {\n ResetQueue(this);\n const result = this._cancelAlgorithm(reason);\n ReadableStreamDefaultControllerClearAlgorithms(this);\n return result;\n }\n /** @internal */\n [PullSteps](readRequest) {\n const stream = this._controlledReadableStream;\n if (this._queue.length > 0) {\n const chunk = DequeueValue(this);\n if (this._closeRequested && this._queue.length === 0) {\n ReadableStreamDefaultControllerClearAlgorithms(this);\n ReadableStreamClose(stream);\n }\n else {\n ReadableStreamDefaultControllerCallPullIfNeeded(this);\n }\n readRequest._chunkSteps(chunk);\n }\n else {\n ReadableStreamAddReadRequest(stream, readRequest);\n ReadableStreamDefaultControllerCallPullIfNeeded(this);\n }\n }\n }\n Object.defineProperties(ReadableStreamDefaultController.prototype, {\n close: { enumerable: true },\n enqueue: { enumerable: true },\n error: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStreamDefaultController',\n configurable: true\n });\n }\n // Abstract operations for the ReadableStreamDefaultController.\n function IsReadableStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledReadableStream')) {\n return false;\n }\n return x instanceof ReadableStreamDefaultController;\n }\n function ReadableStreamDefaultControllerCallPullIfNeeded(controller) {\n const shouldPull = ReadableStreamDefaultControllerShouldCallPull(controller);\n if (!shouldPull) {\n return;\n }\n if (controller._pulling) {\n controller._pullAgain = true;\n return;\n }\n controller._pulling = true;\n const pullPromise = controller._pullAlgorithm();\n uponPromise(pullPromise, () => {\n controller._pulling = false;\n if (controller._pullAgain) {\n controller._pullAgain = false;\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }\n }, e => {\n ReadableStreamDefaultControllerError(controller, e);\n });\n }\n function ReadableStreamDefaultControllerShouldCallPull(controller) {\n const stream = controller._controlledReadableStream;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return false;\n }\n if (!controller._started) {\n return false;\n }\n if (IsReadableStreamLocked(stream) 
&& ReadableStreamGetNumReadRequests(stream) > 0) {\n return true;\n }\n const desiredSize = ReadableStreamDefaultControllerGetDesiredSize(controller);\n if (desiredSize > 0) {\n return true;\n }\n return false;\n }\n function ReadableStreamDefaultControllerClearAlgorithms(controller) {\n controller._pullAlgorithm = undefined;\n controller._cancelAlgorithm = undefined;\n controller._strategySizeAlgorithm = undefined;\n }\n // A client of ReadableStreamDefaultController may use these functions directly to bypass state check.\n function ReadableStreamDefaultControllerClose(controller) {\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return;\n }\n const stream = controller._controlledReadableStream;\n controller._closeRequested = true;\n if (controller._queue.length === 0) {\n ReadableStreamDefaultControllerClearAlgorithms(controller);\n ReadableStreamClose(stream);\n }\n }\n function ReadableStreamDefaultControllerEnqueue(controller, chunk) {\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(controller)) {\n return;\n }\n const stream = controller._controlledReadableStream;\n if (IsReadableStreamLocked(stream) && ReadableStreamGetNumReadRequests(stream) > 0) {\n ReadableStreamFulfillReadRequest(stream, chunk, false);\n }\n else {\n let chunkSize;\n try {\n chunkSize = controller._strategySizeAlgorithm(chunk);\n }\n catch (chunkSizeE) {\n ReadableStreamDefaultControllerError(controller, chunkSizeE);\n throw chunkSizeE;\n }\n try {\n EnqueueValueWithSize(controller, chunk, chunkSize);\n }\n catch (enqueueE) {\n ReadableStreamDefaultControllerError(controller, enqueueE);\n throw enqueueE;\n }\n }\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }\n function ReadableStreamDefaultControllerError(controller, e) {\n const stream = controller._controlledReadableStream;\n if (stream._state !== 'readable') {\n return;\n }\n ResetQueue(controller);\n ReadableStreamDefaultControllerClearAlgorithms(controller);\n ReadableStreamError(stream, e);\n }\n function ReadableStreamDefaultControllerGetDesiredSize(controller) {\n const state = controller._controlledReadableStream._state;\n if (state === 'errored') {\n return null;\n }\n if (state === 'closed') {\n return 0;\n }\n return controller._strategyHWM - controller._queueTotalSize;\n }\n // This is used in the implementation of TransformStream.\n function ReadableStreamDefaultControllerHasBackpressure(controller) {\n if (ReadableStreamDefaultControllerShouldCallPull(controller)) {\n return false;\n }\n return true;\n }\n function ReadableStreamDefaultControllerCanCloseOrEnqueue(controller) {\n const state = controller._controlledReadableStream._state;\n if (!controller._closeRequested && state === 'readable') {\n return true;\n }\n return false;\n }\n function SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm) {\n controller._controlledReadableStream = stream;\n controller._queue = undefined;\n controller._queueTotalSize = undefined;\n ResetQueue(controller);\n controller._started = false;\n controller._closeRequested = false;\n controller._pullAgain = false;\n controller._pulling = false;\n controller._strategySizeAlgorithm = sizeAlgorithm;\n controller._strategyHWM = highWaterMark;\n controller._pullAlgorithm = pullAlgorithm;\n controller._cancelAlgorithm = cancelAlgorithm;\n stream._readableStreamController = controller;\n const startResult = startAlgorithm();\n uponPromise(promiseResolvedWith(startResult), () => {\n 
controller._started = true;\n ReadableStreamDefaultControllerCallPullIfNeeded(controller);\n }, r => {\n ReadableStreamDefaultControllerError(controller, r);\n });\n }\n function SetUpReadableStreamDefaultControllerFromUnderlyingSource(stream, underlyingSource, highWaterMark, sizeAlgorithm) {\n const controller = Object.create(ReadableStreamDefaultController.prototype);\n let startAlgorithm = () => undefined;\n let pullAlgorithm = () => promiseResolvedWith(undefined);\n let cancelAlgorithm = () => promiseResolvedWith(undefined);\n if (underlyingSource.start !== undefined) {\n startAlgorithm = () => underlyingSource.start(controller);\n }\n if (underlyingSource.pull !== undefined) {\n pullAlgorithm = () => underlyingSource.pull(controller);\n }\n if (underlyingSource.cancel !== undefined) {\n cancelAlgorithm = reason => underlyingSource.cancel(reason);\n }\n SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);\n }\n // Helper functions for the ReadableStreamDefaultController.\n function defaultControllerBrandCheckException$1(name) {\n return new TypeError(`ReadableStreamDefaultController.prototype.${name} can only be used on a ReadableStreamDefaultController`);\n }\n\n function ReadableStreamTee(stream, cloneForBranch2) {\n if (IsReadableByteStreamController(stream._readableStreamController)) {\n return ReadableByteStreamTee(stream);\n }\n return ReadableStreamDefaultTee(stream);\n }\n function ReadableStreamDefaultTee(stream, cloneForBranch2) {\n const reader = AcquireReadableStreamDefaultReader(stream);\n let reading = false;\n let readAgain = false;\n let canceled1 = false;\n let canceled2 = false;\n let reason1;\n let reason2;\n let branch1;\n let branch2;\n let resolveCancelPromise;\n const cancelPromise = newPromise(resolve => {\n resolveCancelPromise = resolve;\n });\n function pullAlgorithm() {\n if (reading) {\n readAgain = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const readRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // reader._closedPromise below), and we want errors in stream to error both branches immediately. 
We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgain = false;\n const chunk1 = chunk;\n const chunk2 = chunk;\n // There is no way to access the cloning code right now in the reference implementation.\n // If we add one then we'll need an implementation for serializable objects.\n // if (!canceled2 && cloneForBranch2) {\n // chunk2 = StructuredDeserialize(StructuredSerialize(chunk2));\n // }\n if (!canceled1) {\n ReadableStreamDefaultControllerEnqueue(branch1._readableStreamController, chunk1);\n }\n if (!canceled2) {\n ReadableStreamDefaultControllerEnqueue(branch2._readableStreamController, chunk2);\n }\n reading = false;\n if (readAgain) {\n pullAlgorithm();\n }\n });\n },\n _closeSteps: () => {\n reading = false;\n if (!canceled1) {\n ReadableStreamDefaultControllerClose(branch1._readableStreamController);\n }\n if (!canceled2) {\n ReadableStreamDefaultControllerClose(branch2._readableStreamController);\n }\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n return promiseResolvedWith(undefined);\n }\n function cancel1Algorithm(reason) {\n canceled1 = true;\n reason1 = reason;\n if (canceled2) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function cancel2Algorithm(reason) {\n canceled2 = true;\n reason2 = reason;\n if (canceled1) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function startAlgorithm() {\n // do nothing\n }\n branch1 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel1Algorithm);\n branch2 = CreateReadableStream(startAlgorithm, pullAlgorithm, cancel2Algorithm);\n uponRejection(reader._closedPromise, (r) => {\n ReadableStreamDefaultControllerError(branch1._readableStreamController, r);\n ReadableStreamDefaultControllerError(branch2._readableStreamController, r);\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n });\n return [branch1, branch2];\n }\n function ReadableByteStreamTee(stream) {\n let reader = AcquireReadableStreamDefaultReader(stream);\n let reading = false;\n let readAgainForBranch1 = false;\n let readAgainForBranch2 = false;\n let canceled1 = false;\n let canceled2 = false;\n let reason1;\n let reason2;\n let branch1;\n let branch2;\n let resolveCancelPromise;\n const cancelPromise = newPromise(resolve => {\n resolveCancelPromise = resolve;\n });\n function forwardReaderError(thisReader) {\n uponRejection(thisReader._closedPromise, r => {\n if (thisReader !== reader) {\n return;\n }\n ReadableByteStreamControllerError(branch1._readableStreamController, r);\n ReadableByteStreamControllerError(branch2._readableStreamController, r);\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n });\n }\n function pullWithDefaultReader() {\n if (IsReadableStreamBYOBReader(reader)) {\n ReadableStreamReaderGenericRelease(reader);\n reader = AcquireReadableStreamDefaultReader(stream);\n forwardReaderError(reader);\n }\n const readRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // 
reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgainForBranch1 = false;\n readAgainForBranch2 = false;\n const chunk1 = chunk;\n let chunk2 = chunk;\n if (!canceled1 && !canceled2) {\n try {\n chunk2 = CloneAsUint8Array(chunk);\n }\n catch (cloneE) {\n ReadableByteStreamControllerError(branch1._readableStreamController, cloneE);\n ReadableByteStreamControllerError(branch2._readableStreamController, cloneE);\n resolveCancelPromise(ReadableStreamCancel(stream, cloneE));\n return;\n }\n }\n if (!canceled1) {\n ReadableByteStreamControllerEnqueue(branch1._readableStreamController, chunk1);\n }\n if (!canceled2) {\n ReadableByteStreamControllerEnqueue(branch2._readableStreamController, chunk2);\n }\n reading = false;\n if (readAgainForBranch1) {\n pull1Algorithm();\n }\n else if (readAgainForBranch2) {\n pull2Algorithm();\n }\n });\n },\n _closeSteps: () => {\n reading = false;\n if (!canceled1) {\n ReadableByteStreamControllerClose(branch1._readableStreamController);\n }\n if (!canceled2) {\n ReadableByteStreamControllerClose(branch2._readableStreamController);\n }\n if (branch1._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(branch1._readableStreamController, 0);\n }\n if (branch2._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(branch2._readableStreamController, 0);\n }\n if (!canceled1 || !canceled2) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamDefaultReaderRead(reader, readRequest);\n }\n function pullWithBYOBReader(view, forBranch2) {\n if (IsReadableStreamDefaultReader(reader)) {\n ReadableStreamReaderGenericRelease(reader);\n reader = AcquireReadableStreamBYOBReader(stream);\n forwardReaderError(reader);\n }\n const byobBranch = forBranch2 ? branch2 : branch1;\n const otherBranch = forBranch2 ? branch1 : branch2;\n const readIntoRequest = {\n _chunkSteps: chunk => {\n // This needs to be delayed a microtask because it takes at least a microtask to detect errors (using\n // reader._closedPromise below), and we want errors in stream to error both branches immediately. We cannot let\n // successful synchronously-available reads get ahead of asynchronously-available errors.\n queueMicrotask(() => {\n readAgainForBranch1 = false;\n readAgainForBranch2 = false;\n const byobCanceled = forBranch2 ? canceled2 : canceled1;\n const otherCanceled = forBranch2 ? 
canceled1 : canceled2;\n if (!otherCanceled) {\n let clonedChunk;\n try {\n clonedChunk = CloneAsUint8Array(chunk);\n }\n catch (cloneE) {\n ReadableByteStreamControllerError(byobBranch._readableStreamController, cloneE);\n ReadableByteStreamControllerError(otherBranch._readableStreamController, cloneE);\n resolveCancelPromise(ReadableStreamCancel(stream, cloneE));\n return;\n }\n if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n ReadableByteStreamControllerEnqueue(otherBranch._readableStreamController, clonedChunk);\n }\n else if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n reading = false;\n if (readAgainForBranch1) {\n pull1Algorithm();\n }\n else if (readAgainForBranch2) {\n pull2Algorithm();\n }\n });\n },\n _closeSteps: chunk => {\n reading = false;\n const byobCanceled = forBranch2 ? canceled2 : canceled1;\n const otherCanceled = forBranch2 ? canceled1 : canceled2;\n if (!byobCanceled) {\n ReadableByteStreamControllerClose(byobBranch._readableStreamController);\n }\n if (!otherCanceled) {\n ReadableByteStreamControllerClose(otherBranch._readableStreamController);\n }\n if (chunk !== undefined) {\n if (!byobCanceled) {\n ReadableByteStreamControllerRespondWithNewView(byobBranch._readableStreamController, chunk);\n }\n if (!otherCanceled && otherBranch._readableStreamController._pendingPullIntos.length > 0) {\n ReadableByteStreamControllerRespond(otherBranch._readableStreamController, 0);\n }\n }\n if (!byobCanceled || !otherCanceled) {\n resolveCancelPromise(undefined);\n }\n },\n _errorSteps: () => {\n reading = false;\n }\n };\n ReadableStreamBYOBReaderRead(reader, view, readIntoRequest);\n }\n function pull1Algorithm() {\n if (reading) {\n readAgainForBranch1 = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch1._readableStreamController);\n if (byobRequest === null) {\n pullWithDefaultReader();\n }\n else {\n pullWithBYOBReader(byobRequest._view, false);\n }\n return promiseResolvedWith(undefined);\n }\n function pull2Algorithm() {\n if (reading) {\n readAgainForBranch2 = true;\n return promiseResolvedWith(undefined);\n }\n reading = true;\n const byobRequest = ReadableByteStreamControllerGetBYOBRequest(branch2._readableStreamController);\n if (byobRequest === null) {\n pullWithDefaultReader();\n }\n else {\n pullWithBYOBReader(byobRequest._view, true);\n }\n return promiseResolvedWith(undefined);\n }\n function cancel1Algorithm(reason) {\n canceled1 = true;\n reason1 = reason;\n if (canceled2) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function cancel2Algorithm(reason) {\n canceled2 = true;\n reason2 = reason;\n if (canceled1) {\n const compositeReason = CreateArrayFromList([reason1, reason2]);\n const cancelResult = ReadableStreamCancel(stream, compositeReason);\n resolveCancelPromise(cancelResult);\n }\n return cancelPromise;\n }\n function startAlgorithm() {\n return;\n }\n branch1 = CreateReadableByteStream(startAlgorithm, pull1Algorithm, cancel1Algorithm);\n branch2 = CreateReadableByteStream(startAlgorithm, pull2Algorithm, cancel2Algorithm);\n forwardReaderError(reader);\n return [branch1, branch2];\n }\n\n function convertUnderlyingDefaultOrByteSource(source, context) {\n 
assertDictionary(source, context);\n const original = source;\n const autoAllocateChunkSize = original === null || original === void 0 ? void 0 : original.autoAllocateChunkSize;\n const cancel = original === null || original === void 0 ? void 0 : original.cancel;\n const pull = original === null || original === void 0 ? void 0 : original.pull;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const type = original === null || original === void 0 ? void 0 : original.type;\n return {\n autoAllocateChunkSize: autoAllocateChunkSize === undefined ?\n undefined :\n convertUnsignedLongLongWithEnforceRange(autoAllocateChunkSize, `${context} has member 'autoAllocateChunkSize' that`),\n cancel: cancel === undefined ?\n undefined :\n convertUnderlyingSourceCancelCallback(cancel, original, `${context} has member 'cancel' that`),\n pull: pull === undefined ?\n undefined :\n convertUnderlyingSourcePullCallback(pull, original, `${context} has member 'pull' that`),\n start: start === undefined ?\n undefined :\n convertUnderlyingSourceStartCallback(start, original, `${context} has member 'start' that`),\n type: type === undefined ? undefined : convertReadableStreamType(type, `${context} has member 'type' that`)\n };\n }\n function convertUnderlyingSourceCancelCallback(fn, original, context) {\n assertFunction(fn, context);\n return (reason) => promiseCall(fn, original, [reason]);\n }\n function convertUnderlyingSourcePullCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertUnderlyingSourceStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertReadableStreamType(type, context) {\n type = `${type}`;\n if (type !== 'bytes') {\n throw new TypeError(`${context} '${type}' is not a valid enumeration value for ReadableStreamType`);\n }\n return type;\n }\n\n function convertReaderOptions(options, context) {\n assertDictionary(options, context);\n const mode = options === null || options === void 0 ? void 0 : options.mode;\n return {\n mode: mode === undefined ? undefined : convertReadableStreamReaderMode(mode, `${context} has member 'mode' that`)\n };\n }\n function convertReadableStreamReaderMode(mode, context) {\n mode = `${mode}`;\n if (mode !== 'byob') {\n throw new TypeError(`${context} '${mode}' is not a valid enumeration value for ReadableStreamReaderMode`);\n }\n return mode;\n }\n\n function convertIteratorOptions(options, context) {\n assertDictionary(options, context);\n const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;\n return { preventCancel: Boolean(preventCancel) };\n }\n\n function convertPipeOptions(options, context) {\n assertDictionary(options, context);\n const preventAbort = options === null || options === void 0 ? void 0 : options.preventAbort;\n const preventCancel = options === null || options === void 0 ? void 0 : options.preventCancel;\n const preventClose = options === null || options === void 0 ? void 0 : options.preventClose;\n const signal = options === null || options === void 0 ? 
void 0 : options.signal;\n if (signal !== undefined) {\n assertAbortSignal(signal, `${context} has member 'signal' that`);\n }\n return {\n preventAbort: Boolean(preventAbort),\n preventCancel: Boolean(preventCancel),\n preventClose: Boolean(preventClose),\n signal\n };\n }\n function assertAbortSignal(signal, context) {\n if (!isAbortSignal(signal)) {\n throw new TypeError(`${context} is not an AbortSignal.`);\n }\n }\n\n function convertReadableWritablePair(pair, context) {\n assertDictionary(pair, context);\n const readable = pair === null || pair === void 0 ? void 0 : pair.readable;\n assertRequiredField(readable, 'readable', 'ReadableWritablePair');\n assertReadableStream(readable, `${context} has member 'readable' that`);\n const writable = pair === null || pair === void 0 ? void 0 : pair.writable;\n assertRequiredField(writable, 'writable', 'ReadableWritablePair');\n assertWritableStream(writable, `${context} has member 'writable' that`);\n return { readable, writable };\n }\n\n /**\n * A readable stream represents a source of data, from which you can read.\n *\n * @public\n */\n class ReadableStream {\n constructor(rawUnderlyingSource = {}, rawStrategy = {}) {\n if (rawUnderlyingSource === undefined) {\n rawUnderlyingSource = null;\n }\n else {\n assertObject(rawUnderlyingSource, 'First parameter');\n }\n const strategy = convertQueuingStrategy(rawStrategy, 'Second parameter');\n const underlyingSource = convertUnderlyingDefaultOrByteSource(rawUnderlyingSource, 'First parameter');\n InitializeReadableStream(this);\n if (underlyingSource.type === 'bytes') {\n if (strategy.size !== undefined) {\n throw new RangeError('The strategy for a byte stream cannot have a size function');\n }\n const highWaterMark = ExtractHighWaterMark(strategy, 0);\n SetUpReadableByteStreamControllerFromUnderlyingSource(this, underlyingSource, highWaterMark);\n }\n else {\n const sizeAlgorithm = ExtractSizeAlgorithm(strategy);\n const highWaterMark = ExtractHighWaterMark(strategy, 1);\n SetUpReadableStreamDefaultControllerFromUnderlyingSource(this, underlyingSource, highWaterMark, sizeAlgorithm);\n }\n }\n /**\n * Whether or not the readable stream is locked to a {@link ReadableStreamDefaultReader | reader}.\n */\n get locked() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('locked');\n }\n return IsReadableStreamLocked(this);\n }\n /**\n * Cancels the stream, signaling a loss of interest in the stream by a consumer.\n *\n * The supplied `reason` argument will be given to the underlying source's {@link UnderlyingSource.cancel | cancel()}\n * method, which might or might not use it.\n */\n cancel(reason = undefined) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('cancel'));\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('Cannot cancel a stream that already has a reader'));\n }\n return ReadableStreamCancel(this, reason);\n }\n getReader(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('getReader');\n }\n const options = convertReaderOptions(rawOptions, 'First parameter');\n if (options.mode === undefined) {\n return AcquireReadableStreamDefaultReader(this);\n }\n return AcquireReadableStreamBYOBReader(this);\n }\n pipeThrough(rawTransform, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('pipeThrough');\n }\n assertRequiredArgument(rawTransform, 1, 'pipeThrough');\n const transform = 
convertReadableWritablePair(rawTransform, 'First parameter');\n const options = convertPipeOptions(rawOptions, 'Second parameter');\n if (IsReadableStreamLocked(this)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked ReadableStream');\n }\n if (IsWritableStreamLocked(transform.writable)) {\n throw new TypeError('ReadableStream.prototype.pipeThrough cannot be used on a locked WritableStream');\n }\n const promise = ReadableStreamPipeTo(this, transform.writable, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n setPromiseIsHandledToTrue(promise);\n return transform.readable;\n }\n pipeTo(destination, rawOptions = {}) {\n if (!IsReadableStream(this)) {\n return promiseRejectedWith(streamBrandCheckException$1('pipeTo'));\n }\n if (destination === undefined) {\n return promiseRejectedWith(`Parameter 1 is required in 'pipeTo'.`);\n }\n if (!IsWritableStream(destination)) {\n return promiseRejectedWith(new TypeError(`ReadableStream.prototype.pipeTo's first argument must be a WritableStream`));\n }\n let options;\n try {\n options = convertPipeOptions(rawOptions, 'Second parameter');\n }\n catch (e) {\n return promiseRejectedWith(e);\n }\n if (IsReadableStreamLocked(this)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked ReadableStream'));\n }\n if (IsWritableStreamLocked(destination)) {\n return promiseRejectedWith(new TypeError('ReadableStream.prototype.pipeTo cannot be used on a locked WritableStream'));\n }\n return ReadableStreamPipeTo(this, destination, options.preventClose, options.preventAbort, options.preventCancel, options.signal);\n }\n /**\n * Tees this readable stream, returning a two-element array containing the two resulting branches as\n * new {@link ReadableStream} instances.\n *\n * Teeing a stream will lock it, preventing any other consumer from acquiring a reader.\n * To cancel the stream, cancel both of the resulting branches; a composite cancellation reason will then be\n * propagated to the stream's underlying source.\n *\n * Note that the chunks seen in each branch will be the same object. 
If the chunks are not immutable,\n * this could allow interference between the two branches.\n */\n tee() {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('tee');\n }\n const branches = ReadableStreamTee(this);\n return CreateArrayFromList(branches);\n }\n values(rawOptions = undefined) {\n if (!IsReadableStream(this)) {\n throw streamBrandCheckException$1('values');\n }\n const options = convertIteratorOptions(rawOptions, 'First parameter');\n return AcquireReadableStreamAsyncIterator(this, options.preventCancel);\n }\n }\n Object.defineProperties(ReadableStream.prototype, {\n cancel: { enumerable: true },\n getReader: { enumerable: true },\n pipeThrough: { enumerable: true },\n pipeTo: { enumerable: true },\n tee: { enumerable: true },\n values: { enumerable: true },\n locked: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.toStringTag, {\n value: 'ReadableStream',\n configurable: true\n });\n }\n if (typeof SymbolPolyfill.asyncIterator === 'symbol') {\n Object.defineProperty(ReadableStream.prototype, SymbolPolyfill.asyncIterator, {\n value: ReadableStream.prototype.values,\n writable: true,\n configurable: true\n });\n }\n // Abstract operations for the ReadableStream.\n // Throws if and only if startAlgorithm throws.\n function CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark = 1, sizeAlgorithm = () => 1) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableStreamDefaultController.prototype);\n SetUpReadableStreamDefaultController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, highWaterMark, sizeAlgorithm);\n return stream;\n }\n // Throws if and only if startAlgorithm throws.\n function CreateReadableByteStream(startAlgorithm, pullAlgorithm, cancelAlgorithm) {\n const stream = Object.create(ReadableStream.prototype);\n InitializeReadableStream(stream);\n const controller = Object.create(ReadableByteStreamController.prototype);\n SetUpReadableByteStreamController(stream, controller, startAlgorithm, pullAlgorithm, cancelAlgorithm, 0, undefined);\n return stream;\n }\n function InitializeReadableStream(stream) {\n stream._state = 'readable';\n stream._reader = undefined;\n stream._storedError = undefined;\n stream._disturbed = false;\n }\n function IsReadableStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_readableStreamController')) {\n return false;\n }\n return x instanceof ReadableStream;\n }\n function IsReadableStreamLocked(stream) {\n if (stream._reader === undefined) {\n return false;\n }\n return true;\n }\n // ReadableStream API exposed for controllers.\n function ReadableStreamCancel(stream, reason) {\n stream._disturbed = true;\n if (stream._state === 'closed') {\n return promiseResolvedWith(undefined);\n }\n if (stream._state === 'errored') {\n return promiseRejectedWith(stream._storedError);\n }\n ReadableStreamClose(stream);\n const reader = stream._reader;\n if (reader !== undefined && IsReadableStreamBYOBReader(reader)) {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._closeSteps(undefined);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n const sourceCancelPromise = stream._readableStreamController[CancelSteps](reason);\n return transformPromiseWith(sourceCancelPromise, noop);\n }\n function 
ReadableStreamClose(stream) {\n stream._state = 'closed';\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseResolve(reader);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._closeSteps();\n });\n reader._readRequests = new SimpleQueue();\n }\n }\n function ReadableStreamError(stream, e) {\n stream._state = 'errored';\n stream._storedError = e;\n const reader = stream._reader;\n if (reader === undefined) {\n return;\n }\n defaultReaderClosedPromiseReject(reader, e);\n if (IsReadableStreamDefaultReader(reader)) {\n reader._readRequests.forEach(readRequest => {\n readRequest._errorSteps(e);\n });\n reader._readRequests = new SimpleQueue();\n }\n else {\n reader._readIntoRequests.forEach(readIntoRequest => {\n readIntoRequest._errorSteps(e);\n });\n reader._readIntoRequests = new SimpleQueue();\n }\n }\n // Helper functions for the ReadableStream.\n function streamBrandCheckException$1(name) {\n return new TypeError(`ReadableStream.prototype.${name} can only be used on a ReadableStream`);\n }\n\n function convertQueuingStrategyInit(init, context) {\n assertDictionary(init, context);\n const highWaterMark = init === null || init === void 0 ? void 0 : init.highWaterMark;\n assertRequiredField(highWaterMark, 'highWaterMark', 'QueuingStrategyInit');\n return {\n highWaterMark: convertUnrestrictedDouble(highWaterMark)\n };\n }\n\n // The size function must not have a prototype property nor be a constructor\n const byteLengthSizeFunction = (chunk) => {\n return chunk.byteLength;\n };\n try {\n Object.defineProperty(byteLengthSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of bytes in each chunk.\n *\n * @public\n */\n class ByteLengthQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'ByteLengthQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._byteLengthQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('highWaterMark');\n }\n return this._byteLengthQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by returning the value of its `byteLength` property.\n */\n get size() {\n if (!IsByteLengthQueuingStrategy(this)) {\n throw byteLengthBrandCheckException('size');\n }\n return byteLengthSizeFunction;\n }\n }\n Object.defineProperties(ByteLengthQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(ByteLengthQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'ByteLengthQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the ByteLengthQueuingStrategy.\n function byteLengthBrandCheckException(name) {\n return new TypeError(`ByteLengthQueuingStrategy.prototype.${name} can only be used on a ByteLengthQueuingStrategy`);\n }\n function IsByteLengthQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, 
'_byteLengthQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof ByteLengthQueuingStrategy;\n }\n\n // The size function must not have a prototype property nor be a constructor\n const countSizeFunction = () => {\n return 1;\n };\n try {\n Object.defineProperty(countSizeFunction, 'name', {\n value: 'size',\n configurable: true\n });\n }\n catch (_a) {\n // This property is non-configurable in older browsers, so ignore if this throws.\n // https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Function/name#browser_compatibility\n }\n /**\n * A queuing strategy that counts the number of chunks.\n *\n * @public\n */\n class CountQueuingStrategy {\n constructor(options) {\n assertRequiredArgument(options, 1, 'CountQueuingStrategy');\n options = convertQueuingStrategyInit(options, 'First parameter');\n this._countQueuingStrategyHighWaterMark = options.highWaterMark;\n }\n /**\n * Returns the high water mark provided to the constructor.\n */\n get highWaterMark() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('highWaterMark');\n }\n return this._countQueuingStrategyHighWaterMark;\n }\n /**\n * Measures the size of `chunk` by always returning 1.\n * This ensures that the total queue size is a count of the number of chunks in the queue.\n */\n get size() {\n if (!IsCountQueuingStrategy(this)) {\n throw countBrandCheckException('size');\n }\n return countSizeFunction;\n }\n }\n Object.defineProperties(CountQueuingStrategy.prototype, {\n highWaterMark: { enumerable: true },\n size: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(CountQueuingStrategy.prototype, SymbolPolyfill.toStringTag, {\n value: 'CountQueuingStrategy',\n configurable: true\n });\n }\n // Helper functions for the CountQueuingStrategy.\n function countBrandCheckException(name) {\n return new TypeError(`CountQueuingStrategy.prototype.${name} can only be used on a CountQueuingStrategy`);\n }\n function IsCountQueuingStrategy(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_countQueuingStrategyHighWaterMark')) {\n return false;\n }\n return x instanceof CountQueuingStrategy;\n }\n\n function convertTransformer(original, context) {\n assertDictionary(original, context);\n const flush = original === null || original === void 0 ? void 0 : original.flush;\n const readableType = original === null || original === void 0 ? void 0 : original.readableType;\n const start = original === null || original === void 0 ? void 0 : original.start;\n const transform = original === null || original === void 0 ? void 0 : original.transform;\n const writableType = original === null || original === void 0 ? 
void 0 : original.writableType;\n return {\n flush: flush === undefined ?\n undefined :\n convertTransformerFlushCallback(flush, original, `${context} has member 'flush' that`),\n readableType,\n start: start === undefined ?\n undefined :\n convertTransformerStartCallback(start, original, `${context} has member 'start' that`),\n transform: transform === undefined ?\n undefined :\n convertTransformerTransformCallback(transform, original, `${context} has member 'transform' that`),\n writableType\n };\n }\n function convertTransformerFlushCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => promiseCall(fn, original, [controller]);\n }\n function convertTransformerStartCallback(fn, original, context) {\n assertFunction(fn, context);\n return (controller) => reflectCall(fn, original, [controller]);\n }\n function convertTransformerTransformCallback(fn, original, context) {\n assertFunction(fn, context);\n return (chunk, controller) => promiseCall(fn, original, [chunk, controller]);\n }\n\n // Class TransformStream\n /**\n * A transform stream consists of a pair of streams: a {@link WritableStream | writable stream},\n * known as its writable side, and a {@link ReadableStream | readable stream}, known as its readable side.\n * In a manner specific to the transform stream in question, writes to the writable side result in new data being\n * made available for reading from the readable side.\n *\n * @public\n */\n class TransformStream {\n constructor(rawTransformer = {}, rawWritableStrategy = {}, rawReadableStrategy = {}) {\n if (rawTransformer === undefined) {\n rawTransformer = null;\n }\n const writableStrategy = convertQueuingStrategy(rawWritableStrategy, 'Second parameter');\n const readableStrategy = convertQueuingStrategy(rawReadableStrategy, 'Third parameter');\n const transformer = convertTransformer(rawTransformer, 'First parameter');\n if (transformer.readableType !== undefined) {\n throw new RangeError('Invalid readableType specified');\n }\n if (transformer.writableType !== undefined) {\n throw new RangeError('Invalid writableType specified');\n }\n const readableHighWaterMark = ExtractHighWaterMark(readableStrategy, 0);\n const readableSizeAlgorithm = ExtractSizeAlgorithm(readableStrategy);\n const writableHighWaterMark = ExtractHighWaterMark(writableStrategy, 1);\n const writableSizeAlgorithm = ExtractSizeAlgorithm(writableStrategy);\n let startPromise_resolve;\n const startPromise = newPromise(resolve => {\n startPromise_resolve = resolve;\n });\n InitializeTransformStream(this, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n SetUpTransformStreamDefaultControllerFromTransformer(this, transformer);\n if (transformer.start !== undefined) {\n startPromise_resolve(transformer.start(this._transformStreamController));\n }\n else {\n startPromise_resolve(undefined);\n }\n }\n /**\n * The readable side of the transform stream.\n */\n get readable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('readable');\n }\n return this._readable;\n }\n /**\n * The writable side of the transform stream.\n */\n get writable() {\n if (!IsTransformStream(this)) {\n throw streamBrandCheckException('writable');\n }\n return this._writable;\n }\n }\n Object.defineProperties(TransformStream.prototype, {\n readable: { enumerable: true },\n writable: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStream.prototype, 
SymbolPolyfill.toStringTag, {\n value: 'TransformStream',\n configurable: true\n });\n }\n function InitializeTransformStream(stream, startPromise, writableHighWaterMark, writableSizeAlgorithm, readableHighWaterMark, readableSizeAlgorithm) {\n function startAlgorithm() {\n return startPromise;\n }\n function writeAlgorithm(chunk) {\n return TransformStreamDefaultSinkWriteAlgorithm(stream, chunk);\n }\n function abortAlgorithm(reason) {\n return TransformStreamDefaultSinkAbortAlgorithm(stream, reason);\n }\n function closeAlgorithm() {\n return TransformStreamDefaultSinkCloseAlgorithm(stream);\n }\n stream._writable = CreateWritableStream(startAlgorithm, writeAlgorithm, closeAlgorithm, abortAlgorithm, writableHighWaterMark, writableSizeAlgorithm);\n function pullAlgorithm() {\n return TransformStreamDefaultSourcePullAlgorithm(stream);\n }\n function cancelAlgorithm(reason) {\n TransformStreamErrorWritableAndUnblockWrite(stream, reason);\n return promiseResolvedWith(undefined);\n }\n stream._readable = CreateReadableStream(startAlgorithm, pullAlgorithm, cancelAlgorithm, readableHighWaterMark, readableSizeAlgorithm);\n // The [[backpressure]] slot is set to undefined so that it can be initialised by TransformStreamSetBackpressure.\n stream._backpressure = undefined;\n stream._backpressureChangePromise = undefined;\n stream._backpressureChangePromise_resolve = undefined;\n TransformStreamSetBackpressure(stream, true);\n stream._transformStreamController = undefined;\n }\n function IsTransformStream(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_transformStreamController')) {\n return false;\n }\n return x instanceof TransformStream;\n }\n // This is a no-op if both sides are already errored.\n function TransformStreamError(stream, e) {\n ReadableStreamDefaultControllerError(stream._readable._readableStreamController, e);\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n }\n function TransformStreamErrorWritableAndUnblockWrite(stream, e) {\n TransformStreamDefaultControllerClearAlgorithms(stream._transformStreamController);\n WritableStreamDefaultControllerErrorIfNeeded(stream._writable._writableStreamController, e);\n if (stream._backpressure) {\n // Pretend that pull() was called to permit any pending write() calls to complete. TransformStreamSetBackpressure()\n // cannot be called from enqueue() or pull() once the ReadableStream is errored, so this will will be the final time\n // _backpressure is set.\n TransformStreamSetBackpressure(stream, false);\n }\n }\n function TransformStreamSetBackpressure(stream, backpressure) {\n // Passes also when called during construction.\n if (stream._backpressureChangePromise !== undefined) {\n stream._backpressureChangePromise_resolve();\n }\n stream._backpressureChangePromise = newPromise(resolve => {\n stream._backpressureChangePromise_resolve = resolve;\n });\n stream._backpressure = backpressure;\n }\n // Class TransformStreamDefaultController\n /**\n * Allows control of the {@link ReadableStream} and {@link WritableStream} of the associated {@link TransformStream}.\n *\n * @public\n */\n class TransformStreamDefaultController {\n constructor() {\n throw new TypeError('Illegal constructor');\n }\n /**\n * Returns the desired size to fill the readable side’s internal queue. 
It can be negative, if the queue is over-full.\n */\n get desiredSize() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('desiredSize');\n }\n const readableController = this._controlledTransformStream._readable._readableStreamController;\n return ReadableStreamDefaultControllerGetDesiredSize(readableController);\n }\n enqueue(chunk = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('enqueue');\n }\n TransformStreamDefaultControllerEnqueue(this, chunk);\n }\n /**\n * Errors both the readable side and the writable side of the controlled transform stream, making all future\n * interactions with it fail with the given error `e`. Any chunks queued for transformation will be discarded.\n */\n error(reason = undefined) {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('error');\n }\n TransformStreamDefaultControllerError(this, reason);\n }\n /**\n * Closes the readable side and errors the writable side of the controlled transform stream. This is useful when the\n * transformer only needs to consume a portion of the chunks written to the writable side.\n */\n terminate() {\n if (!IsTransformStreamDefaultController(this)) {\n throw defaultControllerBrandCheckException('terminate');\n }\n TransformStreamDefaultControllerTerminate(this);\n }\n }\n Object.defineProperties(TransformStreamDefaultController.prototype, {\n enqueue: { enumerable: true },\n error: { enumerable: true },\n terminate: { enumerable: true },\n desiredSize: { enumerable: true }\n });\n if (typeof SymbolPolyfill.toStringTag === 'symbol') {\n Object.defineProperty(TransformStreamDefaultController.prototype, SymbolPolyfill.toStringTag, {\n value: 'TransformStreamDefaultController',\n configurable: true\n });\n }\n // Transform Stream Default Controller Abstract Operations\n function IsTransformStreamDefaultController(x) {\n if (!typeIsObject(x)) {\n return false;\n }\n if (!Object.prototype.hasOwnProperty.call(x, '_controlledTransformStream')) {\n return false;\n }\n return x instanceof TransformStreamDefaultController;\n }\n function SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm) {\n controller._controlledTransformStream = stream;\n stream._transformStreamController = controller;\n controller._transformAlgorithm = transformAlgorithm;\n controller._flushAlgorithm = flushAlgorithm;\n }\n function SetUpTransformStreamDefaultControllerFromTransformer(stream, transformer) {\n const controller = Object.create(TransformStreamDefaultController.prototype);\n let transformAlgorithm = (chunk) => {\n try {\n TransformStreamDefaultControllerEnqueue(controller, chunk);\n return promiseResolvedWith(undefined);\n }\n catch (transformResultE) {\n return promiseRejectedWith(transformResultE);\n }\n };\n let flushAlgorithm = () => promiseResolvedWith(undefined);\n if (transformer.transform !== undefined) {\n transformAlgorithm = chunk => transformer.transform(chunk, controller);\n }\n if (transformer.flush !== undefined) {\n flushAlgorithm = () => transformer.flush(controller);\n }\n SetUpTransformStreamDefaultController(stream, controller, transformAlgorithm, flushAlgorithm);\n }\n function TransformStreamDefaultControllerClearAlgorithms(controller) {\n controller._transformAlgorithm = undefined;\n controller._flushAlgorithm = undefined;\n }\n function TransformStreamDefaultControllerEnqueue(controller, chunk) {\n const stream = 
controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n if (!ReadableStreamDefaultControllerCanCloseOrEnqueue(readableController)) {\n throw new TypeError('Readable side is not in a state that permits enqueue');\n }\n // We throttle transform invocations based on the backpressure of the ReadableStream, but we still\n // accept TransformStreamDefaultControllerEnqueue() calls.\n try {\n ReadableStreamDefaultControllerEnqueue(readableController, chunk);\n }\n catch (e) {\n // This happens when readableStrategy.size() throws.\n TransformStreamErrorWritableAndUnblockWrite(stream, e);\n throw stream._readable._storedError;\n }\n const backpressure = ReadableStreamDefaultControllerHasBackpressure(readableController);\n if (backpressure !== stream._backpressure) {\n TransformStreamSetBackpressure(stream, true);\n }\n }\n function TransformStreamDefaultControllerError(controller, e) {\n TransformStreamError(controller._controlledTransformStream, e);\n }\n function TransformStreamDefaultControllerPerformTransform(controller, chunk) {\n const transformPromise = controller._transformAlgorithm(chunk);\n return transformPromiseWith(transformPromise, undefined, r => {\n TransformStreamError(controller._controlledTransformStream, r);\n throw r;\n });\n }\n function TransformStreamDefaultControllerTerminate(controller) {\n const stream = controller._controlledTransformStream;\n const readableController = stream._readable._readableStreamController;\n ReadableStreamDefaultControllerClose(readableController);\n const error = new TypeError('TransformStream terminated');\n TransformStreamErrorWritableAndUnblockWrite(stream, error);\n }\n // TransformStreamDefaultSink Algorithms\n function TransformStreamDefaultSinkWriteAlgorithm(stream, chunk) {\n const controller = stream._transformStreamController;\n if (stream._backpressure) {\n const backpressureChangePromise = stream._backpressureChangePromise;\n return transformPromiseWith(backpressureChangePromise, () => {\n const writable = stream._writable;\n const state = writable._state;\n if (state === 'erroring') {\n throw writable._storedError;\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n });\n }\n return TransformStreamDefaultControllerPerformTransform(controller, chunk);\n }\n function TransformStreamDefaultSinkAbortAlgorithm(stream, reason) {\n // abort() is not called synchronously, so it is possible for abort() to be called when the stream is already\n // errored.\n TransformStreamError(stream, reason);\n return promiseResolvedWith(undefined);\n }\n function TransformStreamDefaultSinkCloseAlgorithm(stream) {\n // stream._readable cannot change after construction, so caching it across a call to user code is safe.\n const readable = stream._readable;\n const controller = stream._transformStreamController;\n const flushPromise = controller._flushAlgorithm();\n TransformStreamDefaultControllerClearAlgorithms(controller);\n // Return a promise that is fulfilled with undefined on success.\n return transformPromiseWith(flushPromise, () => {\n if (readable._state === 'errored') {\n throw readable._storedError;\n }\n ReadableStreamDefaultControllerClose(readable._readableStreamController);\n }, r => {\n TransformStreamError(stream, r);\n throw readable._storedError;\n });\n }\n // TransformStreamDefaultSource Algorithms\n function TransformStreamDefaultSourcePullAlgorithm(stream) {\n // Invariant. 
Enforced by the promises returned by start() and pull().\n TransformStreamSetBackpressure(stream, false);\n // Prevent the next pull() call until there is backpressure.\n return stream._backpressureChangePromise;\n }\n // Helper functions for the TransformStreamDefaultController.\n function defaultControllerBrandCheckException(name) {\n return new TypeError(`TransformStreamDefaultController.prototype.${name} can only be used on a TransformStreamDefaultController`);\n }\n // Helper functions for the TransformStream.\n function streamBrandCheckException(name) {\n return new TypeError(`TransformStream.prototype.${name} can only be used on a TransformStream`);\n }\n\n exports.ByteLengthQueuingStrategy = ByteLengthQueuingStrategy;\n exports.CountQueuingStrategy = CountQueuingStrategy;\n exports.ReadableByteStreamController = ReadableByteStreamController;\n exports.ReadableStream = ReadableStream;\n exports.ReadableStreamBYOBReader = ReadableStreamBYOBReader;\n exports.ReadableStreamBYOBRequest = ReadableStreamBYOBRequest;\n exports.ReadableStreamDefaultController = ReadableStreamDefaultController;\n exports.ReadableStreamDefaultReader = ReadableStreamDefaultReader;\n exports.TransformStream = TransformStream;\n exports.TransformStreamDefaultController = TransformStreamDefaultController;\n exports.WritableStream = WritableStream;\n exports.WritableStreamDefaultController = WritableStreamDefaultController;\n exports.WritableStreamDefaultWriter = WritableStreamDefaultWriter;\n\n Object.defineProperty(exports, '__esModule', { value: true });\n\n})));\n//# sourceMappingURL=ponyfill.es2018.js.map\n","\"use strict\";\n\nvar conversions = {};\nmodule.exports = conversions;\n\nfunction sign(x) {\n return x < 0 ? -1 : 1;\n}\n\nfunction evenRound(x) {\n // Round x to the nearest integer, choosing the even integer if it lies halfway between two.\n if ((x % 1) === 0.5 && (x & 1) === 0) { // [even number].5; round down (i.e. floor)\n return Math.floor(x);\n } else {\n return Math.round(x);\n }\n}\n\nfunction createNumberConversion(bitLength, typeOpts) {\n if (!typeOpts.unsigned) {\n --bitLength;\n }\n const lowerBound = typeOpts.unsigned ? 0 : -Math.pow(2, bitLength);\n const upperBound = Math.pow(2, bitLength) - 1;\n\n const moduloVal = typeOpts.moduloBitLength ? Math.pow(2, typeOpts.moduloBitLength) : Math.pow(2, bitLength);\n const moduloBound = typeOpts.moduloBitLength ? 
Math.pow(2, typeOpts.moduloBitLength - 1) : Math.pow(2, bitLength - 1);\n\n return function(V, opts) {\n if (!opts) opts = {};\n\n let x = +V;\n\n if (opts.enforceRange) {\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite number\");\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n if (x < lowerBound || x > upperBound) {\n throw new TypeError(\"Argument is not in byte range\");\n }\n\n return x;\n }\n\n if (!isNaN(x) && opts.clamp) {\n x = evenRound(x);\n\n if (x < lowerBound) x = lowerBound;\n if (x > upperBound) x = upperBound;\n return x;\n }\n\n if (!Number.isFinite(x) || x === 0) {\n return 0;\n }\n\n x = sign(x) * Math.floor(Math.abs(x));\n x = x % moduloVal;\n\n if (!typeOpts.unsigned && x >= moduloBound) {\n return x - moduloVal;\n } else if (typeOpts.unsigned) {\n if (x < 0) {\n x += moduloVal;\n } else if (x === -0) { // don't return negative zero\n return 0;\n }\n }\n\n return x;\n }\n}\n\nconversions[\"void\"] = function () {\n return undefined;\n};\n\nconversions[\"boolean\"] = function (val) {\n return !!val;\n};\n\nconversions[\"byte\"] = createNumberConversion(8, { unsigned: false });\nconversions[\"octet\"] = createNumberConversion(8, { unsigned: true });\n\nconversions[\"short\"] = createNumberConversion(16, { unsigned: false });\nconversions[\"unsigned short\"] = createNumberConversion(16, { unsigned: true });\n\nconversions[\"long\"] = createNumberConversion(32, { unsigned: false });\nconversions[\"unsigned long\"] = createNumberConversion(32, { unsigned: true });\n\nconversions[\"long long\"] = createNumberConversion(32, { unsigned: false, moduloBitLength: 64 });\nconversions[\"unsigned long long\"] = createNumberConversion(32, { unsigned: true, moduloBitLength: 64 });\n\nconversions[\"double\"] = function (V) {\n const x = +V;\n\n if (!Number.isFinite(x)) {\n throw new TypeError(\"Argument is not a finite floating-point value\");\n }\n\n return x;\n};\n\nconversions[\"unrestricted double\"] = function (V) {\n const x = +V;\n\n if (isNaN(x)) {\n throw new TypeError(\"Argument is NaN\");\n }\n\n return x;\n};\n\n// not quite valid, but good enough for JS\nconversions[\"float\"] = conversions[\"double\"];\nconversions[\"unrestricted float\"] = conversions[\"unrestricted double\"];\n\nconversions[\"DOMString\"] = function (V, opts) {\n if (!opts) opts = {};\n\n if (opts.treatNullAsEmptyString && V === null) {\n return \"\";\n }\n\n return String(V);\n};\n\nconversions[\"ByteString\"] = function (V, opts) {\n const x = String(V);\n let c = undefined;\n for (let i = 0; (c = x.codePointAt(i)) !== undefined; ++i) {\n if (c > 255) {\n throw new TypeError(\"Argument is not a valid bytestring\");\n }\n }\n\n return x;\n};\n\nconversions[\"USVString\"] = function (V) {\n const S = String(V);\n const n = S.length;\n const U = [];\n for (let i = 0; i < n; ++i) {\n const c = S.charCodeAt(i);\n if (c < 0xD800 || c > 0xDFFF) {\n U.push(String.fromCodePoint(c));\n } else if (0xDC00 <= c && c <= 0xDFFF) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n if (i === n - 1) {\n U.push(String.fromCodePoint(0xFFFD));\n } else {\n const d = S.charCodeAt(i + 1);\n if (0xDC00 <= d && d <= 0xDFFF) {\n const a = c & 0x3FF;\n const b = d & 0x3FF;\n U.push(String.fromCodePoint((2 << 15) + (2 << 9) * a + b));\n ++i;\n } else {\n U.push(String.fromCodePoint(0xFFFD));\n }\n }\n }\n }\n\n return U.join('');\n};\n\nconversions[\"Date\"] = function (V, opts) {\n if (!(V instanceof Date)) {\n throw new TypeError(\"Argument is not a Date object\");\n }\n if (isNaN(V)) {\n 
return undefined;\n }\n\n return V;\n};\n\nconversions[\"RegExp\"] = function (V, opts) {\n if (!(V instanceof RegExp)) {\n V = new RegExp(V);\n }\n\n return V;\n};\n","\"use strict\";\nconst usm = require(\"./url-state-machine\");\n\nexports.implementation = class URLImpl {\n constructor(constructorArgs) {\n const url = constructorArgs[0];\n const base = constructorArgs[1];\n\n let parsedBase = null;\n if (base !== undefined) {\n parsedBase = usm.basicURLParse(base);\n if (parsedBase === \"failure\") {\n throw new TypeError(\"Invalid base URL\");\n }\n }\n\n const parsedURL = usm.basicURLParse(url, { baseURL: parsedBase });\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n\n // TODO: query stuff\n }\n\n get href() {\n return usm.serializeURL(this._url);\n }\n\n set href(v) {\n const parsedURL = usm.basicURLParse(v);\n if (parsedURL === \"failure\") {\n throw new TypeError(\"Invalid URL\");\n }\n\n this._url = parsedURL;\n }\n\n get origin() {\n return usm.serializeURLOrigin(this._url);\n }\n\n get protocol() {\n return this._url.scheme + \":\";\n }\n\n set protocol(v) {\n usm.basicURLParse(v + \":\", { url: this._url, stateOverride: \"scheme start\" });\n }\n\n get username() {\n return this._url.username;\n }\n\n set username(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setTheUsername(this._url, v);\n }\n\n get password() {\n return this._url.password;\n }\n\n set password(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n usm.setThePassword(this._url, v);\n }\n\n get host() {\n const url = this._url;\n\n if (url.host === null) {\n return \"\";\n }\n\n if (url.port === null) {\n return usm.serializeHost(url.host);\n }\n\n return usm.serializeHost(url.host) + \":\" + usm.serializeInteger(url.port);\n }\n\n set host(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"host\" });\n }\n\n get hostname() {\n if (this._url.host === null) {\n return \"\";\n }\n\n return usm.serializeHost(this._url.host);\n }\n\n set hostname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n usm.basicURLParse(v, { url: this._url, stateOverride: \"hostname\" });\n }\n\n get port() {\n if (this._url.port === null) {\n return \"\";\n }\n\n return usm.serializeInteger(this._url.port);\n }\n\n set port(v) {\n if (usm.cannotHaveAUsernamePasswordPort(this._url)) {\n return;\n }\n\n if (v === \"\") {\n this._url.port = null;\n } else {\n usm.basicURLParse(v, { url: this._url, stateOverride: \"port\" });\n }\n }\n\n get pathname() {\n if (this._url.cannotBeABaseURL) {\n return this._url.path[0];\n }\n\n if (this._url.path.length === 0) {\n return \"\";\n }\n\n return \"/\" + this._url.path.join(\"/\");\n }\n\n set pathname(v) {\n if (this._url.cannotBeABaseURL) {\n return;\n }\n\n this._url.path = [];\n usm.basicURLParse(v, { url: this._url, stateOverride: \"path start\" });\n }\n\n get search() {\n if (this._url.query === null || this._url.query === \"\") {\n return \"\";\n }\n\n return \"?\" + this._url.query;\n }\n\n set search(v) {\n // TODO: query stuff\n\n const url = this._url;\n\n if (v === \"\") {\n url.query = null;\n return;\n }\n\n const input = v[0] === \"?\" ? 
v.substring(1) : v;\n url.query = \"\";\n usm.basicURLParse(input, { url, stateOverride: \"query\" });\n }\n\n get hash() {\n if (this._url.fragment === null || this._url.fragment === \"\") {\n return \"\";\n }\n\n return \"#\" + this._url.fragment;\n }\n\n set hash(v) {\n if (v === \"\") {\n this._url.fragment = null;\n return;\n }\n\n const input = v[0] === \"#\" ? v.substring(1) : v;\n this._url.fragment = \"\";\n usm.basicURLParse(input, { url: this._url, stateOverride: \"fragment\" });\n }\n\n toJSON() {\n return this.href;\n }\n};\n","\"use strict\";\n\nconst conversions = require(\"webidl-conversions\");\nconst utils = require(\"./utils.js\");\nconst Impl = require(\".//URL-impl.js\");\n\nconst impl = utils.implSymbol;\n\nfunction URL(url) {\n if (!this || this[impl] || !(this instanceof URL)) {\n throw new TypeError(\"Failed to construct 'URL': Please use the 'new' operator, this DOM object constructor cannot be called as a function.\");\n }\n if (arguments.length < 1) {\n throw new TypeError(\"Failed to construct 'URL': 1 argument required, but only \" + arguments.length + \" present.\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 2; ++i) {\n args[i] = arguments[i];\n }\n args[0] = conversions[\"USVString\"](args[0]);\n if (args[1] !== undefined) {\n args[1] = conversions[\"USVString\"](args[1]);\n }\n\n module.exports.setup(this, args);\n}\n\nURL.prototype.toJSON = function toJSON() {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n const args = [];\n for (let i = 0; i < arguments.length && i < 0; ++i) {\n args[i] = arguments[i];\n }\n return this[impl].toJSON.apply(this[impl], args);\n};\nObject.defineProperty(URL.prototype, \"href\", {\n get() {\n return this[impl].href;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].href = V;\n },\n enumerable: true,\n configurable: true\n});\n\nURL.prototype.toString = function () {\n if (!this || !module.exports.is(this)) {\n throw new TypeError(\"Illegal invocation\");\n }\n return this.href;\n};\n\nObject.defineProperty(URL.prototype, \"origin\", {\n get() {\n return this[impl].origin;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"protocol\", {\n get() {\n return this[impl].protocol;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].protocol = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"username\", {\n get() {\n return this[impl].username;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].username = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"password\", {\n get() {\n return this[impl].password;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].password = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"host\", {\n get() {\n return this[impl].host;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].host = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hostname\", {\n get() {\n return this[impl].hostname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hostname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"port\", {\n get() {\n return this[impl].port;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].port = V;\n },\n enumerable: true,\n configurable: 
true\n});\n\nObject.defineProperty(URL.prototype, \"pathname\", {\n get() {\n return this[impl].pathname;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].pathname = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"search\", {\n get() {\n return this[impl].search;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].search = V;\n },\n enumerable: true,\n configurable: true\n});\n\nObject.defineProperty(URL.prototype, \"hash\", {\n get() {\n return this[impl].hash;\n },\n set(V) {\n V = conversions[\"USVString\"](V);\n this[impl].hash = V;\n },\n enumerable: true,\n configurable: true\n});\n\n\nmodule.exports = {\n is(obj) {\n return !!obj && obj[impl] instanceof Impl.implementation;\n },\n create(constructorArgs, privateData) {\n let obj = Object.create(URL.prototype);\n this.setup(obj, constructorArgs, privateData);\n return obj;\n },\n setup(obj, constructorArgs, privateData) {\n if (!privateData) privateData = {};\n privateData.wrapper = obj;\n\n obj[impl] = new Impl.implementation(constructorArgs, privateData);\n obj[impl][utils.wrapperSymbol] = obj;\n },\n interface: URL,\n expose: {\n Window: { URL: URL },\n Worker: { URL: URL }\n }\n};\n\n","\"use strict\";\n\nexports.URL = require(\"./URL\").interface;\nexports.serializeURL = require(\"./url-state-machine\").serializeURL;\nexports.serializeURLOrigin = require(\"./url-state-machine\").serializeURLOrigin;\nexports.basicURLParse = require(\"./url-state-machine\").basicURLParse;\nexports.setTheUsername = require(\"./url-state-machine\").setTheUsername;\nexports.setThePassword = require(\"./url-state-machine\").setThePassword;\nexports.serializeHost = require(\"./url-state-machine\").serializeHost;\nexports.serializeInteger = require(\"./url-state-machine\").serializeInteger;\nexports.parseURL = require(\"./url-state-machine\").parseURL;\n","\"use strict\";\r\nconst punycode = require(\"punycode\");\r\nconst tr46 = require(\"tr46\");\r\n\r\nconst specialSchemes = {\r\n ftp: 21,\r\n file: null,\r\n gopher: 70,\r\n http: 80,\r\n https: 443,\r\n ws: 80,\r\n wss: 443\r\n};\r\n\r\nconst failure = Symbol(\"failure\");\r\n\r\nfunction countSymbols(str) {\r\n return punycode.ucs2.decode(str).length;\r\n}\r\n\r\nfunction at(input, idx) {\r\n const c = input[idx];\r\n return isNaN(c) ? 
undefined : String.fromCodePoint(c);\r\n}\r\n\r\nfunction isASCIIDigit(c) {\r\n return c >= 0x30 && c <= 0x39;\r\n}\r\n\r\nfunction isASCIIAlpha(c) {\r\n return (c >= 0x41 && c <= 0x5A) || (c >= 0x61 && c <= 0x7A);\r\n}\r\n\r\nfunction isASCIIAlphanumeric(c) {\r\n return isASCIIAlpha(c) || isASCIIDigit(c);\r\n}\r\n\r\nfunction isASCIIHex(c) {\r\n return isASCIIDigit(c) || (c >= 0x41 && c <= 0x46) || (c >= 0x61 && c <= 0x66);\r\n}\r\n\r\nfunction isSingleDot(buffer) {\r\n return buffer === \".\" || buffer.toLowerCase() === \"%2e\";\r\n}\r\n\r\nfunction isDoubleDot(buffer) {\r\n buffer = buffer.toLowerCase();\r\n return buffer === \"..\" || buffer === \"%2e.\" || buffer === \".%2e\" || buffer === \"%2e%2e\";\r\n}\r\n\r\nfunction isWindowsDriveLetterCodePoints(cp1, cp2) {\r\n return isASCIIAlpha(cp1) && (cp2 === 58 || cp2 === 124);\r\n}\r\n\r\nfunction isWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && (string[1] === \":\" || string[1] === \"|\");\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetterString(string) {\r\n return string.length === 2 && isASCIIAlpha(string.codePointAt(0)) && string[1] === \":\";\r\n}\r\n\r\nfunction containsForbiddenHostCodePoint(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|%|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction containsForbiddenHostCodePointExcludingPercent(string) {\r\n return string.search(/\\u0000|\\u0009|\\u000A|\\u000D|\\u0020|#|\\/|:|\\?|@|\\[|\\\\|\\]/) !== -1;\r\n}\r\n\r\nfunction isSpecialScheme(scheme) {\r\n return specialSchemes[scheme] !== undefined;\r\n}\r\n\r\nfunction isSpecial(url) {\r\n return isSpecialScheme(url.scheme);\r\n}\r\n\r\nfunction defaultPort(scheme) {\r\n return specialSchemes[scheme];\r\n}\r\n\r\nfunction percentEncode(c) {\r\n let hex = c.toString(16).toUpperCase();\r\n if (hex.length === 1) {\r\n hex = \"0\" + hex;\r\n }\r\n\r\n return \"%\" + hex;\r\n}\r\n\r\nfunction utf8PercentEncode(c) {\r\n const buf = new Buffer(c);\r\n\r\n let str = \"\";\r\n\r\n for (let i = 0; i < buf.length; ++i) {\r\n str += percentEncode(buf[i]);\r\n }\r\n\r\n return str;\r\n}\r\n\r\nfunction utf8PercentDecode(str) {\r\n const input = new Buffer(str);\r\n const output = [];\r\n for (let i = 0; i < input.length; ++i) {\r\n if (input[i] !== 37) {\r\n output.push(input[i]);\r\n } else if (input[i] === 37 && isASCIIHex(input[i + 1]) && isASCIIHex(input[i + 2])) {\r\n output.push(parseInt(input.slice(i + 1, i + 3).toString(), 16));\r\n i += 2;\r\n } else {\r\n output.push(input[i]);\r\n }\r\n }\r\n return new Buffer(output).toString();\r\n}\r\n\r\nfunction isC0ControlPercentEncode(c) {\r\n return c <= 0x1F || c > 0x7E;\r\n}\r\n\r\nconst extraPathPercentEncodeSet = new Set([32, 34, 35, 60, 62, 63, 96, 123, 125]);\r\nfunction isPathPercentEncode(c) {\r\n return isC0ControlPercentEncode(c) || extraPathPercentEncodeSet.has(c);\r\n}\r\n\r\nconst extraUserinfoPercentEncodeSet =\r\n new Set([47, 58, 59, 61, 64, 91, 92, 93, 94, 124]);\r\nfunction isUserinfoPercentEncode(c) {\r\n return isPathPercentEncode(c) || extraUserinfoPercentEncodeSet.has(c);\r\n}\r\n\r\nfunction percentEncodeChar(c, encodeSetPredicate) {\r\n const cStr = String.fromCodePoint(c);\r\n\r\n if (encodeSetPredicate(c)) {\r\n return utf8PercentEncode(cStr);\r\n }\r\n\r\n return cStr;\r\n}\r\n\r\nfunction parseIPv4Number(input) {\r\n let R = 10;\r\n\r\n if (input.length >= 2 && input.charAt(0) === \"0\" && input.charAt(1).toLowerCase() === \"x\") {\r\n input = input.substring(2);\r\n R = 
16;\r\n } else if (input.length >= 2 && input.charAt(0) === \"0\") {\r\n input = input.substring(1);\r\n R = 8;\r\n }\r\n\r\n if (input === \"\") {\r\n return 0;\r\n }\r\n\r\n const regex = R === 10 ? /[^0-9]/ : (R === 16 ? /[^0-9A-Fa-f]/ : /[^0-7]/);\r\n if (regex.test(input)) {\r\n return failure;\r\n }\r\n\r\n return parseInt(input, R);\r\n}\r\n\r\nfunction parseIPv4(input) {\r\n const parts = input.split(\".\");\r\n if (parts[parts.length - 1] === \"\") {\r\n if (parts.length > 1) {\r\n parts.pop();\r\n }\r\n }\r\n\r\n if (parts.length > 4) {\r\n return input;\r\n }\r\n\r\n const numbers = [];\r\n for (const part of parts) {\r\n if (part === \"\") {\r\n return input;\r\n }\r\n const n = parseIPv4Number(part);\r\n if (n === failure) {\r\n return input;\r\n }\r\n\r\n numbers.push(n);\r\n }\r\n\r\n for (let i = 0; i < numbers.length - 1; ++i) {\r\n if (numbers[i] > 255) {\r\n return failure;\r\n }\r\n }\r\n if (numbers[numbers.length - 1] >= Math.pow(256, 5 - numbers.length)) {\r\n return failure;\r\n }\r\n\r\n let ipv4 = numbers.pop();\r\n let counter = 0;\r\n\r\n for (const n of numbers) {\r\n ipv4 += n * Math.pow(256, 3 - counter);\r\n ++counter;\r\n }\r\n\r\n return ipv4;\r\n}\r\n\r\nfunction serializeIPv4(address) {\r\n let output = \"\";\r\n let n = address;\r\n\r\n for (let i = 1; i <= 4; ++i) {\r\n output = String(n % 256) + output;\r\n if (i !== 4) {\r\n output = \".\" + output;\r\n }\r\n n = Math.floor(n / 256);\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseIPv6(input) {\r\n const address = [0, 0, 0, 0, 0, 0, 0, 0];\r\n let pieceIndex = 0;\r\n let compress = null;\r\n let pointer = 0;\r\n\r\n input = punycode.ucs2.decode(input);\r\n\r\n if (input[pointer] === 58) {\r\n if (input[pointer + 1] !== 58) {\r\n return failure;\r\n }\r\n\r\n pointer += 2;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n }\r\n\r\n while (pointer < input.length) {\r\n if (pieceIndex === 8) {\r\n return failure;\r\n }\r\n\r\n if (input[pointer] === 58) {\r\n if (compress !== null) {\r\n return failure;\r\n }\r\n ++pointer;\r\n ++pieceIndex;\r\n compress = pieceIndex;\r\n continue;\r\n }\r\n\r\n let value = 0;\r\n let length = 0;\r\n\r\n while (length < 4 && isASCIIHex(input[pointer])) {\r\n value = value * 0x10 + parseInt(at(input, pointer), 16);\r\n ++pointer;\r\n ++length;\r\n }\r\n\r\n if (input[pointer] === 46) {\r\n if (length === 0) {\r\n return failure;\r\n }\r\n\r\n pointer -= length;\r\n\r\n if (pieceIndex > 6) {\r\n return failure;\r\n }\r\n\r\n let numbersSeen = 0;\r\n\r\n while (input[pointer] !== undefined) {\r\n let ipv4Piece = null;\r\n\r\n if (numbersSeen > 0) {\r\n if (input[pointer] === 46 && numbersSeen < 4) {\r\n ++pointer;\r\n } else {\r\n return failure;\r\n }\r\n }\r\n\r\n if (!isASCIIDigit(input[pointer])) {\r\n return failure;\r\n }\r\n\r\n while (isASCIIDigit(input[pointer])) {\r\n const number = parseInt(at(input, pointer));\r\n if (ipv4Piece === null) {\r\n ipv4Piece = number;\r\n } else if (ipv4Piece === 0) {\r\n return failure;\r\n } else {\r\n ipv4Piece = ipv4Piece * 10 + number;\r\n }\r\n if (ipv4Piece > 255) {\r\n return failure;\r\n }\r\n ++pointer;\r\n }\r\n\r\n address[pieceIndex] = address[pieceIndex] * 0x100 + ipv4Piece;\r\n\r\n ++numbersSeen;\r\n\r\n if (numbersSeen === 2 || numbersSeen === 4) {\r\n ++pieceIndex;\r\n }\r\n }\r\n\r\n if (numbersSeen !== 4) {\r\n return failure;\r\n }\r\n\r\n break;\r\n } else if (input[pointer] === 58) {\r\n ++pointer;\r\n if (input[pointer] === undefined) {\r\n return failure;\r\n }\r\n } else if (input[pointer] !== 
undefined) {\r\n return failure;\r\n }\r\n\r\n address[pieceIndex] = value;\r\n ++pieceIndex;\r\n }\r\n\r\n if (compress !== null) {\r\n let swaps = pieceIndex - compress;\r\n pieceIndex = 7;\r\n while (pieceIndex !== 0 && swaps > 0) {\r\n const temp = address[compress + swaps - 1];\r\n address[compress + swaps - 1] = address[pieceIndex];\r\n address[pieceIndex] = temp;\r\n --pieceIndex;\r\n --swaps;\r\n }\r\n } else if (compress === null && pieceIndex !== 8) {\r\n return failure;\r\n }\r\n\r\n return address;\r\n}\r\n\r\nfunction serializeIPv6(address) {\r\n let output = \"\";\r\n const seqResult = findLongestZeroSequence(address);\r\n const compress = seqResult.idx;\r\n let ignore0 = false;\r\n\r\n for (let pieceIndex = 0; pieceIndex <= 7; ++pieceIndex) {\r\n if (ignore0 && address[pieceIndex] === 0) {\r\n continue;\r\n } else if (ignore0) {\r\n ignore0 = false;\r\n }\r\n\r\n if (compress === pieceIndex) {\r\n const separator = pieceIndex === 0 ? \"::\" : \":\";\r\n output += separator;\r\n ignore0 = true;\r\n continue;\r\n }\r\n\r\n output += address[pieceIndex].toString(16);\r\n\r\n if (pieceIndex !== 7) {\r\n output += \":\";\r\n }\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction parseHost(input, isSpecialArg) {\r\n if (input[0] === \"[\") {\r\n if (input[input.length - 1] !== \"]\") {\r\n return failure;\r\n }\r\n\r\n return parseIPv6(input.substring(1, input.length - 1));\r\n }\r\n\r\n if (!isSpecialArg) {\r\n return parseOpaqueHost(input);\r\n }\r\n\r\n const domain = utf8PercentDecode(input);\r\n const asciiDomain = tr46.toASCII(domain, false, tr46.PROCESSING_OPTIONS.NONTRANSITIONAL, false);\r\n if (asciiDomain === null) {\r\n return failure;\r\n }\r\n\r\n if (containsForbiddenHostCodePoint(asciiDomain)) {\r\n return failure;\r\n }\r\n\r\n const ipv4Host = parseIPv4(asciiDomain);\r\n if (typeof ipv4Host === \"number\" || ipv4Host === failure) {\r\n return ipv4Host;\r\n }\r\n\r\n return asciiDomain;\r\n}\r\n\r\nfunction parseOpaqueHost(input) {\r\n if (containsForbiddenHostCodePointExcludingPercent(input)) {\r\n return failure;\r\n }\r\n\r\n let output = \"\";\r\n const decoded = punycode.ucs2.decode(input);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n output += percentEncodeChar(decoded[i], isC0ControlPercentEncode);\r\n }\r\n return output;\r\n}\r\n\r\nfunction findLongestZeroSequence(arr) {\r\n let maxIdx = null;\r\n let maxLen = 1; // only find elements > 1\r\n let currStart = null;\r\n let currLen = 0;\r\n\r\n for (let i = 0; i < arr.length; ++i) {\r\n if (arr[i] !== 0) {\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n currStart = null;\r\n currLen = 0;\r\n } else {\r\n if (currStart === null) {\r\n currStart = i;\r\n }\r\n ++currLen;\r\n }\r\n }\r\n\r\n // if trailing zeros\r\n if (currLen > maxLen) {\r\n maxIdx = currStart;\r\n maxLen = currLen;\r\n }\r\n\r\n return {\r\n idx: maxIdx,\r\n len: maxLen\r\n };\r\n}\r\n\r\nfunction serializeHost(host) {\r\n if (typeof host === \"number\") {\r\n return serializeIPv4(host);\r\n }\r\n\r\n // IPv6 serializer\r\n if (host instanceof Array) {\r\n return \"[\" + serializeIPv6(host) + \"]\";\r\n }\r\n\r\n return host;\r\n}\r\n\r\nfunction trimControlChars(url) {\r\n return url.replace(/^[\\u0000-\\u001F\\u0020]+|[\\u0000-\\u001F\\u0020]+$/g, \"\");\r\n}\r\n\r\nfunction trimTabAndNewline(url) {\r\n return url.replace(/\\u0009|\\u000A|\\u000D/g, \"\");\r\n}\r\n\r\nfunction shortenPath(url) {\r\n const path = url.path;\r\n if (path.length === 0) {\r\n return;\r\n }\r\n if (url.scheme === 
\"file\" && path.length === 1 && isNormalizedWindowsDriveLetter(path[0])) {\r\n return;\r\n }\r\n\r\n path.pop();\r\n}\r\n\r\nfunction includesCredentials(url) {\r\n return url.username !== \"\" || url.password !== \"\";\r\n}\r\n\r\nfunction cannotHaveAUsernamePasswordPort(url) {\r\n return url.host === null || url.host === \"\" || url.cannotBeABaseURL || url.scheme === \"file\";\r\n}\r\n\r\nfunction isNormalizedWindowsDriveLetter(string) {\r\n return /^[A-Za-z]:$/.test(string);\r\n}\r\n\r\nfunction URLStateMachine(input, base, encodingOverride, url, stateOverride) {\r\n this.pointer = 0;\r\n this.input = input;\r\n this.base = base || null;\r\n this.encodingOverride = encodingOverride || \"utf-8\";\r\n this.stateOverride = stateOverride;\r\n this.url = url;\r\n this.failure = false;\r\n this.parseError = false;\r\n\r\n if (!this.url) {\r\n this.url = {\r\n scheme: \"\",\r\n username: \"\",\r\n password: \"\",\r\n host: null,\r\n port: null,\r\n path: [],\r\n query: null,\r\n fragment: null,\r\n\r\n cannotBeABaseURL: false\r\n };\r\n\r\n const res = trimControlChars(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n }\r\n\r\n const res = trimTabAndNewline(this.input);\r\n if (res !== this.input) {\r\n this.parseError = true;\r\n }\r\n this.input = res;\r\n\r\n this.state = stateOverride || \"scheme start\";\r\n\r\n this.buffer = \"\";\r\n this.atFlag = false;\r\n this.arrFlag = false;\r\n this.passwordTokenSeenFlag = false;\r\n\r\n this.input = punycode.ucs2.decode(this.input);\r\n\r\n for (; this.pointer <= this.input.length; ++this.pointer) {\r\n const c = this.input[this.pointer];\r\n const cStr = isNaN(c) ? undefined : String.fromCodePoint(c);\r\n\r\n // exec state machine\r\n const ret = this[\"parse \" + this.state](c, cStr);\r\n if (!ret) {\r\n break; // terminate algorithm\r\n } else if (ret === failure) {\r\n this.failure = true;\r\n break;\r\n }\r\n }\r\n}\r\n\r\nURLStateMachine.prototype[\"parse scheme start\"] = function parseSchemeStart(c, cStr) {\r\n if (isASCIIAlpha(c)) {\r\n this.buffer += cStr.toLowerCase();\r\n this.state = \"scheme\";\r\n } else if (!this.stateOverride) {\r\n this.state = \"no scheme\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse scheme\"] = function parseScheme(c, cStr) {\r\n if (isASCIIAlphanumeric(c) || c === 43 || c === 45 || c === 46) {\r\n this.buffer += cStr.toLowerCase();\r\n } else if (c === 58) {\r\n if (this.stateOverride) {\r\n if (isSpecial(this.url) && !isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if (!isSpecial(this.url) && isSpecialScheme(this.buffer)) {\r\n return false;\r\n }\r\n\r\n if ((includesCredentials(this.url) || this.url.port !== null) && this.buffer === \"file\") {\r\n return false;\r\n }\r\n\r\n if (this.url.scheme === \"file\" && (this.url.host === \"\" || this.url.host === null)) {\r\n return false;\r\n }\r\n }\r\n this.url.scheme = this.buffer;\r\n this.buffer = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n if (this.url.scheme === \"file\") {\r\n if (this.input[this.pointer + 1] !== 47 || this.input[this.pointer + 2] !== 47) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file\";\r\n } else if (isSpecial(this.url) && this.base !== null && this.base.scheme === this.url.scheme) {\r\n this.state = \"special relative or authority\";\r\n } else if (isSpecial(this.url)) {\r\n this.state = \"special authority slashes\";\r\n } 
else if (this.input[this.pointer + 1] === 47) {\r\n this.state = \"path or authority\";\r\n ++this.pointer;\r\n } else {\r\n this.url.cannotBeABaseURL = true;\r\n this.url.path.push(\"\");\r\n this.state = \"cannot-be-a-base-URL path\";\r\n }\r\n } else if (!this.stateOverride) {\r\n this.buffer = \"\";\r\n this.state = \"no scheme\";\r\n this.pointer = -1;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse no scheme\"] = function parseNoScheme(c) {\r\n if (this.base === null || (this.base.cannotBeABaseURL && c !== 35)) {\r\n return failure;\r\n } else if (this.base.cannotBeABaseURL && c === 35) {\r\n this.url.scheme = this.base.scheme;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.url.cannotBeABaseURL = true;\r\n this.state = \"fragment\";\r\n } else if (this.base.scheme === \"file\") {\r\n this.state = \"file\";\r\n --this.pointer;\r\n } else {\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special relative or authority\"] = function parseSpecialRelativeOrAuthority(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"relative\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path or authority\"] = function parsePathOrAuthority(c) {\r\n if (c === 47) {\r\n this.state = \"authority\";\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative\"] = function parseRelative(c) {\r\n this.url.scheme = this.base.scheme;\r\n if (isNaN(c)) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 47) {\r\n this.state = \"relative slash\";\r\n } else if (c === 63) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n this.state = \"relative slash\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.url.path = this.base.path.slice(0, this.base.path.length - 1);\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse relative slash\"] = function parseRelativeSlash(c) {\r\n if (isSpecial(this.url) && (c === 47 || c === 92)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"special authority ignore slashes\";\r\n } else if (c === 47) {\r\n this.state = 
\"authority\";\r\n } else {\r\n this.url.username = this.base.username;\r\n this.url.password = this.base.password;\r\n this.url.host = this.base.host;\r\n this.url.port = this.base.port;\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority slashes\"] = function parseSpecialAuthoritySlashes(c) {\r\n if (c === 47 && this.input[this.pointer + 1] === 47) {\r\n this.state = \"special authority ignore slashes\";\r\n ++this.pointer;\r\n } else {\r\n this.parseError = true;\r\n this.state = \"special authority ignore slashes\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse special authority ignore slashes\"] = function parseSpecialAuthorityIgnoreSlashes(c) {\r\n if (c !== 47 && c !== 92) {\r\n this.state = \"authority\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse authority\"] = function parseAuthority(c, cStr) {\r\n if (c === 64) {\r\n this.parseError = true;\r\n if (this.atFlag) {\r\n this.buffer = \"%40\" + this.buffer;\r\n }\r\n this.atFlag = true;\r\n\r\n // careful, this is based on buffer and has its own pointer (this.pointer != pointer) and inner chars\r\n const len = countSymbols(this.buffer);\r\n for (let pointer = 0; pointer < len; ++pointer) {\r\n const codePoint = this.buffer.codePointAt(pointer);\r\n\r\n if (codePoint === 58 && !this.passwordTokenSeenFlag) {\r\n this.passwordTokenSeenFlag = true;\r\n continue;\r\n }\r\n const encodedCodePoints = percentEncodeChar(codePoint, isUserinfoPercentEncode);\r\n if (this.passwordTokenSeenFlag) {\r\n this.url.password += encodedCodePoints;\r\n } else {\r\n this.url.username += encodedCodePoints;\r\n }\r\n }\r\n this.buffer = \"\";\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n if (this.atFlag && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.pointer -= countSymbols(this.buffer) + 1;\r\n this.buffer = \"\";\r\n this.state = \"host\";\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse hostname\"] =\r\nURLStateMachine.prototype[\"parse host\"] = function parseHostName(c, cStr) {\r\n if (this.stateOverride && this.url.scheme === \"file\") {\r\n --this.pointer;\r\n this.state = \"file host\";\r\n } else if (c === 58 && !this.arrFlag) {\r\n if (this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"port\";\r\n if (this.stateOverride === \"hostname\") {\r\n return false;\r\n }\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92)) {\r\n --this.pointer;\r\n if (isSpecial(this.url) && this.buffer === \"\") {\r\n this.parseError = true;\r\n return failure;\r\n } else if (this.stateOverride && this.buffer === \"\" &&\r\n (includesCredentials(this.url) || this.url.port !== null)) {\r\n this.parseError = true;\r\n return false;\r\n }\r\n\r\n const host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n\r\n this.url.host = host;\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n } else {\r\n if (c === 
91) {\r\n this.arrFlag = true;\r\n } else if (c === 93) {\r\n this.arrFlag = false;\r\n }\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse port\"] = function parsePort(c, cStr) {\r\n if (isASCIIDigit(c)) {\r\n this.buffer += cStr;\r\n } else if (isNaN(c) || c === 47 || c === 63 || c === 35 ||\r\n (isSpecial(this.url) && c === 92) ||\r\n this.stateOverride) {\r\n if (this.buffer !== \"\") {\r\n const port = parseInt(this.buffer);\r\n if (port > Math.pow(2, 16) - 1) {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n this.url.port = port === defaultPort(this.url.scheme) ? null : port;\r\n this.buffer = \"\";\r\n }\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n --this.pointer;\r\n } else {\r\n this.parseError = true;\r\n return failure;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nconst fileOtherwiseCodePoints = new Set([47, 92, 63, 35]);\r\n\r\nURLStateMachine.prototype[\"parse file\"] = function parseFile(c) {\r\n this.url.scheme = \"file\";\r\n\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file slash\";\r\n } else if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNaN(c)) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n } else if (c === 63) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n this.url.query = this.base.query;\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n if (this.input.length - this.pointer - 1 === 0 || // remaining consists of 0 code points\r\n !isWindowsDriveLetterCodePoints(c, this.input[this.pointer + 1]) ||\r\n (this.input.length - this.pointer - 1 >= 2 && // remaining has at least 2 code points\r\n !fileOtherwiseCodePoints.has(this.input[this.pointer + 2]))) {\r\n this.url.host = this.base.host;\r\n this.url.path = this.base.path.slice();\r\n shortenPath(this.url);\r\n } else {\r\n this.parseError = true;\r\n }\r\n\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n } else {\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file slash\"] = function parseFileSlash(c) {\r\n if (c === 47 || c === 92) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"file host\";\r\n } else {\r\n if (this.base !== null && this.base.scheme === \"file\") {\r\n if (isNormalizedWindowsDriveLetterString(this.base.path[0])) {\r\n this.url.path.push(this.base.path[0]);\r\n } else {\r\n this.url.host = this.base.host;\r\n }\r\n }\r\n this.state = \"path\";\r\n --this.pointer;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse file host\"] = function parseFileHost(c, cStr) {\r\n if (isNaN(c) || c === 47 || c === 92 || c === 63 || c === 35) {\r\n --this.pointer;\r\n if (!this.stateOverride && isWindowsDriveLetterString(this.buffer)) {\r\n this.parseError = true;\r\n this.state = \"path\";\r\n } else if (this.buffer === \"\") {\r\n this.url.host = \"\";\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n this.state = \"path start\";\r\n } else {\r\n let host = parseHost(this.buffer, isSpecial(this.url));\r\n if (host === failure) {\r\n return failure;\r\n }\r\n if (host === \"localhost\") {\r\n host = 
\"\";\r\n }\r\n this.url.host = host;\r\n\r\n if (this.stateOverride) {\r\n return false;\r\n }\r\n\r\n this.buffer = \"\";\r\n this.state = \"path start\";\r\n }\r\n } else {\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path start\"] = function parsePathStart(c) {\r\n if (isSpecial(this.url)) {\r\n if (c === 92) {\r\n this.parseError = true;\r\n }\r\n this.state = \"path\";\r\n\r\n if (c !== 47 && c !== 92) {\r\n --this.pointer;\r\n }\r\n } else if (!this.stateOverride && c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (!this.stateOverride && c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else if (c !== undefined) {\r\n this.state = \"path\";\r\n if (c !== 47) {\r\n --this.pointer;\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse path\"] = function parsePath(c) {\r\n if (isNaN(c) || c === 47 || (isSpecial(this.url) && c === 92) ||\r\n (!this.stateOverride && (c === 63 || c === 35))) {\r\n if (isSpecial(this.url) && c === 92) {\r\n this.parseError = true;\r\n }\r\n\r\n if (isDoubleDot(this.buffer)) {\r\n shortenPath(this.url);\r\n if (c !== 47 && !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n }\r\n } else if (isSingleDot(this.buffer) && c !== 47 &&\r\n !(isSpecial(this.url) && c === 92)) {\r\n this.url.path.push(\"\");\r\n } else if (!isSingleDot(this.buffer)) {\r\n if (this.url.scheme === \"file\" && this.url.path.length === 0 && isWindowsDriveLetterString(this.buffer)) {\r\n if (this.url.host !== \"\" && this.url.host !== null) {\r\n this.parseError = true;\r\n this.url.host = \"\";\r\n }\r\n this.buffer = this.buffer[0] + \":\";\r\n }\r\n this.url.path.push(this.buffer);\r\n }\r\n this.buffer = \"\";\r\n if (this.url.scheme === \"file\" && (c === undefined || c === 63 || c === 35)) {\r\n while (this.url.path.length > 1 && this.url.path[0] === \"\") {\r\n this.parseError = true;\r\n this.url.path.shift();\r\n }\r\n }\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n }\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += percentEncodeChar(c, isPathPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse cannot-be-a-base-URL path\"] = function parseCannotBeABaseURLPath(c) {\r\n if (c === 63) {\r\n this.url.query = \"\";\r\n this.state = \"query\";\r\n } else if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n } else {\r\n // TODO: Add: not a URL code point\r\n if (!isNaN(c) && c !== 37) {\r\n this.parseError = true;\r\n }\r\n\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n if (!isNaN(c)) {\r\n this.url.path[0] = this.url.path[0] + percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse query\"] = function parseQuery(c, cStr) {\r\n if (isNaN(c) || (!this.stateOverride && c === 35)) {\r\n if (!isSpecial(this.url) || this.url.scheme === \"ws\" || this.url.scheme === \"wss\") {\r\n this.encodingOverride = \"utf-8\";\r\n }\r\n\r\n const buffer = new 
Buffer(this.buffer); // TODO: Use encoding override instead\r\n for (let i = 0; i < buffer.length; ++i) {\r\n if (buffer[i] < 0x21 || buffer[i] > 0x7E || buffer[i] === 0x22 || buffer[i] === 0x23 ||\r\n buffer[i] === 0x3C || buffer[i] === 0x3E) {\r\n this.url.query += percentEncode(buffer[i]);\r\n } else {\r\n this.url.query += String.fromCodePoint(buffer[i]);\r\n }\r\n }\r\n\r\n this.buffer = \"\";\r\n if (c === 35) {\r\n this.url.fragment = \"\";\r\n this.state = \"fragment\";\r\n }\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.buffer += cStr;\r\n }\r\n\r\n return true;\r\n};\r\n\r\nURLStateMachine.prototype[\"parse fragment\"] = function parseFragment(c) {\r\n if (isNaN(c)) { // do nothing\r\n } else if (c === 0x0) {\r\n this.parseError = true;\r\n } else {\r\n // TODO: If c is not a URL code point and not \"%\", parse error.\r\n if (c === 37 &&\r\n (!isASCIIHex(this.input[this.pointer + 1]) ||\r\n !isASCIIHex(this.input[this.pointer + 2]))) {\r\n this.parseError = true;\r\n }\r\n\r\n this.url.fragment += percentEncodeChar(c, isC0ControlPercentEncode);\r\n }\r\n\r\n return true;\r\n};\r\n\r\nfunction serializeURL(url, excludeFragment) {\r\n let output = url.scheme + \":\";\r\n if (url.host !== null) {\r\n output += \"//\";\r\n\r\n if (url.username !== \"\" || url.password !== \"\") {\r\n output += url.username;\r\n if (url.password !== \"\") {\r\n output += \":\" + url.password;\r\n }\r\n output += \"@\";\r\n }\r\n\r\n output += serializeHost(url.host);\r\n\r\n if (url.port !== null) {\r\n output += \":\" + url.port;\r\n }\r\n } else if (url.host === null && url.scheme === \"file\") {\r\n output += \"//\";\r\n }\r\n\r\n if (url.cannotBeABaseURL) {\r\n output += url.path[0];\r\n } else {\r\n for (const string of url.path) {\r\n output += \"/\" + string;\r\n }\r\n }\r\n\r\n if (url.query !== null) {\r\n output += \"?\" + url.query;\r\n }\r\n\r\n if (!excludeFragment && url.fragment !== null) {\r\n output += \"#\" + url.fragment;\r\n }\r\n\r\n return output;\r\n}\r\n\r\nfunction serializeOrigin(tuple) {\r\n let result = tuple.scheme + \"://\";\r\n result += serializeHost(tuple.host);\r\n\r\n if (tuple.port !== null) {\r\n result += \":\" + tuple.port;\r\n }\r\n\r\n return result;\r\n}\r\n\r\nmodule.exports.serializeURL = serializeURL;\r\n\r\nmodule.exports.serializeURLOrigin = function (url) {\r\n // https://url.spec.whatwg.org/#concept-url-origin\r\n switch (url.scheme) {\r\n case \"blob\":\r\n try {\r\n return module.exports.serializeURLOrigin(module.exports.parseURL(url.path[0]));\r\n } catch (e) {\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n case \"ftp\":\r\n case \"gopher\":\r\n case \"http\":\r\n case \"https\":\r\n case \"ws\":\r\n case \"wss\":\r\n return serializeOrigin({\r\n scheme: url.scheme,\r\n host: url.host,\r\n port: url.port\r\n });\r\n case \"file\":\r\n // spec says \"exercise to the reader\", chrome says \"file://\"\r\n return \"file://\";\r\n default:\r\n // serializing an opaque origin returns \"null\"\r\n return \"null\";\r\n }\r\n};\r\n\r\nmodule.exports.basicURLParse = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n const usm = new URLStateMachine(input, options.baseURL, options.encodingOverride, options.url, options.stateOverride);\r\n if (usm.failure) {\r\n return \"failure\";\r\n 
}\r\n\r\n return usm.url;\r\n};\r\n\r\nmodule.exports.setTheUsername = function (url, username) {\r\n url.username = \"\";\r\n const decoded = punycode.ucs2.decode(username);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.username += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.setThePassword = function (url, password) {\r\n url.password = \"\";\r\n const decoded = punycode.ucs2.decode(password);\r\n for (let i = 0; i < decoded.length; ++i) {\r\n url.password += percentEncodeChar(decoded[i], isUserinfoPercentEncode);\r\n }\r\n};\r\n\r\nmodule.exports.serializeHost = serializeHost;\r\n\r\nmodule.exports.cannotHaveAUsernamePasswordPort = cannotHaveAUsernamePasswordPort;\r\n\r\nmodule.exports.serializeInteger = function (integer) {\r\n return String(integer);\r\n};\r\n\r\nmodule.exports.parseURL = function (input, options) {\r\n if (options === undefined) {\r\n options = {};\r\n }\r\n\r\n // We don't handle blobs, so this just delegates:\r\n return module.exports.basicURLParse(input, { baseURL: options.baseURL, encodingOverride: options.encodingOverride });\r\n};\r\n","\"use strict\";\n\nmodule.exports.mixin = function mixin(target, source) {\n const keys = Object.getOwnPropertyNames(source);\n for (let i = 0; i < keys.length; ++i) {\n Object.defineProperty(target, keys[i], Object.getOwnPropertyDescriptor(source, keys[i]));\n }\n};\n\nmodule.exports.wrapperSymbol = Symbol(\"wrapper\");\nmodule.exports.implSymbol = Symbol(\"impl\");\n\nmodule.exports.wrapperForImpl = function (impl) {\n return impl[module.exports.wrapperSymbol];\n};\n\nmodule.exports.implForWrapper = function (wrapper) {\n return wrapper[module.exports.implSymbol];\n};\n\n","// Returns a wrapper function that returns a wrapped callback\n// The wrapper function should do some stuff, and return a\n// presumably different callback function.\n// This makes sure that own properties are retained, so that\n// decorations and such are not lost along the way.\nmodule.exports = wrappy\nfunction wrappy (fn, cb) {\n if (fn && cb) return wrappy(fn)(cb)\n\n if (typeof fn !== 'function')\n throw new TypeError('need wrapper function')\n\n Object.keys(fn).forEach(function (k) {\n wrapper[k] = fn[k]\n })\n\n return wrapper\n\n function wrapper() {\n var args = new Array(arguments.length)\n for (var i = 0; i < args.length; i++) {\n args[i] = arguments[i]\n }\n var ret = fn.apply(this, args)\n var cb = args[args.length-1]\n if (typeof ret === 'function' && ret !== cb) {\n Object.keys(cb).forEach(function (k) {\n ret[k] = cb[k]\n })\n }\n return ret\n }\n}\n",null,"module.exports = require(\"assert\");","module.exports = require(\"crypto\");","module.exports = require(\"events\");","module.exports = require(\"fs\");","module.exports = require(\"http\");","module.exports = require(\"https\");","module.exports = require(\"net\");","module.exports = require(\"node:fs\");","module.exports = require(\"node:stream\");","module.exports = require(\"os\");","module.exports = require(\"path\");","module.exports = require(\"punycode\");","module.exports = require(\"stream\");","module.exports = require(\"tls\");","module.exports = require(\"url\");","module.exports = require(\"util\");","module.exports = require(\"worker_threads\");","module.exports = require(\"zlib\");","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === 
\"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nvar _FormDataEncoder_instances, _FormDataEncoder_CRLF, _FormDataEncoder_CRLF_BYTES, _FormDataEncoder_CRLF_BYTES_LENGTH, _FormDataEncoder_DASHES, _FormDataEncoder_encoder, _FormDataEncoder_footer, _FormDataEncoder_form, _FormDataEncoder_options, _FormDataEncoder_getFieldHeader;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Encoder = exports.FormDataEncoder = void 0;\nconst createBoundary_1 = __importDefault(require(\"./util/createBoundary\"));\nconst isPlainObject_1 = __importDefault(require(\"./util/isPlainObject\"));\nconst normalizeValue_1 = __importDefault(require(\"./util/normalizeValue\"));\nconst escapeName_1 = __importDefault(require(\"./util/escapeName\"));\nconst isFileLike_1 = require(\"./util/isFileLike\");\nconst isFormData_1 = require(\"./util/isFormData\");\nconst defaultOptions = {\n enableAdditionalHeaders: false\n};\nclass FormDataEncoder {\n constructor(form, boundaryOrOptions, options) {\n _FormDataEncoder_instances.add(this);\n _FormDataEncoder_CRLF.set(this, \"\\r\\n\");\n _FormDataEncoder_CRLF_BYTES.set(this, void 0);\n _FormDataEncoder_CRLF_BYTES_LENGTH.set(this, void 0);\n _FormDataEncoder_DASHES.set(this, \"-\".repeat(2));\n _FormDataEncoder_encoder.set(this, new TextEncoder());\n _FormDataEncoder_footer.set(this, void 0);\n _FormDataEncoder_form.set(this, void 0);\n _FormDataEncoder_options.set(this, void 0);\n if (!(0, isFormData_1.isFormData)(form)) {\n throw new TypeError(\"Expected first argument to be a FormData instance.\");\n }\n let boundary;\n if ((0, isPlainObject_1.default)(boundaryOrOptions)) {\n options = boundaryOrOptions;\n }\n else {\n boundary = boundaryOrOptions;\n }\n if (!boundary) {\n boundary = (0, createBoundary_1.default)();\n }\n if (typeof boundary !== \"string\") {\n throw new TypeError(\"Expected boundary argument to be a string.\");\n }\n if (options && !(0, isPlainObject_1.default)(options)) {\n throw new TypeError(\"Expected options argument to be an object.\");\n }\n __classPrivateFieldSet(this, _FormDataEncoder_form, form, \"f\");\n __classPrivateFieldSet(this, _FormDataEncoder_options, { ...defaultOptions, ...options }, \"f\");\n __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES, __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode(__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")), \"f\");\n __classPrivateFieldSet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, __classPrivateFieldGet(this, 
_FormDataEncoder_CRLF_BYTES, \"f\").byteLength, \"f\");\n this.boundary = `form-data-boundary-${boundary}`;\n this.contentType = `multipart/form-data; boundary=${this.boundary}`;\n __classPrivateFieldSet(this, _FormDataEncoder_footer, __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode(`${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, \"f\")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, \"f\")}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\").repeat(2)}`), \"f\");\n this.contentLength = String(this.getContentLength());\n this.headers = Object.freeze({\n \"Content-Type\": this.contentType,\n \"Content-Length\": this.contentLength\n });\n Object.defineProperties(this, {\n boundary: { writable: false, configurable: false },\n contentType: { writable: false, configurable: false },\n contentLength: { writable: false, configurable: false },\n headers: { writable: false, configurable: false }\n });\n }\n getContentLength() {\n let length = 0;\n for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, \"f\")) {\n const value = (0, isFileLike_1.isFileLike)(raw) ? raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode((0, normalizeValue_1.default)(raw));\n length += __classPrivateFieldGet(this, _FormDataEncoder_instances, \"m\", _FormDataEncoder_getFieldHeader).call(this, name, value).byteLength;\n length += (0, isFileLike_1.isFileLike)(value) ? value.size : value.byteLength;\n length += __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES_LENGTH, \"f\");\n }\n return length + __classPrivateFieldGet(this, _FormDataEncoder_footer, \"f\").byteLength;\n }\n *values() {\n for (const [name, raw] of __classPrivateFieldGet(this, _FormDataEncoder_form, \"f\").entries()) {\n const value = (0, isFileLike_1.isFileLike)(raw) ? 
raw : __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode((0, normalizeValue_1.default)(raw));\n yield __classPrivateFieldGet(this, _FormDataEncoder_instances, \"m\", _FormDataEncoder_getFieldHeader).call(this, name, value);\n yield value;\n yield __classPrivateFieldGet(this, _FormDataEncoder_CRLF_BYTES, \"f\");\n }\n yield __classPrivateFieldGet(this, _FormDataEncoder_footer, \"f\");\n }\n async *encode() {\n for (const part of this.values()) {\n if ((0, isFileLike_1.isFileLike)(part)) {\n yield* part.stream();\n }\n else {\n yield part;\n }\n }\n }\n [(_FormDataEncoder_CRLF = new WeakMap(), _FormDataEncoder_CRLF_BYTES = new WeakMap(), _FormDataEncoder_CRLF_BYTES_LENGTH = new WeakMap(), _FormDataEncoder_DASHES = new WeakMap(), _FormDataEncoder_encoder = new WeakMap(), _FormDataEncoder_footer = new WeakMap(), _FormDataEncoder_form = new WeakMap(), _FormDataEncoder_options = new WeakMap(), _FormDataEncoder_instances = new WeakSet(), _FormDataEncoder_getFieldHeader = function _FormDataEncoder_getFieldHeader(name, value) {\n let header = \"\";\n header += `${__classPrivateFieldGet(this, _FormDataEncoder_DASHES, \"f\")}${this.boundary}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")}`;\n header += `Content-Disposition: form-data; name=\"${(0, escapeName_1.default)(name)}\"`;\n if ((0, isFileLike_1.isFileLike)(value)) {\n header += `; filename=\"${(0, escapeName_1.default)(value.name)}\"${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")}`;\n header += `Content-Type: ${value.type || \"application/octet-stream\"}`;\n }\n if (__classPrivateFieldGet(this, _FormDataEncoder_options, \"f\").enableAdditionalHeaders === true) {\n header += `${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\")}Content-Length: ${(0, isFileLike_1.isFileLike)(value) ? value.size : value.byteLength}`;\n }\n return __classPrivateFieldGet(this, _FormDataEncoder_encoder, \"f\").encode(`${header}${__classPrivateFieldGet(this, _FormDataEncoder_CRLF, \"f\").repeat(2)}`);\n }, Symbol.iterator)]() {\n return this.values();\n }\n [Symbol.asyncIterator]() {\n return this.encode();\n }\n}\nexports.FormDataEncoder = FormDataEncoder;\nexports.Encoder = FormDataEncoder;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./FormDataEncoder\"), exports);\n__exportStar(require(\"./FileLike\"), exports);\n__exportStar(require(\"./FormDataLike\"), exports);\n__exportStar(require(\"./util/isFileLike\"), exports);\n__exportStar(require(\"./util/isFormData\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst alphabet = \"abcdefghijklmnopqrstuvwxyz0123456789\";\nfunction createBoundary() {\n let size = 16;\n let res = \"\";\n while (size--) {\n res += alphabet[(Math.random() * alphabet.length) << 0];\n }\n return res;\n}\nexports.default = createBoundary;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst escapeName = (name) => String(name)\n .replace(/\\r/g, \"%0D\")\n .replace(/\\n/g, \"%0A\")\n .replace(/\"/g, \"%22\");\nexports.default = escapeName;\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFileLike = void 0;\nconst isFunction_1 = __importDefault(require(\"./isFunction\"));\nconst isFileLike = (value) => Boolean(value\n && typeof value === \"object\"\n && (0, isFunction_1.default)(value.constructor)\n && value[Symbol.toStringTag] === \"File\"\n && (0, isFunction_1.default)(value.stream)\n && value.name != null\n && value.size != null\n && value.lastModified != null);\nexports.isFileLike = isFileLike;\n","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFormDataLike = exports.isFormData = void 0;\nconst isFunction_1 = __importDefault(require(\"./isFunction\"));\nconst isFormData = (value) => Boolean(value\n && (0, isFunction_1.default)(value.constructor)\n && value[Symbol.toStringTag] === \"FormData\"\n && (0, isFunction_1.default)(value.append)\n && (0, isFunction_1.default)(value.getAll)\n && (0, isFunction_1.default)(value.entries)\n && (0, isFunction_1.default)(value[Symbol.iterator]));\nexports.isFormData = isFormData;\nexports.isFormDataLike = exports.isFormData;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst isFunction = (value) => (typeof value === \"function\");\nexports.default = isFunction;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase());\nfunction isPlainObject(value) {\n if (getType(value) !== \"object\") {\n return false;\n }\n const pp = Object.getPrototypeOf(value);\n if (pp === null || pp === undefined) {\n return true;\n }\n const Ctor = pp.constructor && pp.constructor.toString();\n return Ctor === Object.toString();\n}\nexports.default = isPlainObject;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst normalizeValue = (value) => String(value)\n .replace(/\\r|\\n/g, (match, i, str) => {\n if ((match === \"\\r\" && str[i + 1] !== \"\\n\")\n || (match === \"\\n\" && str[i - 1] !== \"\\r\")) {\n return \"\\r\\n\";\n }\n return match;\n});\nexports.default = normalizeValue;\n","\"use strict\";\n/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar _Blob_parts, _Blob_type, _Blob_size;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Blob = void 0;\nconst web_streams_polyfill_1 = require(\"web-streams-polyfill\");\nconst isFunction_1 = require(\"./isFunction\");\nconst blobHelpers_1 = require(\"./blobHelpers\");\nclass Blob {\n constructor(blobParts = [], options = {}) {\n _Blob_parts.set(this, []);\n _Blob_type.set(this, \"\");\n _Blob_size.set(this, 0);\n options !== null && options !== void 0 ? 
options : (options = {});\n if (typeof blobParts !== \"object\" || blobParts === null) {\n throw new TypeError(\"Failed to construct 'Blob': \"\n + \"The provided value cannot be converted to a sequence.\");\n }\n if (!(0, isFunction_1.isFunction)(blobParts[Symbol.iterator])) {\n throw new TypeError(\"Failed to construct 'Blob': \"\n + \"The object must have a callable @@iterator property.\");\n }\n if (typeof options !== \"object\" && !(0, isFunction_1.isFunction)(options)) {\n throw new TypeError(\"Failed to construct 'Blob': parameter 2 cannot convert to dictionary.\");\n }\n const encoder = new TextEncoder();\n for (const raw of blobParts) {\n let part;\n if (ArrayBuffer.isView(raw)) {\n part = new Uint8Array(raw.buffer.slice(raw.byteOffset, raw.byteOffset + raw.byteLength));\n }\n else if (raw instanceof ArrayBuffer) {\n part = new Uint8Array(raw.slice(0));\n }\n else if (raw instanceof Blob) {\n part = raw;\n }\n else {\n part = encoder.encode(String(raw));\n }\n __classPrivateFieldSet(this, _Blob_size, __classPrivateFieldGet(this, _Blob_size, \"f\") + (ArrayBuffer.isView(part) ? part.byteLength : part.size), \"f\");\n __classPrivateFieldGet(this, _Blob_parts, \"f\").push(part);\n }\n const type = options.type === undefined ? \"\" : String(options.type);\n __classPrivateFieldSet(this, _Blob_type, /^[\\x20-\\x7E]*$/.test(type) ? type : \"\", \"f\");\n }\n static [(_Blob_parts = new WeakMap(), _Blob_type = new WeakMap(), _Blob_size = new WeakMap(), Symbol.hasInstance)](value) {\n return Boolean(value\n && typeof value === \"object\"\n && (0, isFunction_1.isFunction)(value.constructor)\n && ((0, isFunction_1.isFunction)(value.stream)\n || (0, isFunction_1.isFunction)(value.arrayBuffer))\n && /^(Blob|File)$/.test(value[Symbol.toStringTag]));\n }\n get type() {\n return __classPrivateFieldGet(this, _Blob_type, \"f\");\n }\n get size() {\n return __classPrivateFieldGet(this, _Blob_size, \"f\");\n }\n slice(start, end, contentType) {\n return new Blob((0, blobHelpers_1.sliceBlob)(__classPrivateFieldGet(this, _Blob_parts, \"f\"), this.size, start, end), {\n type: contentType\n });\n }\n async text() {\n const decoder = new TextDecoder();\n let result = \"\";\n for await (const chunk of (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, \"f\"))) {\n result += decoder.decode(chunk, { stream: true });\n }\n result += decoder.decode();\n return result;\n }\n async arrayBuffer() {\n const view = new Uint8Array(this.size);\n let offset = 0;\n for await (const chunk of (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, \"f\"))) {\n view.set(chunk, offset);\n offset += chunk.length;\n }\n return view.buffer;\n }\n stream() {\n const iterator = (0, blobHelpers_1.consumeBlobParts)(__classPrivateFieldGet(this, _Blob_parts, \"f\"), true);\n return new web_streams_polyfill_1.ReadableStream({\n async pull(controller) {\n const { value, done } = await iterator.next();\n if (done) {\n return queueMicrotask(() => controller.close());\n }\n controller.enqueue(value);\n },\n async cancel() {\n await iterator.return();\n }\n });\n }\n get [Symbol.toStringTag]() {\n return \"Blob\";\n }\n}\nexports.Blob = Blob;\nObject.defineProperties(Blob.prototype, {\n type: { enumerable: true },\n size: { enumerable: true },\n slice: { enumerable: true },\n stream: { enumerable: true },\n text: { enumerable: true },\n arrayBuffer: { enumerable: true }\n});\n","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, 
value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _File_name, _File_lastModified;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.File = void 0;\nconst Blob_1 = require(\"./Blob\");\nclass File extends Blob_1.Blob {\n constructor(fileBits, name, options = {}) {\n super(fileBits, options);\n _File_name.set(this, void 0);\n _File_lastModified.set(this, 0);\n if (arguments.length < 2) {\n throw new TypeError(\"Failed to construct 'File': 2 arguments required, \"\n + `but only ${arguments.length} present.`);\n }\n __classPrivateFieldSet(this, _File_name, String(name), \"f\");\n const lastModified = options.lastModified === undefined\n ? Date.now()\n : Number(options.lastModified);\n if (!Number.isNaN(lastModified)) {\n __classPrivateFieldSet(this, _File_lastModified, lastModified, \"f\");\n }\n }\n static [(_File_name = new WeakMap(), _File_lastModified = new WeakMap(), Symbol.hasInstance)](value) {\n return value instanceof Blob_1.Blob\n && value[Symbol.toStringTag] === \"File\"\n && typeof value.name === \"string\";\n }\n get name() {\n return __classPrivateFieldGet(this, _File_name, \"f\");\n }\n get lastModified() {\n return __classPrivateFieldGet(this, _File_lastModified, \"f\");\n }\n get webkitRelativePath() {\n return \"\";\n }\n get [Symbol.toStringTag]() {\n return \"File\";\n }\n}\nexports.File = File;\n","\"use strict\";\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _FormData_instances, _FormData_entries, _FormData_setEntry;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FormData = void 0;\nconst util_1 = require(\"util\");\nconst File_1 = require(\"./File\");\nconst isFile_1 = require(\"./isFile\");\nconst isBlob_1 = require(\"./isBlob\");\nconst isFunction_1 = require(\"./isFunction\");\nconst deprecateConstructorEntries_1 = require(\"./deprecateConstructorEntries\");\nclass FormData {\n constructor(entries) {\n _FormData_instances.add(this);\n _FormData_entries.set(this, new Map());\n if (entries) {\n (0, deprecateConstructorEntries_1.deprecateConstructorEntries)();\n entries.forEach(({ name, value, fileName }) => this.append(name, value, fileName));\n }\n }\n static [(_FormData_entries = new WeakMap(), _FormData_instances = new WeakSet(), Symbol.hasInstance)](value) {\n return Boolean(value\n && (0, isFunction_1.isFunction)(value.constructor)\n && value[Symbol.toStringTag] === \"FormData\"\n && (0, isFunction_1.isFunction)(value.append)\n && (0, isFunction_1.isFunction)(value.set)\n && (0, isFunction_1.isFunction)(value.get)\n && (0, isFunction_1.isFunction)(value.getAll)\n && (0, isFunction_1.isFunction)(value.has)\n && (0, isFunction_1.isFunction)(value.delete)\n && (0, isFunction_1.isFunction)(value.entries)\n && (0, isFunction_1.isFunction)(value.values)\n && (0, isFunction_1.isFunction)(value.keys)\n && (0, isFunction_1.isFunction)(value[Symbol.iterator])\n && (0, isFunction_1.isFunction)(value.forEach));\n }\n append(name, value, fileName) {\n __classPrivateFieldGet(this, _FormData_instances, \"m\", _FormData_setEntry).call(this, {\n name,\n fileName,\n append: true,\n rawValue: value,\n argsLength: arguments.length\n });\n }\n set(name, value, fileName) {\n __classPrivateFieldGet(this, _FormData_instances, \"m\", _FormData_setEntry).call(this, {\n name,\n fileName,\n append: false,\n rawValue: value,\n argsLength: arguments.length\n });\n }\n get(name) {\n const field = __classPrivateFieldGet(this, _FormData_entries, \"f\").get(String(name));\n if (!field) {\n return null;\n }\n return field[0];\n }\n getAll(name) {\n const field = __classPrivateFieldGet(this, _FormData_entries, \"f\").get(String(name));\n if (!field) {\n return [];\n }\n return field.slice();\n }\n has(name) {\n return __classPrivateFieldGet(this, _FormData_entries, \"f\").has(String(name));\n }\n delete(name) {\n __classPrivateFieldGet(this, _FormData_entries, \"f\").delete(String(name));\n }\n *keys() {\n for (const key of __classPrivateFieldGet(this, _FormData_entries, \"f\").keys()) {\n yield key;\n }\n }\n *entries() {\n for (const name of this.keys()) {\n const values = this.getAll(name);\n for (const value of values) {\n yield [name, value];\n }\n }\n }\n *values() {\n for (const [, value] of this) {\n yield value;\n }\n }\n [(_FormData_setEntry = function _FormData_setEntry({ name, rawValue, append, fileName, argsLength }) {\n const methodName = append ? \"append\" : \"set\";\n if (argsLength < 2) {\n throw new TypeError(`Failed to execute '${methodName}' on 'FormData': `\n + `2 arguments required, but only ${argsLength} present.`);\n }\n name = String(name);\n let value;\n if ((0, isFile_1.isFile)(rawValue)) {\n value = fileName === undefined\n ? rawValue\n : new File_1.File([rawValue], fileName, {\n type: rawValue.type,\n lastModified: rawValue.lastModified\n });\n }\n else if ((0, isBlob_1.isBlob)(rawValue)) {\n value = new File_1.File([rawValue], fileName === undefined ? 
\"blob\" : fileName, {\n type: rawValue.type\n });\n }\n else if (fileName) {\n throw new TypeError(`Failed to execute '${methodName}' on 'FormData': `\n + \"parameter 2 is not of type 'Blob'.\");\n }\n else {\n value = String(rawValue);\n }\n const values = __classPrivateFieldGet(this, _FormData_entries, \"f\").get(name);\n if (!values) {\n return void __classPrivateFieldGet(this, _FormData_entries, \"f\").set(name, [value]);\n }\n if (!append) {\n return void __classPrivateFieldGet(this, _FormData_entries, \"f\").set(name, [value]);\n }\n values.push(value);\n }, Symbol.iterator)]() {\n return this.entries();\n }\n forEach(callback, thisArg) {\n for (const [name, value] of this) {\n callback.call(thisArg, value, name, this);\n }\n }\n get [Symbol.toStringTag]() {\n return \"FormData\";\n }\n [util_1.inspect.custom]() {\n return this[Symbol.toStringTag];\n }\n}\nexports.FormData = FormData;\n","\"use strict\";\n/*! Based on fetch-blob. MIT License. Jimmy Wärting & David Frank */\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.sliceBlob = exports.consumeBlobParts = void 0;\nconst isFunction_1 = require(\"./isFunction\");\nconst CHUNK_SIZE = 65536;\nasync function* clonePart(part) {\n const end = part.byteOffset + part.byteLength;\n let position = part.byteOffset;\n while (position !== end) {\n const size = Math.min(end - position, CHUNK_SIZE);\n const chunk = part.buffer.slice(position, position + size);\n position += chunk.byteLength;\n yield new Uint8Array(chunk);\n }\n}\nasync function* consumeNodeBlob(blob) {\n let position = 0;\n while (position !== blob.size) {\n const chunk = blob.slice(position, Math.min(blob.size, position + CHUNK_SIZE));\n const buffer = await chunk.arrayBuffer();\n position += buffer.byteLength;\n yield new Uint8Array(buffer);\n }\n}\nasync function* consumeBlobParts(parts, clone = false) {\n for (const part of parts) {\n if (ArrayBuffer.isView(part)) {\n if (clone) {\n yield* clonePart(part);\n }\n else {\n yield part;\n }\n }\n else if ((0, isFunction_1.isFunction)(part.stream)) {\n yield* part.stream();\n }\n else {\n yield* consumeNodeBlob(part);\n }\n }\n}\nexports.consumeBlobParts = consumeBlobParts;\nfunction* sliceBlob(blobParts, blobSize, start = 0, end) {\n end !== null && end !== void 0 ? end : (end = blobSize);\n let relativeStart = start < 0\n ? Math.max(blobSize + start, 0)\n : Math.min(start, blobSize);\n let relativeEnd = end < 0\n ? Math.max(blobSize + end, 0)\n : Math.min(end, blobSize);\n const span = Math.max(relativeEnd - relativeStart, 0);\n let added = 0;\n for (const part of blobParts) {\n if (added >= span) {\n break;\n }\n const partSize = ArrayBuffer.isView(part) ? 
part.byteLength : part.size;\n if (relativeStart && partSize <= relativeStart) {\n relativeStart -= partSize;\n relativeEnd -= partSize;\n }\n else {\n let chunk;\n if (ArrayBuffer.isView(part)) {\n chunk = part.subarray(relativeStart, Math.min(partSize, relativeEnd));\n added += chunk.byteLength;\n }\n else {\n chunk = part.slice(relativeStart, Math.min(partSize, relativeEnd));\n added += chunk.size;\n }\n relativeEnd -= partSize;\n relativeStart = 0;\n yield chunk;\n }\n }\n}\nexports.sliceBlob = sliceBlob;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.deprecateConstructorEntries = void 0;\nconst util_1 = require(\"util\");\nexports.deprecateConstructorEntries = (0, util_1.deprecate)(() => { }, \"Constructor \\\"entries\\\" argument is not spec-compliant \"\n + \"and will be removed in next major release.\");\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? 
mod : { \"default\": mod };\n};\nvar _FileFromPath_path, _FileFromPath_start;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fileFromPath = exports.fileFromPathSync = void 0;\nconst fs_1 = require(\"fs\");\nconst path_1 = require(\"path\");\nconst node_domexception_1 = __importDefault(require(\"node-domexception\"));\nconst File_1 = require(\"./File\");\nconst isPlainObject_1 = __importDefault(require(\"./isPlainObject\"));\n__exportStar(require(\"./isFile\"), exports);\nconst MESSAGE = \"The requested file could not be read, \"\n + \"typically due to permission problems that have occurred after a reference \"\n + \"to a file was acquired.\";\nclass FileFromPath {\n constructor(input) {\n _FileFromPath_path.set(this, void 0);\n _FileFromPath_start.set(this, void 0);\n __classPrivateFieldSet(this, _FileFromPath_path, input.path, \"f\");\n __classPrivateFieldSet(this, _FileFromPath_start, input.start || 0, \"f\");\n this.name = (0, path_1.basename)(__classPrivateFieldGet(this, _FileFromPath_path, \"f\"));\n this.size = input.size;\n this.lastModified = input.lastModified;\n }\n slice(start, end) {\n return new FileFromPath({\n path: __classPrivateFieldGet(this, _FileFromPath_path, \"f\"),\n lastModified: this.lastModified,\n size: end - start,\n start\n });\n }\n async *stream() {\n const { mtimeMs } = await fs_1.promises.stat(__classPrivateFieldGet(this, _FileFromPath_path, \"f\"));\n if (mtimeMs > this.lastModified) {\n throw new node_domexception_1.default(MESSAGE, \"NotReadableError\");\n }\n if (this.size) {\n yield* (0, fs_1.createReadStream)(__classPrivateFieldGet(this, _FileFromPath_path, \"f\"), {\n start: __classPrivateFieldGet(this, _FileFromPath_start, \"f\"),\n end: __classPrivateFieldGet(this, _FileFromPath_start, \"f\") + this.size - 1\n });\n }\n }\n get [(_FileFromPath_path = new WeakMap(), _FileFromPath_start = new WeakMap(), Symbol.toStringTag)]() {\n return \"File\";\n }\n}\nfunction createFileFromPath(path, { mtimeMs, size }, filenameOrOptions, options = {}) {\n let filename;\n if ((0, isPlainObject_1.default)(filenameOrOptions)) {\n [options, filename] = [filenameOrOptions, undefined];\n }\n else {\n filename = filenameOrOptions;\n }\n const file = new FileFromPath({ path, size, lastModified: mtimeMs });\n if (!filename) {\n filename = file.name;\n }\n return new File_1.File([file], filename, {\n ...options, lastModified: file.lastModified\n });\n}\nfunction fileFromPathSync(path, filenameOrOptions, options = {}) {\n const stats = (0, fs_1.statSync)(path);\n return createFileFromPath(path, stats, filenameOrOptions, options);\n}\nexports.fileFromPathSync = fileFromPathSync;\nasync function fileFromPath(path, filenameOrOptions, options) {\n const stats = await fs_1.promises.stat(path);\n return createFileFromPath(path, stats, filenameOrOptions, options);\n}\nexports.fileFromPath = fileFromPath;\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? 
(function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });\n}) : (function(o, m, k, k2) {\n if (k2 === undefined) k2 = k;\n o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n__exportStar(require(\"./FormData\"), exports);\n__exportStar(require(\"./Blob\"), exports);\n__exportStar(require(\"./File\"), exports);\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isBlob = void 0;\nconst Blob_1 = require(\"./Blob\");\nconst isBlob = (value) => value instanceof Blob_1.Blob;\nexports.isBlob = isBlob;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFile = void 0;\nconst File_1 = require(\"./File\");\nconst isFile = (value) => value instanceof File_1.File;\nexports.isFile = isFile;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isFunction = void 0;\nconst isFunction = (value) => (typeof value === \"function\");\nexports.isFunction = isFunction;\n","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nconst getType = (value) => (Object.prototype.toString.call(value).slice(8, -1).toLowerCase());\nfunction isPlainObject(value) {\n if (getType(value) !== \"object\") {\n return false;\n }\n const pp = Object.getPrototypeOf(value);\n if (pp === null || pp === undefined) {\n return true;\n }\n const Ctor = pp.constructor && pp.constructor.toString();\n return Ctor === Object.toString();\n}\nexports.default = isPlainObject;\n","\"use strict\";\n// translate the various posix character classes into unicode properties\n// this works across all unicode locales\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.parseClass = void 0;\n// { : [, /u flag required, negated]\nconst posixClasses = {\n '[:alnum:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}', true],\n '[:alpha:]': ['\\\\p{L}\\\\p{Nl}', true],\n '[:ascii:]': ['\\\\x' + '00-\\\\x' + '7f', false],\n '[:blank:]': ['\\\\p{Zs}\\\\t', true],\n '[:cntrl:]': ['\\\\p{Cc}', true],\n '[:digit:]': ['\\\\p{Nd}', true],\n '[:graph:]': ['\\\\p{Z}\\\\p{C}', true, true],\n '[:lower:]': ['\\\\p{Ll}', true],\n '[:print:]': ['\\\\p{C}', true],\n '[:punct:]': ['\\\\p{P}', true],\n '[:space:]': ['\\\\p{Z}\\\\t\\\\r\\\\n\\\\v\\\\f', true],\n '[:upper:]': ['\\\\p{Lu}', true],\n '[:word:]': ['\\\\p{L}\\\\p{Nl}\\\\p{Nd}\\\\p{Pc}', true],\n '[:xdigit:]': ['A-Fa-f0-9', false],\n};\n// only need to escape a few things inside of brace expressions\n// escapes: [ \\ ] -\nconst braceEscape = (s) => s.replace(/[[\\]\\\\-]/g, '\\\\$&');\n// escape all regexp magic characters\nconst regexpEscape = (s) => s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\n// everything has already been escaped, we just have to join\nconst rangesToString = (ranges) => ranges.join('');\n// takes a glob string at a posix brace expression, and returns\n// an equivalent regular expression source, and boolean indicating\n// whether the /u flag needs to be applied, and the number of chars\n// consumed to parse the character class.\n// This also removes out of order ranges, and returns ($.) 
if the\n// entire class just no good.\nconst parseClass = (glob, position) => {\n const pos = position;\n /* c8 ignore start */\n if (glob.charAt(pos) !== '[') {\n throw new Error('not in a brace expression');\n }\n /* c8 ignore stop */\n const ranges = [];\n const negs = [];\n let i = pos + 1;\n let sawStart = false;\n let uflag = false;\n let escaping = false;\n let negate = false;\n let endPos = pos;\n let rangeStart = '';\n WHILE: while (i < glob.length) {\n const c = glob.charAt(i);\n if ((c === '!' || c === '^') && i === pos + 1) {\n negate = true;\n i++;\n continue;\n }\n if (c === ']' && sawStart && !escaping) {\n endPos = i + 1;\n break;\n }\n sawStart = true;\n if (c === '\\\\') {\n if (!escaping) {\n escaping = true;\n i++;\n continue;\n }\n // escaped \\ char, fall through and treat like normal char\n }\n if (c === '[' && !escaping) {\n // either a posix class, a collation equivalent, or just a [\n for (const [cls, [unip, u, neg]] of Object.entries(posixClasses)) {\n if (glob.startsWith(cls, i)) {\n // invalid, [a-[] is fine, but not [a-[:alpha]]\n if (rangeStart) {\n return ['$.', false, glob.length - pos, true];\n }\n i += cls.length;\n if (neg)\n negs.push(unip);\n else\n ranges.push(unip);\n uflag = uflag || u;\n continue WHILE;\n }\n }\n }\n // now it's just a normal character, effectively\n escaping = false;\n if (rangeStart) {\n // throw this range away if it's not valid, but others\n // can still match.\n if (c > rangeStart) {\n ranges.push(braceEscape(rangeStart) + '-' + braceEscape(c));\n }\n else if (c === rangeStart) {\n ranges.push(braceEscape(c));\n }\n rangeStart = '';\n i++;\n continue;\n }\n // now might be the start of a range.\n // can be either c-d or c-] or c] or c] at this point\n if (glob.startsWith('-]', i + 1)) {\n ranges.push(braceEscape(c + '-'));\n i += 2;\n continue;\n }\n if (glob.startsWith('-', i + 1)) {\n rangeStart = c;\n i += 2;\n continue;\n }\n // not the start of a range, just a single character\n ranges.push(braceEscape(c));\n i++;\n }\n if (endPos < i) {\n // didn't see the end of the class, not a valid class,\n // but might still be valid as a literal match.\n return ['', false, 0, false];\n }\n // if we got no ranges and no negates, then we have a range that\n // cannot possibly match anything, and that poisons the whole glob\n if (!ranges.length && !negs.length) {\n return ['$.', false, glob.length - pos, true];\n }\n // if we got one positive range, and it's a single character, then that's\n // not actually a magic pattern, it's just that one literal character.\n // we should not treat that as \"magic\", we should just return the literal\n // character. [_] is a perfectly valid way to escape glob magic chars.\n if (negs.length === 0 &&\n ranges.length === 1 &&\n /^\\\\?.$/.test(ranges[0]) &&\n !negate) {\n const r = ranges[0].length === 2 ? ranges[0].slice(-1) : ranges[0];\n return [regexpEscape(r), false, endPos - pos, false];\n }\n const sranges = '[' + (negate ? '^' : '') + rangesToString(ranges) + ']';\n const snegs = '[' + (negate ? '' : '^') + rangesToString(negs) + ']';\n const comb = ranges.length && negs.length\n ? '(' + sranges + '|' + snegs + ')'\n : ranges.length\n ? 
sranges\n : snegs;\n return [comb, uflag, endPos - pos, true];\n};\nexports.parseClass = parseClass;\n//# sourceMappingURL=brace-expressions.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.escape = void 0;\n/**\n * Escape all magic characters in a glob pattern.\n *\n * If the {@link windowsPathsNoEscape | GlobOptions.windowsPathsNoEscape}\n * option is used, then characters are escaped by wrapping in `[]`, because\n * a magic character wrapped in a character class can only be satisfied by\n * that exact character. In this mode, `\\` is _not_ escaped, because it is\n * not interpreted as a magic character, but instead as a path separator.\n */\nconst escape = (s, { windowsPathsNoEscape = false, } = {}) => {\n // don't need to escape +@! because we escape the parens\n // that make those magic, and escaping ! as [!] isn't valid,\n // because [!]] is a valid glob class meaning not ']'.\n return windowsPathsNoEscape\n ? s.replace(/[?*()[\\]]/g, '[$&]')\n : s.replace(/[?*()[\\]\\\\]/g, '\\\\$&');\n};\nexports.escape = escape;\n//# sourceMappingURL=escape.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nconst index_js_1 = __importDefault(require(\"./index.js\"));\nmodule.exports = Object.assign(index_js_1.default, { default: index_js_1.default, minimatch: index_js_1.default });\n//# sourceMappingURL=index-cjs.js.map","\"use strict\";\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.unescape = exports.escape = exports.Minimatch = exports.match = exports.makeRe = exports.braceExpand = exports.defaults = exports.filter = exports.GLOBSTAR = exports.sep = exports.minimatch = void 0;\nconst brace_expansion_1 = __importDefault(require(\"brace-expansion\"));\nconst brace_expressions_js_1 = require(\"./brace-expressions.js\");\nconst escape_js_1 = require(\"./escape.js\");\nconst unescape_js_1 = require(\"./unescape.js\");\nconst minimatch = (p, pattern, options = {}) => {\n assertValidPattern(pattern);\n // shortcut: comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n return false;\n }\n return new Minimatch(pattern, options).match(p);\n};\nexports.minimatch = minimatch;\nexports.default = exports.minimatch;\n// Optimized checking for the most common glob patterns.\nconst starDotExtRE = /^\\*+([^+@!?\\*\\[\\(]*)$/;\nconst starDotExtTest = (ext) => (f) => !f.startsWith('.') && f.endsWith(ext);\nconst starDotExtTestDot = (ext) => (f) => f.endsWith(ext);\nconst starDotExtTestNocase = (ext) => {\n ext = ext.toLowerCase();\n return (f) => !f.startsWith('.') && f.toLowerCase().endsWith(ext);\n};\nconst starDotExtTestNocaseDot = (ext) => {\n ext = ext.toLowerCase();\n return (f) => f.toLowerCase().endsWith(ext);\n};\nconst starDotStarRE = /^\\*+\\.\\*+$/;\nconst starDotStarTest = (f) => !f.startsWith('.') && f.includes('.');\nconst starDotStarTestDot = (f) => f !== '.' && f !== '..' && f.includes('.');\nconst dotStarRE = /^\\.\\*+$/;\nconst dotStarTest = (f) => f !== '.' && f !== '..' && f.startsWith('.');\nconst starRE = /^\\*+$/;\nconst starTest = (f) => f.length !== 0 && !f.startsWith('.');\nconst starTestDot = (f) => f.length !== 0 && f !== '.' 
&& f !== '..';\nconst qmarksRE = /^\\?+([^+@!?\\*\\[\\(]*)?$/;\nconst qmarksTestNocase = ([$0, ext = '']) => {\n const noext = qmarksTestNoExt([$0]);\n if (!ext)\n return noext;\n ext = ext.toLowerCase();\n return (f) => noext(f) && f.toLowerCase().endsWith(ext);\n};\nconst qmarksTestNocaseDot = ([$0, ext = '']) => {\n const noext = qmarksTestNoExtDot([$0]);\n if (!ext)\n return noext;\n ext = ext.toLowerCase();\n return (f) => noext(f) && f.toLowerCase().endsWith(ext);\n};\nconst qmarksTestDot = ([$0, ext = '']) => {\n const noext = qmarksTestNoExtDot([$0]);\n return !ext ? noext : (f) => noext(f) && f.endsWith(ext);\n};\nconst qmarksTest = ([$0, ext = '']) => {\n const noext = qmarksTestNoExt([$0]);\n return !ext ? noext : (f) => noext(f) && f.endsWith(ext);\n};\nconst qmarksTestNoExt = ([$0]) => {\n const len = $0.length;\n return (f) => f.length === len && !f.startsWith('.');\n};\nconst qmarksTestNoExtDot = ([$0]) => {\n const len = $0.length;\n return (f) => f.length === len && f !== '.' && f !== '..';\n};\n/* c8 ignore start */\nconst defaultPlatform = (typeof process === 'object' && process\n ? (typeof process.env === 'object' &&\n process.env &&\n process.env.__MINIMATCH_TESTING_PLATFORM__) ||\n process.platform\n : 'posix');\nconst path = {\n win32: { sep: '\\\\' },\n posix: { sep: '/' },\n};\n/* c8 ignore stop */\nexports.sep = defaultPlatform === 'win32' ? path.win32.sep : path.posix.sep;\nexports.minimatch.sep = exports.sep;\nexports.GLOBSTAR = Symbol('globstar **');\nexports.minimatch.GLOBSTAR = exports.GLOBSTAR;\nconst plTypes = {\n '!': { open: '(?:(?!(?:', close: '))[^/]*?)' },\n '?': { open: '(?:', close: ')?' },\n '+': { open: '(?:', close: ')+' },\n '*': { open: '(?:', close: ')*' },\n '@': { open: '(?:', close: ')' },\n};\n// any single thing other than /\n// don't need to escape / when using new RegExp()\nconst qmark = '[^/]';\n// * => any number of characters\nconst star = qmark + '*?';\n// ** when dots are allowed. Anything goes, except .. 
and .\n// not (^ or / followed by one or two dots followed by $ or /),\n// followed by anything, any number of times.\nconst twoStarDot = '(?:(?!(?:\\\\/|^)(?:\\\\.{1,2})($|\\\\/)).)*?';\n// not a ^ or / followed by a dot,\n// followed by anything, any number of times.\nconst twoStarNoDot = '(?:(?!(?:\\\\/|^)\\\\.).)*?';\n// \"abc\" -> { a:true, b:true, c:true }\nconst charSet = (s) => s.split('').reduce((set, c) => {\n set[c] = true;\n return set;\n}, {});\n// characters that need to be escaped in RegExp.\nconst reSpecials = charSet('().*{}+?[]^$\\\\!');\n// characters that indicate we have to add the pattern start\nconst addPatternStartSet = charSet('[.(');\nconst filter = (pattern, options = {}) => (p) => (0, exports.minimatch)(p, pattern, options);\nexports.filter = filter;\nexports.minimatch.filter = exports.filter;\nconst ext = (a, b = {}) => Object.assign({}, a, b);\nconst defaults = (def) => {\n if (!def || typeof def !== 'object' || !Object.keys(def).length) {\n return exports.minimatch;\n }\n const orig = exports.minimatch;\n const m = (p, pattern, options = {}) => orig(p, pattern, ext(def, options));\n return Object.assign(m, {\n Minimatch: class Minimatch extends orig.Minimatch {\n constructor(pattern, options = {}) {\n super(pattern, ext(def, options));\n }\n static defaults(options) {\n return orig.defaults(ext(def, options)).Minimatch;\n }\n },\n unescape: (s, options = {}) => orig.unescape(s, ext(def, options)),\n escape: (s, options = {}) => orig.escape(s, ext(def, options)),\n filter: (pattern, options = {}) => orig.filter(pattern, ext(def, options)),\n defaults: (options) => orig.defaults(ext(def, options)),\n makeRe: (pattern, options = {}) => orig.makeRe(pattern, ext(def, options)),\n braceExpand: (pattern, options = {}) => orig.braceExpand(pattern, ext(def, options)),\n match: (list, pattern, options = {}) => orig.match(list, pattern, ext(def, options)),\n sep: orig.sep,\n GLOBSTAR: exports.GLOBSTAR,\n });\n};\nexports.defaults = defaults;\nexports.minimatch.defaults = exports.defaults;\n// Brace expansion:\n// a{b,c}d -> abd acd\n// a{b,}c -> abc ac\n// a{0..3}d -> a0d a1d a2d a3d\n// a{b,c{d,e}f}g -> abg acdfg acefg\n// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg\n//\n// Invalid sets are not expanded.\n// a{2..}b -> a{2..}b\n// a{b}c -> a{b}c\nconst braceExpand = (pattern, options = {}) => {\n assertValidPattern(pattern);\n // Thanks to Yeting Li for\n // improving this regexp to avoid a ReDOS vulnerability.\n if (options.nobrace || !/\\{(?:(?!\\{).)*\\}/.test(pattern)) {\n // shortcut. no need to expand.\n return [pattern];\n }\n return (0, brace_expansion_1.default)(pattern);\n};\nexports.braceExpand = braceExpand;\nexports.minimatch.braceExpand = exports.braceExpand;\nconst MAX_PATTERN_LENGTH = 1024 * 64;\nconst assertValidPattern = (pattern) => {\n if (typeof pattern !== 'string') {\n throw new TypeError('invalid pattern');\n }\n if (pattern.length > MAX_PATTERN_LENGTH) {\n throw new TypeError('pattern is too long');\n }\n};\n// parse a component of the expanded set.\n// At this point, no pattern may contain \"/\" in it\n// so we're going to return a 2d array, where each entry is the full\n// pattern, split on '/', and then turned into a regular expression.\n// A regexp is made at the end which joins each array with an\n// escaped /, and another full one which joins each regexp with |.\n//\n// Following the lead of Bash 4.1, note that \"**\" only has special meaning\n// when it is the *only* thing in a path portion. 
Otherwise, any series\n// of * is equivalent to a single *. Globstar behavior is enabled by\n// default, and can be disabled by setting options.noglobstar.\nconst makeRe = (pattern, options = {}) => new Minimatch(pattern, options).makeRe();\nexports.makeRe = makeRe;\nexports.minimatch.makeRe = exports.makeRe;\nconst match = (list, pattern, options = {}) => {\n const mm = new Minimatch(pattern, options);\n list = list.filter(f => mm.match(f));\n if (mm.options.nonull && !list.length) {\n list.push(pattern);\n }\n return list;\n};\nexports.match = match;\nexports.minimatch.match = exports.match;\n// replace stuff like \\* with *\nconst globUnescape = (s) => s.replace(/\\\\(.)/g, '$1');\nconst globMagic = /[?*]|[+@!]\\(.*?\\)|\\[|\\]/;\nconst regExpEscape = (s) => s.replace(/[-[\\]{}()*+?.,\\\\^$|#\\s]/g, '\\\\$&');\nclass Minimatch {\n options;\n set;\n pattern;\n windowsPathsNoEscape;\n nonegate;\n negate;\n comment;\n empty;\n preserveMultipleSlashes;\n partial;\n globSet;\n globParts;\n nocase;\n isWindows;\n platform;\n windowsNoMagicRoot;\n regexp;\n constructor(pattern, options = {}) {\n assertValidPattern(pattern);\n options = options || {};\n this.options = options;\n this.pattern = pattern;\n this.platform = options.platform || defaultPlatform;\n this.isWindows = this.platform === 'win32';\n this.windowsPathsNoEscape =\n !!options.windowsPathsNoEscape || options.allowWindowsEscape === false;\n if (this.windowsPathsNoEscape) {\n this.pattern = this.pattern.replace(/\\\\/g, '/');\n }\n this.preserveMultipleSlashes = !!options.preserveMultipleSlashes;\n this.regexp = null;\n this.negate = false;\n this.nonegate = !!options.nonegate;\n this.comment = false;\n this.empty = false;\n this.partial = !!options.partial;\n this.nocase = !!this.options.nocase;\n this.windowsNoMagicRoot =\n options.windowsNoMagicRoot !== undefined\n ? options.windowsNoMagicRoot\n : !!(this.isWindows && this.nocase);\n this.globSet = [];\n this.globParts = [];\n this.set = [];\n // make the set of regexps etc.\n this.make();\n }\n hasMagic() {\n if (this.options.magicalBraces && this.set.length > 1) {\n return true;\n }\n for (const pattern of this.set) {\n for (const part of pattern) {\n if (typeof part !== 'string')\n return true;\n }\n }\n return false;\n }\n debug(..._) { }\n make() {\n const pattern = this.pattern;\n const options = this.options;\n // empty patterns and comments match nothing.\n if (!options.nocomment && pattern.charAt(0) === '#') {\n this.comment = true;\n return;\n }\n if (!pattern) {\n this.empty = true;\n return;\n }\n // step 1: figure out negation, etc.\n this.parseNegate();\n // step 2: expand braces\n this.globSet = [...new Set(this.braceExpand())];\n if (options.debug) {\n this.debug = (...args) => console.error(...args);\n }\n this.debug(this.pattern, this.globSet);\n // step 3: now we have a set, so turn each one into a series of\n // path-portion matching patterns.\n // These will be regexps, except in the case of \"**\", which is\n // set to the GLOBSTAR object for globstar behavior,\n // and will not contain any / characters\n //\n // First, we preprocess to make the glob pattern sets a bit simpler\n // and deduped. 
There are some perf-killing patterns that can cause\n // problems with a glob walk, but we can simplify them down a bit.\n const rawGlobParts = this.globSet.map(s => this.slashSplit(s));\n this.globParts = this.preprocess(rawGlobParts);\n this.debug(this.pattern, this.globParts);\n // glob --> regexps\n let set = this.globParts.map((s, _, __) => {\n if (this.isWindows && this.windowsNoMagicRoot) {\n // check if it's a drive or unc path.\n const isUNC = s[0] === '' &&\n s[1] === '' &&\n (s[2] === '?' || !globMagic.test(s[2])) &&\n !globMagic.test(s[3]);\n const isDrive = /^[a-z]:/i.test(s[0]);\n if (isUNC) {\n return [...s.slice(0, 4), ...s.slice(4).map(ss => this.parse(ss))];\n }\n else if (isDrive) {\n return [s[0], ...s.slice(1).map(ss => this.parse(ss))];\n }\n }\n return s.map(ss => this.parse(ss));\n });\n this.debug(this.pattern, set);\n // filter out everything that didn't compile properly.\n this.set = set.filter(s => s.indexOf(false) === -1);\n // do not treat the ? in UNC paths as magic\n if (this.isWindows) {\n for (let i = 0; i < this.set.length; i++) {\n const p = this.set[i];\n if (p[0] === '' &&\n p[1] === '' &&\n this.globParts[i][2] === '?' &&\n typeof p[3] === 'string' &&\n /^[a-z]:$/i.test(p[3])) {\n p[2] = '?';\n }\n }\n }\n this.debug(this.pattern, this.set);\n }\n // various transforms to equivalent pattern sets that are\n // faster to process in a filesystem walk. The goal is to\n // eliminate what we can, and push all ** patterns as far\n // to the right as possible, even if it increases the number\n // of patterns that we have to process.\n preprocess(globParts) {\n // if we're not in globstar mode, then turn all ** into *\n if (this.options.noglobstar) {\n for (let i = 0; i < globParts.length; i++) {\n for (let j = 0; j < globParts[i].length; j++) {\n if (globParts[i][j] === '**') {\n globParts[i][j] = '*';\n }\n }\n }\n }\n const { optimizationLevel = 1 } = this.options;\n if (optimizationLevel >= 2) {\n // aggressive optimization for the purpose of fs walking\n globParts = this.firstPhasePreProcess(globParts);\n globParts = this.secondPhasePreProcess(globParts);\n }\n else if (optimizationLevel >= 1) {\n // just basic optimizations to remove some .. parts\n globParts = this.levelOneOptimize(globParts);\n }\n else {\n globParts = this.adjascentGlobstarOptimize(globParts);\n }\n return globParts;\n }\n // just get rid of adjascent ** portions\n adjascentGlobstarOptimize(globParts) {\n return globParts.map(parts => {\n let gs = -1;\n while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n let i = gs;\n while (parts[i + 1] === '**') {\n i++;\n }\n if (i !== gs) {\n parts.splice(gs, i - gs);\n }\n }\n return parts;\n });\n }\n // get rid of adjascent ** and resolve .. portions\n levelOneOptimize(globParts) {\n return globParts.map(parts => {\n parts = parts.reduce((set, part) => {\n const prev = set[set.length - 1];\n if (part === '**' && prev === '**') {\n return set;\n }\n if (part === '..') {\n if (prev && prev !== '..' && prev !== '.' && prev !== '**') {\n set.pop();\n return set;\n }\n }\n set.push(part);\n return set;\n }, []);\n return parts.length === 0 ? [''] : parts;\n });\n }\n levelTwoFileOptimize(parts) {\n if (!Array.isArray(parts)) {\n parts = this.slashSplit(parts);\n }\n let didSomething = false;\n do {\n didSomething = false;\n //
<pre>/<e>/<rest> -> <pre>/<rest>\n            if (!this.preserveMultipleSlashes) {\n                for (let i = 1; i < parts.length - 1; i++) {\n                    const p = parts[i];\n                    // don't squeeze out UNC patterns\n                    if (i === 1 && p === '' && parts[0] === '')\n                        continue;\n                    if (p === '.' || p === '') {\n                        didSomething = true;\n                        parts.splice(i, 1);\n                        i--;\n                    }\n                }\n                if (parts[0] === '.' &&\n                    parts.length === 2 &&\n                    (parts[1] === '.' || parts[1] === '')) {\n                    didSomething = true;\n                    parts.pop();\n                }\n            }\n            // <pre>/<p>/../<rest> -> <pre>/<rest>\n            let dd = 0;\n            while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n                const p = parts[dd - 1];\n                if (p && p !== '.' && p !== '..' && p !== '**') {\n                    didSomething = true;\n                    parts.splice(dd - 1, 2);\n                    dd -= 2;\n                }\n            }\n        } while (didSomething);\n        return parts.length === 0 ? [''] : parts;\n    }\n    // First phase: single-pattern processing\n    // <pre> is 1 or more portions\n    // <rest> is 1 or more portions\n    // <p> is any portion other than ., .., '', or **\n    // <e> is . or ''\n    //\n    // **/.. is *brutal* for filesystem walking performance, because\n    // it effectively resets the recursive walk each time it occurs,\n    // and ** cannot be reduced out by a .. pattern part like a regexp\n    // or most strings (other than .., ., and '') can be.\n    //\n    // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}\n    // <pre>/<e>/<rest> -> <pre>/<rest>\n    // <pre>/<p>/../<rest> -> <pre>/<rest>\n    // **/**/<rest> -> **/<rest>\n    //\n    // **/*/<rest> -> */**/<rest> <== not valid because ** doesn't follow\n    // this WOULD be allowed if ** did follow symlinks, or * didn't\n    firstPhasePreProcess(globParts) {\n        let didSomething = false;\n        do {\n            didSomething = false;\n            // <pre>/**/../<p>/<p>/<rest> -> {<pre>/../<p>/<p>/<rest>,<pre>/**/<p>/<p>/<rest>}\n            for (let parts of globParts) {\n                let gs = -1;\n                while (-1 !== (gs = parts.indexOf('**', gs + 1))) {\n                    let gss = gs;\n                    while (parts[gss + 1] === '**') {\n                        // <pre>/**/**/<rest> -> <pre>/**/<rest>\n                        gss++;\n                    }\n                    // eg, if gs is 2 and gss is 4, that means we have 3 **\n                    // parts, and can remove 2 of them.\n                    if (gss > gs) {\n                        parts.splice(gs + 1, gss - gs);\n                    }\n                    let next = parts[gs + 1];\n                    const p = parts[gs + 2];\n                    const p2 = parts[gs + 3];\n                    if (next !== '..')\n                        continue;\n                    if (!p ||\n                        p === '.' ||\n                        p === '..' ||\n                        !p2 ||\n                        p2 === '.' ||\n                        p2 === '..') {\n                        continue;\n                    }\n                    didSomething = true;\n                    // edit parts in place, and push the new one\n                    parts.splice(gs, 1);\n                    const other = parts.slice(0);\n                    other[gs] = '**';\n                    globParts.push(other);\n                    gs--;\n                }\n                // <pre>/<e>/<rest> -> <pre>/<rest>\n                if (!this.preserveMultipleSlashes) {\n                    for (let i = 1; i < parts.length - 1; i++) {\n                        const p = parts[i];\n                        // don't squeeze out UNC patterns\n                        if (i === 1 && p === '' && parts[0] === '')\n                            continue;\n                        if (p === '.' || p === '') {\n                            didSomething = true;\n                            parts.splice(i, 1);\n                            i--;\n                        }\n                    }\n                    if (parts[0] === '.' &&\n                        parts.length === 2 &&\n                        (parts[1] === '.' || parts[1] === '')) {\n                        didSomething = true;\n                        parts.pop();\n                    }\n                }\n                // <pre>/<p>/../<rest> -> <pre>/<rest>\n                let dd = 0;\n                while (-1 !== (dd = parts.indexOf('..', dd + 1))) {\n                    const p = parts[dd - 1];\n                    if (p && p !== '.' && p !== '..' && p !== '**') {\n                        didSomething = true;\n                        const needDot = dd === 1 && parts[dd + 1] === '**';\n                        const splin = needDot ? ['.'] : [];\n                        parts.splice(dd - 1, 2, ...splin);\n                        if (parts.length === 0)\n                            parts.push('');\n                        dd -= 2;\n                    }\n                }\n            }\n        } while (didSomething);\n        return globParts;\n    }\n    // second phase: multi-pattern dedupes\n    // {<pre>/*/<rest>,<pre>/<p>/<rest>} -> <pre>/*/<rest>\n    // {<pre>/<rest>,<pre>/<rest>} -> <pre>/<rest>\n    // {<pre>/**/<rest>,<pre>/<rest>} -> <pre>/**/<rest>\n    //\n    // {<pre>/**/<rest>,<pre>/**/<p>/<rest>} -> <pre>/**/<rest>\n    // ^-- not valid because ** doens't follow symlinks\n    secondPhasePreProcess(globParts) {\n        for (let i = 0; i < globParts.length - 1; i++) {\n            for (let j = i + 1; j < globParts.length; j++) {\n                const matched = this.partsMatch(globParts[i], globParts[j], !this.preserveMultipleSlashes);\n                if (!matched)\n                    continue;\n                globParts[i] = matched;\n                globParts[j] = [];\n            }\n        }\n        return globParts.filter(gs => gs.length);\n    }\n    partsMatch(a, b, emptyGSMatch = false) {\n        let ai = 0;\n        let bi = 0;\n        let result = [];\n        let which = '';\n        while (ai < a.length && bi < b.length) {\n            if (a[ai] === b[bi]) {\n                result.push(which === 'b' ? b[bi] : a[ai]);\n                ai++;\n                bi++;\n            }\n            else if (emptyGSMatch && a[ai] === '**' && b[bi] === a[ai + 1]) {\n                result.push(a[ai]);\n                ai++;\n            }\n            else if (emptyGSMatch && b[bi] === '**' && a[ai] === b[bi + 1]) {\n                result.push(b[bi]);\n                bi++;\n            }\n            else if (a[ai] === '*' &&\n                b[bi] &&\n                (this.options.dot || !b[bi].startsWith('.')) &&\n                b[bi] !== '**') {\n                if (which === 'b')\n                    return false;\n                which = 'a';\n                result.push(a[ai]);\n                ai++;\n                bi++;\n            }\n            else if (b[bi] === '*' &&\n                a[ai] &&\n                (this.options.dot || !a[ai].startsWith('.')) &&\n                a[ai] !== '**') {\n                if (which === 'a')\n                    return false;\n                which = 'b';\n                result.push(b[bi]);\n                ai++;\n                bi++;\n            }\n            else {\n                return false;\n            }\n        }\n        // if we fall out of the loop, it means they two are identical\n        // as long as their lengths match\n        return a.length === b.length && result;\n    }\n    parseNegate() {\n        if (this.nonegate)\n            return;\n        const pattern = this.pattern;\n        let negate = false;\n        let negateOffset = 0;\n        for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {\n            negate = !negate;\n            negateOffset++;\n        }\n        if (negateOffset)\n            this.pattern = pattern.slice(negateOffset);\n        this.negate = negate;\n    }\n    // set partial to true to test if, for example,\n    // \"/a/b\" matches the start of \"/*/b/*/d\"\n    // Partial means, if you run out of file before you run\n    // out of pattern, then that's fine, as long as all\n    // the parts match.\n    matchOne(file, pattern, partial = false) {\n        const options = this.options;\n        // a UNC pattern like //?/c:/* can match a path like c:/x\n        // and vice versa\n        if (this.isWindows) {\n            const fileUNC = file[0] === '' &&\n                file[1] === '' &&\n                file[2] === '?' &&\n                typeof file[3] === 'string' &&\n                /^[a-z]:$/i.test(file[3]);\n            const patternUNC = pattern[0] === '' &&\n                pattern[1] === '' &&\n                pattern[2] === '?' 
&&\n                typeof pattern[3] === 'string' &&\n                /^[a-z]:$/i.test(pattern[3]);\n            if (fileUNC && patternUNC) {\n                const fd = file[3];\n                const pd = pattern[3];\n                if (fd.toLowerCase() === pd.toLowerCase()) {\n                    file[3] = pd;\n                }\n            }\n            else if (patternUNC && typeof file[0] === 'string') {\n                const pd = pattern[3];\n                const fd = file[0];\n                if (pd.toLowerCase() === fd.toLowerCase()) {\n                    pattern[3] = fd;\n                    pattern = pattern.slice(3);\n                }\n            }\n            else if (fileUNC && typeof pattern[0] === 'string') {\n                const fd = file[3];\n                if (fd.toLowerCase() === pattern[0].toLowerCase()) {\n                    pattern[0] = fd;\n                    file = file.slice(3);\n                }\n            }\n        }\n        // resolve and reduce . and .. portions in the file as well.\n        // dont' need to do the second phase, because it's only one string[]\n        const { optimizationLevel = 1 } = this.options;\n        if (optimizationLevel >= 2) {\n            file = this.levelTwoFileOptimize(file);\n        }\n        this.debug('matchOne', this, { file, pattern });\n        this.debug('matchOne', file.length, pattern.length);\n        for (var fi = 0, pi = 0, fl = file.length, pl = pattern.length; fi < fl && pi < pl; fi++, pi++) {\n            this.debug('matchOne loop');\n            var p = pattern[pi];\n            var f = file[fi];\n            this.debug(pattern, p, f);\n            // should be impossible.\n            // some invalid regexp stuff in the set.\n            /* c8 ignore start */\n            if (p === false) {\n                return false;\n            }\n            /* c8 ignore stop */\n            if (p === exports.GLOBSTAR) {\n                this.debug('GLOBSTAR', [pattern, p, f]);\n                // \"**\"\n                // a/**/b/**/c would match the following:\n                // a/b/x/y/z/c\n                // a/x/y/z/b/c\n                // a/b/x/b/x/c\n                // a/b/c\n                // To do this, take the rest of the pattern after\n                // the **, and see if it would match the file remainder.\n                // If so, return success.\n                // If not, the ** \"swallows\" a segment, and try again.\n                // This is recursively awful.\n                //\n                // a/**/b/**/c matching a/b/x/y/z/c\n                // - a matches a\n                // - doublestar\n                //   - matchOne(b/x/y/z/c, b/**/c)\n                //     - b matches b\n                //     - doublestar\n                //       - matchOne(x/y/z/c, c) -> no\n                //       - matchOne(y/z/c, c) -> no\n                //       - matchOne(z/c, c) -> no\n                //       - matchOne(c, c) yes, hit\n                var fr = fi;\n                var pr = pi + 1;\n                if (pr === pl) {\n                    this.debug('** at the end');\n                    // a ** at the end will just swallow the rest.\n                    // We have found a match.\n                    // however, it will not swallow /.x, unless\n                    // options.dot is set.\n                    // . and .. 
are *never* matched by **, for explosively\n                    // exponential reasons.\n                    for (; fi < fl; fi++) {\n                        if (file[fi] === '.' ||\n                            file[fi] === '..' ||\n                            (!options.dot && file[fi].charAt(0) === '.'))\n                            return false;\n                    }\n                    return true;\n                }\n                // ok, let's see if we can swallow whatever we can.\n                while (fr < fl) {\n                    var swallowee = file[fr];\n                    this.debug('\\nglobstar while', file, fr, pattern, pr, swallowee);\n                    // XXX remove this slice.  Just pass the start index.\n                    if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {\n                        this.debug('globstar found match!', fr, fl, swallowee);\n                        // found a match.\n                        return true;\n                    }\n                    else {\n                        // can't swallow \".\" or \"..\" ever.\n                        // can only swallow \".foo\" when explicitly asked.\n                        if (swallowee === '.' ||\n                            swallowee === '..' ||\n                            (!options.dot && swallowee.charAt(0) === '.')) {\n                            this.debug('dot detected!', file, fr, pattern, pr);\n                            break;\n                        }\n                        // ** swallows a segment, and continue.\n                        this.debug('globstar swallow a segment, and continue');\n                        fr++;\n                    }\n                }\n                // no match was found.\n                // However, in partial mode, we can't say this is necessarily over.\n                /* c8 ignore start */\n                if (partial) {\n                    // ran out of file\n                    this.debug('\\n>>> no match, partial?', file, fr, pattern, pr);\n                    if (fr === fl) {\n                        return true;\n                    }\n                }\n                /* c8 ignore stop */\n                return false;\n            }\n            // something other than **\n            // non-magic patterns just have to match exactly\n            // patterns with magic have been turned into regexps.\n            let hit;\n            if (typeof p === 'string') {\n                hit = f === p;\n                this.debug('string match', p, f, hit);\n            }\n            else {\n                hit = p.test(f);\n                this.debug('pattern match', p, f, hit);\n            }\n            if (!hit)\n                return false;\n        }\n        // Note: ending in / means that we'll get a final \"\"\n        // at the end of the pattern.  This can only match a\n        // corresponding \"\" at the end of the file.\n        // If the file ends in /, then it can only match a\n        // a pattern that ends in /, unless the pattern just\n        // doesn't have any more for it. But, a/b/ should *not*\n        // match \"a/b/*\", even though \"\" matches against the\n        // [^/]*? 
pattern, except in partial mode, where it might\n        // simply not be reached yet.\n        // However, a/b/ should still satisfy a/*\n        // now either we fell off the end of the pattern, or we're done.\n        if (fi === fl && pi === pl) {\n            // ran out of pattern and filename at the same time.\n            // an exact hit!\n            return true;\n        }\n        else if (fi === fl) {\n            // ran out of file, but still had pattern left.\n            // this is ok if we're doing the match as part of\n            // a glob fs traversal.\n            return partial;\n        }\n        else if (pi === pl) {\n            // ran out of pattern, still have file left.\n            // this is only acceptable if we're on the very last\n            // empty segment of a file with a trailing slash.\n            // a/* should match a/b/\n            return fi === fl - 1 && file[fi] === '';\n            /* c8 ignore start */\n        }\n        else {\n            // should be unreachable.\n            throw new Error('wtf?');\n        }\n        /* c8 ignore stop */\n    }\n    braceExpand() {\n        return (0, exports.braceExpand)(this.pattern, this.options);\n    }\n    parse(pattern) {\n        assertValidPattern(pattern);\n        const options = this.options;\n        // shortcuts\n        if (pattern === '**')\n            return exports.GLOBSTAR;\n        if (pattern === '')\n            return '';\n        // far and away, the most common glob pattern parts are\n        // *, *.*, and *.  Add a fast check method for those.\n        let m;\n        let fastTest = null;\n        if ((m = pattern.match(starRE))) {\n            fastTest = options.dot ? starTestDot : starTest;\n        }\n        else if ((m = pattern.match(starDotExtRE))) {\n            fastTest = (options.nocase\n                ? options.dot\n                    ? starDotExtTestNocaseDot\n                    : starDotExtTestNocase\n                : options.dot\n                    ? starDotExtTestDot\n                    : starDotExtTest)(m[1]);\n        }\n        else if ((m = pattern.match(qmarksRE))) {\n            fastTest = (options.nocase\n                ? options.dot\n                    ? qmarksTestNocaseDot\n                    : qmarksTestNocase\n                : options.dot\n                    ? qmarksTestDot\n                    : qmarksTest)(m);\n        }\n        else if ((m = pattern.match(starDotStarRE))) {\n            fastTest = options.dot ? starDotStarTestDot : starDotStarTest;\n        }\n        else if ((m = pattern.match(dotStarRE))) {\n            fastTest = dotStarTest;\n        }\n        let re = '';\n        let hasMagic = false;\n        let escaping = false;\n        // ? => one single character\n        const patternListStack = [];\n        const negativeLists = [];\n        let stateChar = false;\n        let uflag = false;\n        let pl;\n        // . and .. never match anything that doesn't start with .,\n        // even when options.dot is set.  However, if the pattern\n        // starts with ., then traversal patterns can match.\n        let dotTravAllowed = pattern.charAt(0) === '.';\n        let dotFileAllowed = options.dot || dotTravAllowed;\n        const patternStart = () => dotTravAllowed\n            ? ''\n            : dotFileAllowed\n                ? '(?!(?:^|\\\\/)\\\\.{1,2}(?:$|\\\\/))'\n                : '(?!\\\\.)';\n        const subPatternStart = (p) => p.charAt(0) === '.'\n            ? 
''\n            : options.dot\n                ? '(?!(?:^|\\\\/)\\\\.{1,2}(?:$|\\\\/))'\n                : '(?!\\\\.)';\n        const clearStateChar = () => {\n            if (stateChar) {\n                // we had some state-tracking character\n                // that wasn't consumed by this pass.\n                switch (stateChar) {\n                    case '*':\n                        re += star;\n                        hasMagic = true;\n                        break;\n                    case '?':\n                        re += qmark;\n                        hasMagic = true;\n                        break;\n                    default:\n                        re += '\\\\' + stateChar;\n                        break;\n                }\n                this.debug('clearStateChar %j %j', stateChar, re);\n                stateChar = false;\n            }\n        };\n        for (let i = 0, c; i < pattern.length && (c = pattern.charAt(i)); i++) {\n            this.debug('%s\\t%s %s %j', pattern, i, re, c);\n            // skip over any that are escaped.\n            if (escaping) {\n                // completely not allowed, even escaped.\n                // should be impossible.\n                /* c8 ignore start */\n                if (c === '/') {\n                    return false;\n                }\n                /* c8 ignore stop */\n                if (reSpecials[c]) {\n                    re += '\\\\';\n                }\n                re += c;\n                escaping = false;\n                continue;\n            }\n            switch (c) {\n                // Should already be path-split by now.\n                /* c8 ignore start */\n                case '/': {\n                    return false;\n                }\n                /* c8 ignore stop */\n                case '\\\\':\n                    clearStateChar();\n                    escaping = true;\n                    continue;\n                // the various stateChar values\n                // for the \"extglob\" stuff.\n                case '?':\n                case '*':\n                case '+':\n                case '@':\n                case '!':\n                    this.debug('%s\\t%s %s %j <-- stateChar', pattern, i, re, c);\n                    // if we already have a stateChar, then it means\n                    // that there was something like ** or +? 
in there.\n                    // Handle the stateChar, then proceed with this one.\n                    this.debug('call clearStateChar %j', stateChar);\n                    clearStateChar();\n                    stateChar = c;\n                    // if extglob is disabled, then +(asdf|foo) isn't a thing.\n                    // just clear the statechar *now*, rather than even diving into\n                    // the patternList stuff.\n                    if (options.noext)\n                        clearStateChar();\n                    continue;\n                case '(': {\n                    if (!stateChar) {\n                        re += '\\\\(';\n                        continue;\n                    }\n                    const plEntry = {\n                        type: stateChar,\n                        start: i - 1,\n                        reStart: re.length,\n                        open: plTypes[stateChar].open,\n                        close: plTypes[stateChar].close,\n                    };\n                    this.debug(this.pattern, '\\t', plEntry);\n                    patternListStack.push(plEntry);\n                    // negation is (?:(?!(?:js)(?:))[^/]*)\n                    re += plEntry.open;\n                    // next entry starts with a dot maybe?\n                    if (plEntry.start === 0 && plEntry.type !== '!') {\n                        dotTravAllowed = true;\n                        re += subPatternStart(pattern.slice(i + 1));\n                    }\n                    this.debug('plType %j %j', stateChar, re);\n                    stateChar = false;\n                    continue;\n                }\n                case ')': {\n                    const plEntry = patternListStack[patternListStack.length - 1];\n                    if (!plEntry) {\n                        re += '\\\\)';\n                        continue;\n                    }\n                    patternListStack.pop();\n                    // closing an extglob\n                    clearStateChar();\n                    hasMagic = true;\n                    pl = plEntry;\n                    // negation is (?:(?!js)[^/]*)\n                    // The others are (?:)\n                    re += pl.close;\n                    if (pl.type === '!') {\n                        negativeLists.push(Object.assign(pl, { reEnd: re.length }));\n                    }\n                    continue;\n                }\n                case '|': {\n                    const plEntry = patternListStack[patternListStack.length - 1];\n                    if (!plEntry) {\n                        re += '\\\\|';\n                        continue;\n                    }\n                    clearStateChar();\n                    re += '|';\n                    // next subpattern can start with a dot?\n                    if (plEntry.start === 0 && plEntry.type !== '!') {\n                        dotTravAllowed = true;\n                        re += subPatternStart(pattern.slice(i + 1));\n                    }\n                    continue;\n                }\n                // these are mostly the same in regexp and glob\n                case '[':\n                    // swallow any state-tracking char before the [\n                    clearStateChar();\n                    const [src, needUflag, consumed, magic] = (0, brace_expressions_js_1.parseClass)(pattern, i);\n                    if (consumed) {\n                        re += src;\n                        uflag = uflag || needUflag;\n                 
       i += consumed - 1;\n                        hasMagic = hasMagic || magic;\n                    }\n                    else {\n                        re += '\\\\[';\n                    }\n                    continue;\n                case ']':\n                    re += '\\\\' + c;\n                    continue;\n                default:\n                    // swallow any state char that wasn't consumed\n                    clearStateChar();\n                    re += regExpEscape(c);\n                    break;\n            } // switch\n        } // for\n        // handle the case where we had a +( thing at the *end*\n        // of the pattern.\n        // each pattern list stack adds 3 chars, and we need to go through\n        // and escape any | chars that were passed through as-is for the regexp.\n        // Go through and escape them, taking care not to double-escape any\n        // | chars that were already escaped.\n        for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {\n            let tail;\n            tail = re.slice(pl.reStart + pl.open.length);\n            this.debug(this.pattern, 'setting tail', re, pl);\n            // maybe some even number of \\, then maybe 1 \\, followed by a |\n            tail = tail.replace(/((?:\\\\{2}){0,64})(\\\\?)\\|/g, (_, $1, $2) => {\n                if (!$2) {\n                    // the | isn't already escaped, so escape it.\n                    $2 = '\\\\';\n                    // should already be done\n                    /* c8 ignore start */\n                }\n                /* c8 ignore stop */\n                // need to escape all those slashes *again*, without escaping the\n                // one that we need for escaping the | character.  As it works out,\n                // escaping an even number of slashes can be done by simply repeating\n                // it exactly after itself.  That's why this trick works.\n                //\n                // I am sorry that you have to see this.\n                return $1 + $1 + $2 + '|';\n            });\n            this.debug('tail=%j\\n   %s', tail, tail, pl, re);\n            const t = pl.type === '*' ? star : pl.type === '?' ? qmark : '\\\\' + pl.type;\n            hasMagic = true;\n            re = re.slice(0, pl.reStart) + t + '\\\\(' + tail;\n        }\n        // handle trailing things that only matter at the very end.\n        clearStateChar();\n        if (escaping) {\n            // trailing \\\\\n            re += '\\\\\\\\';\n        }\n        // only need to apply the nodot start if the re starts with\n        // something that could conceivably capture a dot\n        const addPatternStart = addPatternStartSet[re.charAt(0)];\n        // Hack to work around lack of negative lookbehind in JS\n        // A pattern like: *.!(x).!(y|z) needs to ensure that a name\n        // like 'a.xyz.yz' doesn't match.  
So, the first negative\n        // lookahead, has to look ALL the way ahead, to the end of\n        // the pattern.\n        for (let n = negativeLists.length - 1; n > -1; n--) {\n            const nl = negativeLists[n];\n            const nlBefore = re.slice(0, nl.reStart);\n            const nlFirst = re.slice(nl.reStart, nl.reEnd - 8);\n            let nlAfter = re.slice(nl.reEnd);\n            const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter;\n            // Handle nested stuff like *(*.js|!(*.json)), where open parens\n            // mean that we should *not* include the ) in the bit that is considered\n            // \"after\" the negated section.\n            const closeParensBefore = nlBefore.split(')').length;\n            const openParensBefore = nlBefore.split('(').length - closeParensBefore;\n            let cleanAfter = nlAfter;\n            for (let i = 0; i < openParensBefore; i++) {\n                cleanAfter = cleanAfter.replace(/\\)[+*?]?/, '');\n            }\n            nlAfter = cleanAfter;\n            const dollar = nlAfter === '' ? '(?:$|\\\\/)' : '';\n            re = nlBefore + nlFirst + nlAfter + dollar + nlLast;\n        }\n        // if the re is not \"\" at this point, then we need to make sure\n        // it doesn't match against an empty path part.\n        // Otherwise a/* will match a/, which it should not.\n        if (re !== '' && hasMagic) {\n            re = '(?=.)' + re;\n        }\n        if (addPatternStart) {\n            re = patternStart() + re;\n        }\n        // if it's nocase, and the lcase/uppercase don't match, it's magic\n        if (options.nocase && !hasMagic && !options.nocaseMagicOnly) {\n            hasMagic = pattern.toUpperCase() !== pattern.toLowerCase();\n        }\n        // skip the regexp for non-magical patterns\n        // unescape anything in it, though, so that it'll be\n        // an exact match against a file etc.\n        if (!hasMagic) {\n            return globUnescape(re);\n        }\n        const flags = (options.nocase ? 'i' : '') + (uflag ? 'u' : '');\n        try {\n            const ext = fastTest\n                ? {\n                    _glob: pattern,\n                    _src: re,\n                    test: fastTest,\n                }\n                : {\n                    _glob: pattern,\n                    _src: re,\n                };\n            return Object.assign(new RegExp('^' + re + '$', flags), ext);\n            /* c8 ignore start */\n        }\n        catch (er) {\n            // should be impossible\n            // If it was an invalid regular expression, then it can't match\n            // anything.  This trick looks for a character after the end of\n            // the string, which is of course impossible, except in multi-line\n            // mode, but it's not a /m regex.\n            this.debug('invalid regexp', er);\n            return new RegExp('$.');\n        }\n        /* c8 ignore stop */\n    }\n    makeRe() {\n        if (this.regexp || this.regexp === false)\n            return this.regexp;\n        // at this point, this.set is a 2d array of partial\n        // pattern strings, or \"**\".\n        //\n        // It's better to use .match().  
This function shouldn't\n        // be used, really, but it's pretty convenient sometimes,\n        // when you just want to work with a regex.\n        const set = this.set;\n        if (!set.length) {\n            this.regexp = false;\n            return this.regexp;\n        }\n        const options = this.options;\n        const twoStar = options.noglobstar\n            ? star\n            : options.dot\n                ? twoStarDot\n                : twoStarNoDot;\n        const flags = options.nocase ? 'i' : '';\n        // regexpify non-globstar patterns\n        // if ** is only item, then we just do one twoStar\n        // if ** is first, and there are more, prepend (\\/|twoStar\\/)? to next\n        // if ** is last, append (\\/twoStar|) to previous\n        // if ** is in the middle, append (\\/|\\/twoStar\\/) to previous\n        // then filter out GLOBSTAR symbols\n        let re = set\n            .map(pattern => {\n            const pp = pattern.map(p => typeof p === 'string'\n                ? regExpEscape(p)\n                : p === exports.GLOBSTAR\n                    ? exports.GLOBSTAR\n                    : p._src);\n            pp.forEach((p, i) => {\n                const next = pp[i + 1];\n                const prev = pp[i - 1];\n                if (p !== exports.GLOBSTAR || prev === exports.GLOBSTAR) {\n                    return;\n                }\n                if (prev === undefined) {\n                    if (next !== undefined && next !== exports.GLOBSTAR) {\n                        pp[i + 1] = '(?:\\\\/|' + twoStar + '\\\\/)?' + next;\n                    }\n                    else {\n                        pp[i] = twoStar;\n                    }\n                }\n                else if (next === undefined) {\n                    pp[i - 1] = prev + '(?:\\\\/|' + twoStar + ')?';\n                }\n                else if (next !== exports.GLOBSTAR) {\n                    pp[i - 1] = prev + '(?:\\\\/|\\\\/' + twoStar + '\\\\/)' + next;\n                    pp[i + 1] = exports.GLOBSTAR;\n                }\n            });\n            return pp.filter(p => p !== exports.GLOBSTAR).join('/');\n        })\n            .join('|');\n        // must match entire pattern\n        // ending in a * or ** will make it less strict.\n        re = '^(?:' + re + ')$';\n        // can match anything, as long as it's not this.\n        if (this.negate)\n            re = '^(?!' + re + ').*$';\n        try {\n            this.regexp = new RegExp(re, flags);\n            /* c8 ignore start */\n        }\n        catch (ex) {\n            // should be impossible\n            this.regexp = false;\n        }\n        /* c8 ignore stop */\n        return this.regexp;\n    }\n    slashSplit(p) {\n        // if p starts with // on windows, we preserve that\n        // so that UNC paths aren't broken.  
Otherwise, any number of\n        // / characters are coalesced into one, unless\n        // preserveMultipleSlashes is set to true.\n        if (this.preserveMultipleSlashes) {\n            return p.split('/');\n        }\n        else if (this.isWindows && /^\\/\\/[^\\/]+/.test(p)) {\n            // add an extra '' for the one we lose\n            return ['', ...p.split(/\\/+/)];\n        }\n        else {\n            return p.split(/\\/+/);\n        }\n    }\n    match(f, partial = this.partial) {\n        this.debug('match', f, this.pattern);\n        // short-circuit in the case of busted things.\n        // comments, etc.\n        if (this.comment) {\n            return false;\n        }\n        if (this.empty) {\n            return f === '';\n        }\n        if (f === '/' && partial) {\n            return true;\n        }\n        const options = this.options;\n        // windows: need to use /, not \\\n        if (this.isWindows) {\n            f = f.split('\\\\').join('/');\n        }\n        // treat the test path as a set of pathparts.\n        const ff = this.slashSplit(f);\n        this.debug(this.pattern, 'split', ff);\n        // just ONE of the pattern sets in this.set needs to match\n        // in order for it to be valid.  If negating, then just one\n        // match means that we have failed.\n        // Either way, return on the first hit.\n        const set = this.set;\n        this.debug(this.pattern, 'set', set);\n        // Find the basename of the path by looking for the last non-empty segment\n        let filename = ff[ff.length - 1];\n        if (!filename) {\n            for (let i = ff.length - 2; !filename && i >= 0; i--) {\n                filename = ff[i];\n            }\n        }\n        for (let i = 0; i < set.length; i++) {\n            const pattern = set[i];\n            let file = ff;\n            if (options.matchBase && pattern.length === 1) {\n                file = [filename];\n            }\n            const hit = this.matchOne(file, pattern, partial);\n            if (hit) {\n                if (options.flipNegate) {\n                    return true;\n                }\n                return !this.negate;\n            }\n        }\n        // didn't get any hits.  this is success if it's a negative\n        // pattern, failure otherwise.\n        if (options.flipNegate) {\n            return false;\n        }\n        return this.negate;\n    }\n    static defaults(def) {\n        return exports.minimatch.defaults(def).Minimatch;\n    }\n}\nexports.Minimatch = Minimatch;\n/* c8 ignore start */\nvar escape_js_2 = require(\"./escape.js\");\nObject.defineProperty(exports, \"escape\", { enumerable: true, get: function () { return escape_js_2.escape; } });\nvar unescape_js_2 = require(\"./unescape.js\");\nObject.defineProperty(exports, \"unescape\", { enumerable: true, get: function () { return unescape_js_2.unescape; } });\n/* c8 ignore stop */\nexports.minimatch.Minimatch = Minimatch;\nexports.minimatch.escape = escape_js_1.escape;\nexports.minimatch.unescape = unescape_js_1.unescape;\n//# sourceMappingURL=index.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.unescape = void 0;\n/**\n * Un-escape a string that has been escaped with {@link escape}.\n *\n * If the {@link windowsPathsNoEscape} option is used, then square-brace\n * escapes are removed, but not backslash escapes.  
For example, it will turn\n * the string `'[*]'` into `*`, but it will not turn `'\\\\*'` into `'*'`,\n * becuase `\\` is a path separator in `windowsPathsNoEscape` mode.\n *\n * When `windowsPathsNoEscape` is not set, then both brace escapes and\n * backslash escapes are removed.\n *\n * Slashes (and backslashes in `windowsPathsNoEscape` mode) cannot be escaped\n * or unescaped.\n */\nconst unescape = (s, { windowsPathsNoEscape = false, } = {}) => {\n    return windowsPathsNoEscape\n        ? s.replace(/\\[([^\\/\\\\])\\]/g, '$1')\n        : s.replace(/((?!\\\\).|^)\\[([^\\/\\\\])\\]/g, '$1$2').replace(/\\\\([^\\/])/g, '$1');\n};\nexports.unescape = unescape;\n//# sourceMappingURL=unescape.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MultipartBody = void 0;\n/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\nclass MultipartBody {\n    constructor(body) {\n        this.body = body;\n    }\n    get [Symbol.toStringTag]() {\n        return 'MultipartBody';\n    }\n}\nexports.MultipartBody = MultipartBody;\n//# sourceMappingURL=MultipartBody.js.map","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n    for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\n/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\n__exportStar(require(\"../node-runtime.js\"), exports);\n//# sourceMappingURL=runtime-node.js.map","/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\nconst shims = require('./registry');\nconst auto = require('openai/_shims/auto/runtime');\nif (!shims.kind) shims.setShims(auto.getRuntime(), { auto: true });\nfor (const property of Object.keys(shims)) {\n  Object.defineProperty(exports, property, {\n    get() {\n      return shims[property];\n    },\n  });\n}\n","\"use strict\";\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nvar __importDefault = (this && this.__importDefault) || function (mod) {\n    return (mod && mod.__esModule) ? mod : { \"default\": mod };\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.getRuntime = void 0;\n/**\n * Disclaimer: modules in _shims aren't intended to be imported by SDK users.\n */\nconst nf = __importStar(require(\"node-fetch\"));\nconst fd = __importStar(require(\"formdata-node\"));\nconst agentkeepalive_1 = __importDefault(require(\"agentkeepalive\"));\nconst abort_controller_1 = require(\"abort-controller\");\nconst node_fs_1 = require(\"node:fs\");\nconst form_data_encoder_1 = require(\"form-data-encoder\");\nconst node_stream_1 = require(\"node:stream\");\nconst MultipartBody_1 = require(\"./MultipartBody.js\");\n// @ts-ignore (this package does not have proper export maps for this export)\nconst ponyfill_es2018_js_1 = require(\"web-streams-polyfill/dist/ponyfill.es2018.js\");\nlet fileFromPathWarned = false;\nasync function fileFromPath(path, ...args) {\n    // this import fails in environments that don't handle export maps correctly, like old versions of Jest\n    const { fileFromPath: _fileFromPath } = await Promise.resolve().then(() => __importStar(require('formdata-node/file-from-path')));\n    if (!fileFromPathWarned) {\n        console.warn(`fileFromPath is deprecated; use fs.createReadStream(${JSON.stringify(path)}) instead`);\n        fileFromPathWarned = true;\n    }\n    // @ts-ignore\n    return await _fileFromPath(path, ...args);\n}\nconst defaultHttpAgent = new agentkeepalive_1.default({ keepAlive: true, timeout: 5 * 60 * 1000 });\nconst defaultHttpsAgent = new agentkeepalive_1.default.HttpsAgent({ keepAlive: true, timeout: 5 * 60 * 1000 });\nasync function getMultipartRequestOptions(form, opts) {\n    const encoder = new form_data_encoder_1.FormDataEncoder(form);\n    const readable = node_stream_1.Readable.from(encoder);\n    const body = new MultipartBody_1.MultipartBody(readable);\n    const headers = {\n        ...opts.headers,\n        ...encoder.headers,\n        'Content-Length': encoder.contentLength,\n    };\n    return { ...opts, body: body, headers };\n}\nfunction getRuntime() {\n    // Polyfill global object if needed.\n    if (typeof AbortController === 'undefined') {\n        // @ts-expect-error (the types are subtly different, but compatible in practice)\n        globalThis.AbortController = abort_controller_1.AbortController;\n    }\n    return {\n        kind: 'node',\n        fetch: nf.default,\n        Request: nf.Request,\n        Response: nf.Response,\n        Headers: nf.Headers,\n        FormData: fd.FormData,\n        Blob: fd.Blob,\n        File: fd.File,\n        ReadableStream: ponyfill_es2018_js_1.ReadableStream,\n        getMultipartRequestOptions,\n        getDefaultAgent: (url) => (url.startsWith('https') ? 
defaultHttpsAgent : defaultHttpAgent),\n        fileFromPath,\n        isFsReadStream: (value) => value instanceof node_fs_1.ReadStream,\n    };\n}\nexports.getRuntime = getRuntime;\n//# sourceMappingURL=node-runtime.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.setShims = exports.isFsReadStream = exports.fileFromPath = exports.getDefaultAgent = exports.getMultipartRequestOptions = exports.ReadableStream = exports.File = exports.Blob = exports.FormData = exports.Headers = exports.Response = exports.Request = exports.fetch = exports.kind = exports.auto = void 0;\nexports.auto = false;\nexports.kind = undefined;\nexports.fetch = undefined;\nexports.Request = undefined;\nexports.Response = undefined;\nexports.Headers = undefined;\nexports.FormData = undefined;\nexports.Blob = undefined;\nexports.File = undefined;\nexports.ReadableStream = undefined;\nexports.getMultipartRequestOptions = undefined;\nexports.getDefaultAgent = undefined;\nexports.fileFromPath = undefined;\nexports.isFsReadStream = undefined;\nfunction setShims(shims, options = { auto: false }) {\n    if (exports.auto) {\n        throw new Error(`you must \\`import 'openai/shims/${shims.kind}'\\` before importing anything else from openai`);\n    }\n    if (exports.kind) {\n        throw new Error(`can't \\`import 'openai/shims/${shims.kind}'\\` after \\`import 'openai/shims/${exports.kind}'\\``);\n    }\n    exports.auto = options.auto;\n    exports.kind = shims.kind;\n    exports.fetch = shims.fetch;\n    exports.Request = shims.Request;\n    exports.Response = shims.Response;\n    exports.Headers = shims.Headers;\n    exports.FormData = shims.FormData;\n    exports.Blob = shims.Blob;\n    exports.File = shims.File;\n    exports.ReadableStream = shims.ReadableStream;\n    exports.getMultipartRequestOptions = shims.getMultipartRequestOptions;\n    exports.getDefaultAgent = shims.getDefaultAgent;\n    exports.fileFromPath = shims.fileFromPath;\n    exports.isFsReadStream = shims.isFsReadStream;\n}\nexports.setShims = setShims;\n//# sourceMappingURL=registry.js.map","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n    if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n    return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n    return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? 
f.value : state.get(receiver);\n};\nvar _AbstractPage_client;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.toBase64 = exports.getRequiredHeader = exports.isHeadersProtocol = exports.isRunningInBrowser = exports.debug = exports.hasOwn = exports.isEmptyObj = exports.maybeCoerceBoolean = exports.maybeCoerceFloat = exports.maybeCoerceInteger = exports.coerceBoolean = exports.coerceFloat = exports.coerceInteger = exports.readEnv = exports.ensurePresent = exports.castToError = exports.sleep = exports.safeJSON = exports.isRequestOptions = exports.createResponseHeaders = exports.PagePromise = exports.AbstractPage = exports.APIClient = exports.APIPromise = exports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = void 0;\nconst version_1 = require(\"./version.js\");\nconst streaming_1 = require(\"./streaming.js\");\nconst error_1 = require(\"./error.js\");\nconst index_1 = require(\"./_shims/index.js\");\nconst uploads_1 = require(\"./uploads.js\");\nvar uploads_2 = require(\"./uploads.js\");\nObject.defineProperty(exports, \"maybeMultipartFormRequestOptions\", { enumerable: true, get: function () { return uploads_2.maybeMultipartFormRequestOptions; } });\nObject.defineProperty(exports, \"multipartFormRequestOptions\", { enumerable: true, get: function () { return uploads_2.multipartFormRequestOptions; } });\nObject.defineProperty(exports, \"createForm\", { enumerable: true, get: function () { return uploads_2.createForm; } });\nasync function defaultParseResponse(props) {\n    const { response } = props;\n    if (props.options.stream) {\n        debug('response', response.status, response.url, response.headers, response.body);\n        // Note: there is an invariant here that isn't represented in the type system\n        // that if you set `stream: true` the response type must also be `Stream`\n        return streaming_1.Stream.fromSSEResponse(response, props.controller);\n    }\n    // fetch refuses to read the body when the status code is 204.\n    if (response.status === 204) {\n        return null;\n    }\n    if (props.options.__binaryResponse) {\n        return response;\n    }\n    const contentType = response.headers.get('content-type');\n    if (contentType?.includes('application/json')) {\n        const json = await response.json();\n        debug('response', response.status, response.url, response.headers, json);\n        return json;\n    }\n    const text = await response.text();\n    debug('response', response.status, response.url, response.headers, text);\n    // TODO handle blob, arraybuffer, other content types, etc.\n    return text;\n}\n/**\n * A subclass of `Promise` providing additional helper methods\n * for interacting with the SDK.\n */\nclass APIPromise extends Promise {\n    constructor(responsePromise, parseResponse = defaultParseResponse) {\n        super((resolve) => {\n            // this is maybe a bit weird but this has to be a no-op to not implicitly\n            // parse the response body; instead .then, .catch, .finally are overridden\n            // to parse the response\n            resolve(null);\n        });\n        this.responsePromise = responsePromise;\n        this.parseResponse = parseResponse;\n    }\n    _thenUnwrap(transform) {\n        return new APIPromise(this.responsePromise, async (props) => transform(await this.parseResponse(props)));\n    }\n    /**\n     * Gets the raw `Response` instance instead of parsing the response\n     * data.\n     *\n     * If you want to parse 
the response body but still get the `Response`\n     * instance, you can use {@link withResponse()}.\n     *\n     * 👋 Getting the wrong TypeScript type for `Response`?\n     * Try setting `\"moduleResolution\": \"NodeNext\"` if you can,\n     * or add one of these imports before your first `import … from 'openai'`:\n     * - `import 'openai/shims/node'` (if you're running on Node)\n     * - `import 'openai/shims/web'` (otherwise)\n     */\n    asResponse() {\n        return this.responsePromise.then((p) => p.response);\n    }\n    /**\n     * Gets the parsed response data and the raw `Response` instance.\n     *\n     * If you just want to get the raw `Response` instance without parsing it,\n     * you can use {@link asResponse()}.\n     *\n     *\n     * 👋 Getting the wrong TypeScript type for `Response`?\n     * Try setting `\"moduleResolution\": \"NodeNext\"` if you can,\n     * or add one of these imports before your first `import … from 'openai'`:\n     * - `import 'openai/shims/node'` (if you're running on Node)\n     * - `import 'openai/shims/web'` (otherwise)\n     */\n    async withResponse() {\n        const [data, response] = await Promise.all([this.parse(), this.asResponse()]);\n        return { data, response };\n    }\n    parse() {\n        if (!this.parsedPromise) {\n            this.parsedPromise = this.responsePromise.then(this.parseResponse);\n        }\n        return this.parsedPromise;\n    }\n    then(onfulfilled, onrejected) {\n        return this.parse().then(onfulfilled, onrejected);\n    }\n    catch(onrejected) {\n        return this.parse().catch(onrejected);\n    }\n    finally(onfinally) {\n        return this.parse().finally(onfinally);\n    }\n}\nexports.APIPromise = APIPromise;\nclass APIClient {\n    constructor({ baseURL, maxRetries = 2, timeout = 600000, // 10 minutes\n    httpAgent, fetch: overridenFetch, }) {\n        this.baseURL = baseURL;\n        this.maxRetries = validatePositiveInteger('maxRetries', maxRetries);\n        this.timeout = validatePositiveInteger('timeout', timeout);\n        this.httpAgent = httpAgent;\n        this.fetch = overridenFetch ?? 
index_1.fetch;\n    }\n    authHeaders(opts) {\n        return {};\n    }\n    /**\n     * Override this to add your own default headers, for example:\n     *\n     *  {\n     *    ...super.defaultHeaders(),\n     *    Authorization: 'Bearer 123',\n     *  }\n     */\n    defaultHeaders(opts) {\n        return {\n            Accept: 'application/json',\n            'Content-Type': 'application/json',\n            'User-Agent': this.getUserAgent(),\n            ...getPlatformHeaders(),\n            ...this.authHeaders(opts),\n        };\n    }\n    /**\n     * Override this to add your own headers validation:\n     */\n    validateHeaders(headers, customHeaders) { }\n    defaultIdempotencyKey() {\n        return `stainless-node-retry-${uuid4()}`;\n    }\n    get(path, opts) {\n        return this.methodRequest('get', path, opts);\n    }\n    post(path, opts) {\n        return this.methodRequest('post', path, opts);\n    }\n    patch(path, opts) {\n        return this.methodRequest('patch', path, opts);\n    }\n    put(path, opts) {\n        return this.methodRequest('put', path, opts);\n    }\n    delete(path, opts) {\n        return this.methodRequest('delete', path, opts);\n    }\n    methodRequest(method, path, opts) {\n        return this.request(Promise.resolve(opts).then((opts) => ({ method, path, ...opts })));\n    }\n    getAPIList(path, Page, opts) {\n        return this.requestAPIList(Page, { method: 'get', path, ...opts });\n    }\n    calculateContentLength(body) {\n        if (typeof body === 'string') {\n            if (typeof Buffer !== 'undefined') {\n                return Buffer.byteLength(body, 'utf8').toString();\n            }\n            if (typeof TextEncoder !== 'undefined') {\n                const encoder = new TextEncoder();\n                const encoded = encoder.encode(body);\n                return encoded.length.toString();\n            }\n        }\n        return null;\n    }\n    buildRequest(options) {\n        const { method, path, query, headers: headers = {} } = options;\n        const body = (0, uploads_1.isMultipartBody)(options.body) ? options.body.body\n            : options.body ? JSON.stringify(options.body, null, 2)\n                : null;\n        const contentLength = this.calculateContentLength(body);\n        const url = this.buildURL(path, query);\n        if ('timeout' in options)\n            validatePositiveInteger('timeout', options.timeout);\n        const timeout = options.timeout ?? this.timeout;\n        const httpAgent = options.httpAgent ?? this.httpAgent ?? (0, index_1.getDefaultAgent)(url);\n        const minAgentTimeout = timeout + 1000;\n        if (typeof httpAgent?.options?.timeout === 'number' &&\n            minAgentTimeout > (httpAgent.options.timeout ?? 
0)) {\n            // Allow any given request to bump our agent active socket timeout.\n            // This may seem strange, but leaking active sockets should be rare and not particularly problematic,\n            // and without mutating agent we would need to create more of them.\n            // This tradeoff optimizes for performance.\n            httpAgent.options.timeout = minAgentTimeout;\n        }\n        if (this.idempotencyHeader && method !== 'get') {\n            if (!options.idempotencyKey)\n                options.idempotencyKey = this.defaultIdempotencyKey();\n            headers[this.idempotencyHeader] = options.idempotencyKey;\n        }\n        const reqHeaders = {\n            ...(contentLength && { 'Content-Length': contentLength }),\n            ...this.defaultHeaders(options),\n            ...headers,\n        };\n        // let builtin fetch set the Content-Type for multipart bodies\n        if ((0, uploads_1.isMultipartBody)(options.body) && index_1.kind !== 'node') {\n            delete reqHeaders['Content-Type'];\n        }\n        // Strip any headers being explicitly omitted with null\n        Object.keys(reqHeaders).forEach((key) => reqHeaders[key] === null && delete reqHeaders[key]);\n        const req = {\n            method,\n            ...(body && { body: body }),\n            headers: reqHeaders,\n            ...(httpAgent && { agent: httpAgent }),\n            // @ts-ignore node-fetch uses a custom AbortSignal type that is\n            // not compatible with standard web types\n            signal: options.signal ?? null,\n        };\n        this.validateHeaders(reqHeaders, headers);\n        return { req, url, timeout };\n    }\n    /**\n     * Used as a callback for mutating the given `RequestInit` object.\n     *\n     * This is useful for cases where you want to add certain headers based off of\n     * the request properties, e.g. `method` or `url`.\n     */\n    async prepareRequest(request, { url, options }) { }\n    parseHeaders(headers) {\n        return (!headers ? {}\n            : Symbol.iterator in headers ?\n                Object.fromEntries(Array.from(headers).map((header) => [...header]))\n                : { ...headers });\n    }\n    makeStatusError(status, error, message, headers) {\n        return error_1.APIError.generate(status, error, message, headers);\n    }\n    request(options, remainingRetries = null) {\n        return new APIPromise(this.makeRequest(options, remainingRetries));\n    }\n    async makeRequest(optionsInput, retriesRemaining) {\n        const options = await optionsInput;\n        if (retriesRemaining == null) {\n            retriesRemaining = options.maxRetries ?? 
this.maxRetries;\n        }\n        const { req, url, timeout } = this.buildRequest(options);\n        await this.prepareRequest(req, { url, options });\n        debug('request', url, options, req.headers);\n        if (options.signal?.aborted) {\n            throw new error_1.APIUserAbortError();\n        }\n        const controller = new AbortController();\n        const response = await this.fetchWithTimeout(url, req, timeout, controller).catch(exports.castToError);\n        if (response instanceof Error) {\n            if (options.signal?.aborted) {\n                throw new error_1.APIUserAbortError();\n            }\n            if (retriesRemaining) {\n                return this.retryRequest(options, retriesRemaining);\n            }\n            if (response.name === 'AbortError') {\n                throw new error_1.APIConnectionTimeoutError();\n            }\n            throw new error_1.APIConnectionError({ cause: response });\n        }\n        const responseHeaders = (0, exports.createResponseHeaders)(response.headers);\n        if (!response.ok) {\n            if (retriesRemaining && this.shouldRetry(response)) {\n                return this.retryRequest(options, retriesRemaining, responseHeaders);\n            }\n            const errText = await response.text().catch((e) => (0, exports.castToError)(e).message);\n            const errJSON = (0, exports.safeJSON)(errText);\n            const errMessage = errJSON ? undefined : errText;\n            debug('response', response.status, url, responseHeaders, errMessage);\n            const err = this.makeStatusError(response.status, errJSON, errMessage, responseHeaders);\n            throw err;\n        }\n        return { response, options, controller };\n    }\n    requestAPIList(Page, options) {\n        const request = this.makeRequest(options, null);\n        return new PagePromise(this, request, Page);\n    }\n    buildURL(path, query) {\n        const url = isAbsoluteURL(path) ?\n            new URL(path)\n            : new URL(this.baseURL + (this.baseURL.endsWith('/') && path.startsWith('/') ? path.slice(1) : path));\n        const defaultQuery = this.defaultQuery();\n        if (!isEmptyObj(defaultQuery)) {\n            query = { ...defaultQuery, ...query };\n        }\n        if (query) {\n            url.search = this.stringifyQuery(query);\n        }\n        return url.toString();\n    }\n    stringifyQuery(query) {\n        return Object.entries(query)\n            .filter(([_, value]) => typeof value !== 'undefined')\n            .map(([key, value]) => {\n            if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {\n                return `${encodeURIComponent(key)}=${encodeURIComponent(value)}`;\n            }\n            if (value === null) {\n                return `${encodeURIComponent(key)}=`;\n            }\n            throw new error_1.OpenAIError(`Cannot stringify type ${typeof value}; Expected string, number, boolean, or null. If you need to pass nested query parameters, you can manually encode them, e.g. 
{ query: { 'foo[key1]': value1, 'foo[key2]': value2 } }, and please open a GitHub issue requesting better support for your use case.`);\n        })\n            .join('&');\n    }\n    async fetchWithTimeout(url, init, ms, controller) {\n        const { signal, ...options } = init || {};\n        if (signal)\n            signal.addEventListener('abort', () => controller.abort());\n        const timeout = setTimeout(() => controller.abort(), ms);\n        return (this.getRequestClient()\n            // use undefined this binding; fetch errors if bound to something else in browser/cloudflare\n            .fetch.call(undefined, url, { signal: controller.signal, ...options })\n            .finally(() => {\n            clearTimeout(timeout);\n        }));\n    }\n    getRequestClient() {\n        return { fetch: this.fetch };\n    }\n    shouldRetry(response) {\n        // Note this is not a standard header.\n        const shouldRetryHeader = response.headers.get('x-should-retry');\n        // If the server explicitly says whether or not to retry, obey.\n        if (shouldRetryHeader === 'true')\n            return true;\n        if (shouldRetryHeader === 'false')\n            return false;\n        // Retry on request timeouts.\n        if (response.status === 408)\n            return true;\n        // Retry on lock timeouts.\n        if (response.status === 409)\n            return true;\n        // Retry on rate limits.\n        if (response.status === 429)\n            return true;\n        // Retry internal errors.\n        if (response.status >= 500)\n            return true;\n        return false;\n    }\n    async retryRequest(options, retriesRemaining, responseHeaders) {\n        // About the Retry-After header: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Retry-After\n        let timeoutMillis;\n        const retryAfterHeader = responseHeaders?.['retry-after'];\n        if (retryAfterHeader) {\n            const timeoutSeconds = parseInt(retryAfterHeader);\n            if (!Number.isNaN(timeoutSeconds)) {\n                timeoutMillis = timeoutSeconds * 1000;\n            }\n            else {\n                timeoutMillis = Date.parse(retryAfterHeader) - Date.now();\n            }\n        }\n        // If the API asks us to wait a certain amount of time (and it's a reasonable amount),\n        // just do what it says, but otherwise calculate a default\n        if (!timeoutMillis ||\n            !Number.isInteger(timeoutMillis) ||\n            timeoutMillis <= 0 ||\n            timeoutMillis > 60 * 1000) {\n            const maxRetries = options.maxRetries ?? 
this.maxRetries;\n            timeoutMillis = this.calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries);\n        }\n        await (0, exports.sleep)(timeoutMillis);\n        return this.makeRequest(options, retriesRemaining - 1);\n    }\n    calculateDefaultRetryTimeoutMillis(retriesRemaining, maxRetries) {\n        const initialRetryDelay = 0.5;\n        const maxRetryDelay = 8.0;\n        const numRetries = maxRetries - retriesRemaining;\n        // Apply exponential backoff, but not more than the max.\n        const sleepSeconds = Math.min(initialRetryDelay * Math.pow(2, numRetries), maxRetryDelay);\n        // Apply some jitter, take up to at most 25 percent of the retry time.\n        const jitter = 1 - Math.random() * 0.25;\n        return sleepSeconds * jitter * 1000;\n    }\n    getUserAgent() {\n        return `${this.constructor.name}/JS ${version_1.VERSION}`;\n    }\n}\nexports.APIClient = APIClient;\nclass AbstractPage {\n    constructor(client, response, body, options) {\n        _AbstractPage_client.set(this, void 0);\n        __classPrivateFieldSet(this, _AbstractPage_client, client, \"f\");\n        this.options = options;\n        this.response = response;\n        this.body = body;\n    }\n    hasNextPage() {\n        const items = this.getPaginatedItems();\n        if (!items.length)\n            return false;\n        return this.nextPageInfo() != null;\n    }\n    async getNextPage() {\n        const nextInfo = this.nextPageInfo();\n        if (!nextInfo) {\n            throw new error_1.OpenAIError('No next page expected; please check `.hasNextPage()` before calling `.getNextPage()`.');\n        }\n        const nextOptions = { ...this.options };\n        if ('params' in nextInfo) {\n            nextOptions.query = { ...nextOptions.query, ...nextInfo.params };\n        }\n        else if ('url' in nextInfo) {\n            const params = [...Object.entries(nextOptions.query || {}), ...nextInfo.url.searchParams.entries()];\n            for (const [key, value] of params) {\n                nextInfo.url.searchParams.set(key, value);\n            }\n            nextOptions.query = undefined;\n            nextOptions.path = nextInfo.url.toString();\n        }\n        return await __classPrivateFieldGet(this, _AbstractPage_client, \"f\").requestAPIList(this.constructor, nextOptions);\n    }\n    async *iterPages() {\n        // eslint-disable-next-line @typescript-eslint/no-this-alias\n        let page = this;\n        yield page;\n        while (page.hasNextPage()) {\n            page = await page.getNextPage();\n            yield page;\n        }\n    }\n    async *[(_AbstractPage_client = new WeakMap(), Symbol.asyncIterator)]() {\n        for await (const page of this.iterPages()) {\n            for (const item of page.getPaginatedItems()) {\n                yield item;\n            }\n        }\n    }\n}\nexports.AbstractPage = AbstractPage;\n/**\n * This subclass of Promise will resolve to an instantiated Page once the request completes.\n *\n * It also implements AsyncIterable to allow auto-paginating iteration on an unawaited list call, eg:\n *\n *    for await (const item of client.items.list()) {\n *      console.log(item)\n *    }\n */\nclass PagePromise extends APIPromise {\n    constructor(client, request, Page) {\n        super(request, async (props) => new Page(client, props.response, await defaultParseResponse(props), props.options));\n    }\n    /**\n     * Allow auto-paginating iteration on an unawaited list call, eg:\n     *\n     *    for 
await (const item of client.items.list()) {\n     *      console.log(item)\n     *    }\n     */\n    async *[Symbol.asyncIterator]() {\n        const page = await this;\n        for await (const item of page) {\n            yield item;\n        }\n    }\n}\nexports.PagePromise = PagePromise;\nconst createResponseHeaders = (headers) => {\n    return new Proxy(Object.fromEntries(\n    // @ts-ignore\n    headers.entries()), {\n        get(target, name) {\n            const key = name.toString();\n            return target[key.toLowerCase()] || target[key];\n        },\n    });\n};\nexports.createResponseHeaders = createResponseHeaders;\n// This is required so that we can determine if a given object matches the RequestOptions\n// type at runtime. While this requires duplication, it is enforced by the TypeScript\n// compiler such that any missing / extraneous keys will cause an error.\nconst requestOptionsKeys = {\n    method: true,\n    path: true,\n    query: true,\n    body: true,\n    headers: true,\n    maxRetries: true,\n    stream: true,\n    timeout: true,\n    httpAgent: true,\n    signal: true,\n    idempotencyKey: true,\n    __binaryResponse: true,\n};\nconst isRequestOptions = (obj) => {\n    return (typeof obj === 'object' &&\n        obj !== null &&\n        !isEmptyObj(obj) &&\n        Object.keys(obj).every((k) => hasOwn(requestOptionsKeys, k)));\n};\nexports.isRequestOptions = isRequestOptions;\nconst getPlatformProperties = () => {\n    if (typeof Deno !== 'undefined' && Deno.build != null) {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': normalizePlatform(Deno.build.os),\n            'X-Stainless-Arch': normalizeArch(Deno.build.arch),\n            'X-Stainless-Runtime': 'deno',\n            'X-Stainless-Runtime-Version': Deno.version,\n        };\n    }\n    if (typeof EdgeRuntime !== 'undefined') {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': 'Unknown',\n            'X-Stainless-Arch': `other:${EdgeRuntime}`,\n            'X-Stainless-Runtime': 'edge',\n            'X-Stainless-Runtime-Version': process.version,\n        };\n    }\n    // Check if Node.js\n    if (Object.prototype.toString.call(typeof process !== 'undefined' ? 
process : 0) === '[object process]') {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': normalizePlatform(process.platform),\n            'X-Stainless-Arch': normalizeArch(process.arch),\n            'X-Stainless-Runtime': 'node',\n            'X-Stainless-Runtime-Version': process.version,\n        };\n    }\n    const browserInfo = getBrowserInfo();\n    if (browserInfo) {\n        return {\n            'X-Stainless-Lang': 'js',\n            'X-Stainless-Package-Version': version_1.VERSION,\n            'X-Stainless-OS': 'Unknown',\n            'X-Stainless-Arch': 'unknown',\n            'X-Stainless-Runtime': `browser:${browserInfo.browser}`,\n            'X-Stainless-Runtime-Version': browserInfo.version,\n        };\n    }\n    // TODO add support for Cloudflare workers, etc.\n    return {\n        'X-Stainless-Lang': 'js',\n        'X-Stainless-Package-Version': version_1.VERSION,\n        'X-Stainless-OS': 'Unknown',\n        'X-Stainless-Arch': 'unknown',\n        'X-Stainless-Runtime': 'unknown',\n        'X-Stainless-Runtime-Version': 'unknown',\n    };\n};\n// Note: modified from https://github.com/JS-DevTools/host-environment/blob/b1ab79ecde37db5d6e163c050e54fe7d287d7c92/src/isomorphic.browser.ts\nfunction getBrowserInfo() {\n    if (typeof navigator === 'undefined' || !navigator) {\n        return null;\n    }\n    // NOTE: The order matters here!\n    const browserPatterns = [\n        { key: 'edge', pattern: /Edge(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'ie', pattern: /MSIE(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'ie', pattern: /Trident(?:.*rv\\:(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'chrome', pattern: /Chrome(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'firefox', pattern: /Firefox(?:\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?/ },\n        { key: 'safari', pattern: /(?:Version\\W+(\\d+)\\.(\\d+)(?:\\.(\\d+))?)?(?:\\W+Mobile\\S*)?\\W+Safari/ },\n    ];\n    // Find the FIRST matching browser\n    for (const { key, pattern } of browserPatterns) {\n        const match = pattern.exec(navigator.userAgent);\n        if (match) {\n            const major = match[1] || 0;\n            const minor = match[2] || 0;\n            const patch = match[3] || 0;\n            return { browser: key, version: `${major}.${minor}.${patch}` };\n        }\n    }\n    return null;\n}\nconst normalizeArch = (arch) => {\n    // Node docs:\n    // - https://nodejs.org/api/process.html#processarch\n    // Deno docs:\n    // - https://doc.deno.land/deno/stable/~/Deno.build\n    if (arch === 'x32')\n        return 'x32';\n    if (arch === 'x86_64' || arch === 'x64')\n        return 'x64';\n    if (arch === 'arm')\n        return 'arm';\n    if (arch === 'aarch64' || arch === 'arm64')\n        return 'arm64';\n    if (arch)\n        return `other:${arch}`;\n    return 'unknown';\n};\nconst normalizePlatform = (platform) => {\n    // Node platforms:\n    // - https://nodejs.org/api/process.html#processplatform\n    // Deno platforms:\n    // - https://doc.deno.land/deno/stable/~/Deno.build\n    // - https://github.com/denoland/deno/issues/14799\n    platform = platform.toLowerCase();\n    // NOTE: this iOS check is untested and may not work\n    // Node does not work natively on IOS, there is a fork at\n    // https://github.com/nodejs-mobile/nodejs-mobile\n    // however it is unknown at the time of writing how to detect if it is running\n    if 
(platform.includes('ios'))\n        return 'iOS';\n    if (platform === 'android')\n        return 'Android';\n    if (platform === 'darwin')\n        return 'MacOS';\n    if (platform === 'win32')\n        return 'Windows';\n    if (platform === 'freebsd')\n        return 'FreeBSD';\n    if (platform === 'openbsd')\n        return 'OpenBSD';\n    if (platform === 'linux')\n        return 'Linux';\n    if (platform)\n        return `Other:${platform}`;\n    return 'Unknown';\n};\nlet _platformHeaders;\nconst getPlatformHeaders = () => {\n    return (_platformHeaders ?? (_platformHeaders = getPlatformProperties()));\n};\nconst safeJSON = (text) => {\n    try {\n        return JSON.parse(text);\n    }\n    catch (err) {\n        return undefined;\n    }\n};\nexports.safeJSON = safeJSON;\n// https://stackoverflow.com/a/19709846\nconst startsWithSchemeRegexp = new RegExp('^(?:[a-z]+:)?//', 'i');\nconst isAbsoluteURL = (url) => {\n    return startsWithSchemeRegexp.test(url);\n};\nconst sleep = (ms) => new Promise((resolve) => setTimeout(resolve, ms));\nexports.sleep = sleep;\nconst validatePositiveInteger = (name, n) => {\n    if (typeof n !== 'number' || !Number.isInteger(n)) {\n        throw new error_1.OpenAIError(`${name} must be an integer`);\n    }\n    if (n < 0) {\n        throw new error_1.OpenAIError(`${name} must be a positive integer`);\n    }\n    return n;\n};\nconst castToError = (err) => {\n    if (err instanceof Error)\n        return err;\n    return new Error(err);\n};\nexports.castToError = castToError;\nconst ensurePresent = (value) => {\n    if (value == null)\n        throw new error_1.OpenAIError(`Expected a value to be given but received ${value} instead.`);\n    return value;\n};\nexports.ensurePresent = ensurePresent;\n/**\n * Read an environment variable.\n *\n * Will return undefined if the environment variable doesn't exist or cannot be accessed.\n */\nconst readEnv = (env) => {\n    if (typeof process !== 'undefined') {\n        return process.env?.[env] ?? 
undefined;\n    }\n    if (typeof Deno !== 'undefined') {\n        return Deno.env?.get?.(env);\n    }\n    return undefined;\n};\nexports.readEnv = readEnv;\nconst coerceInteger = (value) => {\n    if (typeof value === 'number')\n        return Math.round(value);\n    if (typeof value === 'string')\n        return parseInt(value, 10);\n    throw new error_1.OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`);\n};\nexports.coerceInteger = coerceInteger;\nconst coerceFloat = (value) => {\n    if (typeof value === 'number')\n        return value;\n    if (typeof value === 'string')\n        return parseFloat(value);\n    throw new error_1.OpenAIError(`Could not coerce ${value} (type: ${typeof value}) into a number`);\n};\nexports.coerceFloat = coerceFloat;\nconst coerceBoolean = (value) => {\n    if (typeof value === 'boolean')\n        return value;\n    if (typeof value === 'string')\n        return value === 'true';\n    return Boolean(value);\n};\nexports.coerceBoolean = coerceBoolean;\nconst maybeCoerceInteger = (value) => {\n    if (value === undefined) {\n        return undefined;\n    }\n    return (0, exports.coerceInteger)(value);\n};\nexports.maybeCoerceInteger = maybeCoerceInteger;\nconst maybeCoerceFloat = (value) => {\n    if (value === undefined) {\n        return undefined;\n    }\n    return (0, exports.coerceFloat)(value);\n};\nexports.maybeCoerceFloat = maybeCoerceFloat;\nconst maybeCoerceBoolean = (value) => {\n    if (value === undefined) {\n        return undefined;\n    }\n    return (0, exports.coerceBoolean)(value);\n};\nexports.maybeCoerceBoolean = maybeCoerceBoolean;\n// https://stackoverflow.com/a/34491287\nfunction isEmptyObj(obj) {\n    if (!obj)\n        return true;\n    for (const _k in obj)\n        return false;\n    return true;\n}\nexports.isEmptyObj = isEmptyObj;\n// https://eslint.org/docs/latest/rules/no-prototype-builtins\nfunction hasOwn(obj, key) {\n    return Object.prototype.hasOwnProperty.call(obj, key);\n}\nexports.hasOwn = hasOwn;\nfunction debug(action, ...args) {\n    if (typeof process !== 'undefined' && process.env['DEBUG'] === 'true') {\n        console.log(`OpenAI:DEBUG:${action}`, ...args);\n    }\n}\nexports.debug = debug;\n/**\n * https://stackoverflow.com/a/2117523\n */\nconst uuid4 = () => {\n    return 'xxxxxxxx-xxxx-4xxx-yxxx-xxxxxxxxxxxx'.replace(/[xy]/g, (c) => {\n        const r = (Math.random() * 16) | 0;\n        const v = c === 'x' ? 
r : (r & 0x3) | 0x8;\n        return v.toString(16);\n    });\n};\nconst isRunningInBrowser = () => {\n    return (\n    // @ts-ignore\n    typeof window !== 'undefined' &&\n        // @ts-ignore\n        typeof window.document !== 'undefined' &&\n        // @ts-ignore\n        typeof navigator !== 'undefined');\n};\nexports.isRunningInBrowser = isRunningInBrowser;\nconst isHeadersProtocol = (headers) => {\n    return typeof headers?.get === 'function';\n};\nexports.isHeadersProtocol = isHeadersProtocol;\nconst getRequiredHeader = (headers, header) => {\n    const lowerCasedHeader = header.toLowerCase();\n    if ((0, exports.isHeadersProtocol)(headers)) {\n        // to deal with the case where the header looks like Stainless-Event-Id\n        const intercapsHeader = header[0]?.toUpperCase() +\n            header.substring(1).replace(/([^\\w])(\\w)/g, (_m, g1, g2) => g1 + g2.toUpperCase());\n        for (const key of [header, lowerCasedHeader, header.toUpperCase(), intercapsHeader]) {\n            const value = headers.get(key);\n            if (value) {\n                return value;\n            }\n        }\n    }\n    for (const [key, value] of Object.entries(headers)) {\n        if (key.toLowerCase() === lowerCasedHeader) {\n            if (Array.isArray(value)) {\n                if (value.length <= 1)\n                    return value[0];\n                console.warn(`Received ${value.length} entries for the ${header} header, using the first entry.`);\n                return value[0];\n            }\n            return value;\n        }\n    }\n    throw new Error(`Could not find ${header} header`);\n};\nexports.getRequiredHeader = getRequiredHeader;\n/**\n * Encodes a string to Base64 format.\n */\nconst toBase64 = (str) => {\n    if (!str)\n        return '';\n    if (typeof Buffer !== 'undefined') {\n        return Buffer.from(str).toString('base64');\n    }\n    if (typeof btoa !== 'undefined') {\n        return btoa(str);\n    }\n    throw new error_1.OpenAIError('Cannot generate b64 string; Expected `Buffer` or `btoa` to be defined');\n};\nexports.toBase64 = toBase64;\n//# sourceMappingURL=core.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.InternalServerError = exports.RateLimitError = exports.UnprocessableEntityError = exports.ConflictError = exports.NotFoundError = exports.PermissionDeniedError = exports.AuthenticationError = exports.BadRequestError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIUserAbortError = exports.APIError = exports.OpenAIError = void 0;\nconst core_1 = require(\"./core.js\");\nclass OpenAIError extends Error {\n}\nexports.OpenAIError = OpenAIError;\nclass APIError extends OpenAIError {\n    constructor(status, error, message, headers) {\n        super(`${APIError.makeMessage(status, error, message)}`);\n        this.status = status;\n        this.headers = headers;\n        const data = error;\n        this.error = data;\n        this.code = data?.['code'];\n        this.param = data?.['param'];\n        this.type = data?.['type'];\n    }\n    static makeMessage(status, error, message) {\n        const msg = error?.message ?\n            typeof error.message === 'string' ? error.message\n                : JSON.stringify(error.message)\n            : error ? 
JSON.stringify(error)\n                : message;\n        if (status && msg) {\n            return `${status} ${msg}`;\n        }\n        if (status) {\n            return `${status} status code (no body)`;\n        }\n        if (msg) {\n            return msg;\n        }\n        return '(no status code or body)';\n    }\n    static generate(status, errorResponse, message, headers) {\n        if (!status) {\n            return new APIConnectionError({ cause: (0, core_1.castToError)(errorResponse) });\n        }\n        const error = errorResponse?.['error'];\n        if (status === 400) {\n            return new BadRequestError(status, error, message, headers);\n        }\n        if (status === 401) {\n            return new AuthenticationError(status, error, message, headers);\n        }\n        if (status === 403) {\n            return new PermissionDeniedError(status, error, message, headers);\n        }\n        if (status === 404) {\n            return new NotFoundError(status, error, message, headers);\n        }\n        if (status === 409) {\n            return new ConflictError(status, error, message, headers);\n        }\n        if (status === 422) {\n            return new UnprocessableEntityError(status, error, message, headers);\n        }\n        if (status === 429) {\n            return new RateLimitError(status, error, message, headers);\n        }\n        if (status >= 500) {\n            return new InternalServerError(status, error, message, headers);\n        }\n        return new APIError(status, error, message, headers);\n    }\n}\nexports.APIError = APIError;\nclass APIUserAbortError extends APIError {\n    constructor({ message } = {}) {\n        super(undefined, undefined, message || 'Request was aborted.', undefined);\n        this.status = undefined;\n    }\n}\nexports.APIUserAbortError = APIUserAbortError;\nclass APIConnectionError extends APIError {\n    constructor({ message, cause }) {\n        super(undefined, undefined, message || 'Connection error.', undefined);\n        this.status = undefined;\n        // in some environments the 'cause' property is already declared\n        // @ts-ignore\n        if (cause)\n            this.cause = cause;\n    }\n}\nexports.APIConnectionError = APIConnectionError;\nclass APIConnectionTimeoutError extends APIConnectionError {\n    constructor({ message } = {}) {\n        super({ message: message ?? 'Request timed out.' 
});\n    }\n}\nexports.APIConnectionTimeoutError = APIConnectionTimeoutError;\nclass BadRequestError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 400;\n    }\n}\nexports.BadRequestError = BadRequestError;\nclass AuthenticationError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 401;\n    }\n}\nexports.AuthenticationError = AuthenticationError;\nclass PermissionDeniedError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 403;\n    }\n}\nexports.PermissionDeniedError = PermissionDeniedError;\nclass NotFoundError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 404;\n    }\n}\nexports.NotFoundError = NotFoundError;\nclass ConflictError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 409;\n    }\n}\nexports.ConflictError = ConflictError;\nclass UnprocessableEntityError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 422;\n    }\n}\nexports.UnprocessableEntityError = UnprocessableEntityError;\nclass RateLimitError extends APIError {\n    constructor() {\n        super(...arguments);\n        this.status = 429;\n    }\n}\nexports.RateLimitError = RateLimitError;\nclass InternalServerError extends APIError {\n}\nexports.InternalServerError = InternalServerError;\n//# sourceMappingURL=error.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nvar _a;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.fileFromPath = exports.toFile = exports.UnprocessableEntityError = exports.PermissionDeniedError = exports.InternalServerError = exports.AuthenticationError = exports.BadRequestError = exports.RateLimitError = exports.ConflictError = exports.NotFoundError = exports.APIUserAbortError = exports.APIConnectionTimeoutError = exports.APIConnectionError = exports.APIError = exports.OpenAIError = exports.OpenAI = void 0;\nconst Core = __importStar(require(\"./core.js\"));\nconst Pagination = __importStar(require(\"./pagination.js\"));\nconst Errors = __importStar(require(\"./error.js\"));\nconst Uploads = __importStar(require(\"./uploads.js\"));\nconst API = __importStar(require(\"openai/resources/index\"));\n/** API Client for interfacing with the OpenAI API. 
*/\nclass OpenAI extends Core.APIClient {\n    /**\n     * API Client for interfacing with the OpenAI API.\n     *\n     * @param {string} [opts.apiKey==process.env['OPENAI_API_KEY'] ?? undefined]\n     * @param {string | null} [opts.organization==process.env['OPENAI_ORG_ID'] ?? null]\n     * @param {string} [opts.baseURL] - Override the default base URL for the API.\n     * @param {number} [opts.timeout=10 minutes] - The maximum amount of time (in milliseconds) the client will wait for a response before timing out.\n     * @param {number} [opts.httpAgent] - An HTTP agent used to manage HTTP(s) connections.\n     * @param {Core.Fetch} [opts.fetch] - Specify a custom `fetch` function implementation.\n     * @param {number} [opts.maxRetries=2] - The maximum number of times the client will retry a request.\n     * @param {Core.Headers} opts.defaultHeaders - Default headers to include with every request to the API.\n     * @param {Core.DefaultQuery} opts.defaultQuery - Default query parameters to include with every request to the API.\n     * @param {boolean} [opts.dangerouslyAllowBrowser=false] - By default, client-side use of this library is not allowed, as it risks exposing your secret API credentials to attackers.\n     */\n    constructor({ apiKey = Core.readEnv('OPENAI_API_KEY'), organization = Core.readEnv('OPENAI_ORG_ID') ?? null, ...opts } = {}) {\n        if (apiKey === undefined) {\n            throw new Errors.OpenAIError(\"The OPENAI_API_KEY environment variable is missing or empty; either provide it, or instantiate the OpenAI client with an apiKey option, like new OpenAI({ apiKey: 'My API Key' }).\");\n        }\n        const options = {\n            apiKey,\n            organization,\n            ...opts,\n            baseURL: opts.baseURL ?? `https://api.openai.com/v1`,\n        };\n        if (!options.dangerouslyAllowBrowser && Core.isRunningInBrowser()) {\n            throw new Errors.OpenAIError(\"It looks like you're running in a browser-like environment.\\n\\nThis is disabled by default, as it risks exposing your secret API credentials to attackers.\\nIf you understand the risks and have appropriate mitigations in place,\\nyou can set the `dangerouslyAllowBrowser` option to `true`, e.g.,\\n\\nnew OpenAI({ apiKey, dangerouslyAllowBrowser: true });\\n\\nhttps://help.openai.com/en/articles/5112595-best-practices-for-api-key-safety\\n\");\n        }\n        super({\n            baseURL: options.baseURL,\n            timeout: options.timeout ?? 
600000 /* 10 minutes */,\n            httpAgent: options.httpAgent,\n            maxRetries: options.maxRetries,\n            fetch: options.fetch,\n        });\n        this.completions = new API.Completions(this);\n        this.chat = new API.Chat(this);\n        this.edits = new API.Edits(this);\n        this.embeddings = new API.Embeddings(this);\n        this.files = new API.Files(this);\n        this.images = new API.Images(this);\n        this.audio = new API.Audio(this);\n        this.moderations = new API.Moderations(this);\n        this.models = new API.Models(this);\n        this.fineTuning = new API.FineTuning(this);\n        this.fineTunes = new API.FineTunes(this);\n        this.beta = new API.Beta(this);\n        this._options = options;\n        this.apiKey = apiKey;\n        this.organization = organization;\n    }\n    defaultQuery() {\n        return this._options.defaultQuery;\n    }\n    defaultHeaders(opts) {\n        return {\n            ...super.defaultHeaders(opts),\n            'OpenAI-Organization': this.organization,\n            ...this._options.defaultHeaders,\n        };\n    }\n    authHeaders(opts) {\n        return { Authorization: `Bearer ${this.apiKey}` };\n    }\n}\nexports.OpenAI = OpenAI;\n_a = OpenAI;\nOpenAI.OpenAI = _a;\nOpenAI.OpenAIError = Errors.OpenAIError;\nOpenAI.APIError = Errors.APIError;\nOpenAI.APIConnectionError = Errors.APIConnectionError;\nOpenAI.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError;\nOpenAI.APIUserAbortError = Errors.APIUserAbortError;\nOpenAI.NotFoundError = Errors.NotFoundError;\nOpenAI.ConflictError = Errors.ConflictError;\nOpenAI.RateLimitError = Errors.RateLimitError;\nOpenAI.BadRequestError = Errors.BadRequestError;\nOpenAI.AuthenticationError = Errors.AuthenticationError;\nOpenAI.InternalServerError = Errors.InternalServerError;\nOpenAI.PermissionDeniedError = Errors.PermissionDeniedError;\nOpenAI.UnprocessableEntityError = Errors.UnprocessableEntityError;\nexports.OpenAIError = Errors.OpenAIError, exports.APIError = Errors.APIError, exports.APIConnectionError = Errors.APIConnectionError, exports.APIConnectionTimeoutError = Errors.APIConnectionTimeoutError, exports.APIUserAbortError = Errors.APIUserAbortError, exports.NotFoundError = Errors.NotFoundError, exports.ConflictError = Errors.ConflictError, exports.RateLimitError = Errors.RateLimitError, exports.BadRequestError = Errors.BadRequestError, exports.AuthenticationError = Errors.AuthenticationError, exports.InternalServerError = Errors.InternalServerError, exports.PermissionDeniedError = Errors.PermissionDeniedError, exports.UnprocessableEntityError = Errors.UnprocessableEntityError;\nexports.toFile = Uploads.toFile;\nexports.fileFromPath = Uploads.fileFromPath;\n(function (OpenAI) {\n    // Helper functions\n    OpenAI.toFile = Uploads.toFile;\n    OpenAI.fileFromPath = Uploads.fileFromPath;\n    OpenAI.Page = Pagination.Page;\n    OpenAI.CursorPage = Pagination.CursorPage;\n    OpenAI.Completions = API.Completions;\n    OpenAI.Chat = API.Chat;\n    OpenAI.Edits = API.Edits;\n    OpenAI.Embeddings = API.Embeddings;\n    OpenAI.Files = API.Files;\n    OpenAI.FileObjectsPage = API.FileObjectsPage;\n    OpenAI.Images = API.Images;\n    OpenAI.Audio = API.Audio;\n    OpenAI.Moderations = API.Moderations;\n    OpenAI.Models = API.Models;\n    OpenAI.ModelsPage = API.ModelsPage;\n    OpenAI.FineTuning = API.FineTuning;\n    OpenAI.FineTunes = API.FineTunes;\n    OpenAI.FineTunesPage = API.FineTunesPage;\n    OpenAI.Beta = API.Beta;\n})(OpenAI = 
exports.OpenAI || (exports.OpenAI = {}));\nexports = module.exports = OpenAI;\nexports.default = OpenAI;\n//# sourceMappingURL=index.js.map","\"use strict\";\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n    if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n    return (kind === \"a\" ? f.call(receiver, value) : f ? f.value = value : state.set(receiver, value)), value;\n};\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n    return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar _AbstractChatCompletionRunner_instances, _AbstractChatCompletionRunner_connectedPromise, _AbstractChatCompletionRunner_resolveConnectedPromise, _AbstractChatCompletionRunner_rejectConnectedPromise, _AbstractChatCompletionRunner_endPromise, _AbstractChatCompletionRunner_resolveEndPromise, _AbstractChatCompletionRunner_rejectEndPromise, _AbstractChatCompletionRunner_listeners, _AbstractChatCompletionRunner_ended, _AbstractChatCompletionRunner_errored, _AbstractChatCompletionRunner_aborted, _AbstractChatCompletionRunner_catchingPromiseCreated, _AbstractChatCompletionRunner_getFinalContent, _AbstractChatCompletionRunner_getFinalMessage, _AbstractChatCompletionRunner_getFinalFunctionCall, _AbstractChatCompletionRunner_getFinalFunctionCallResult, _AbstractChatCompletionRunner_calculateTotalUsage, _AbstractChatCompletionRunner_handleError, _AbstractChatCompletionRunner_validateParams, _AbstractChatCompletionRunner_stringifyFunctionCallResult;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AbstractChatCompletionRunner = void 0;\nconst error_1 = require(\"openai/error\");\nconst RunnableFunction_1 = require(\"./RunnableFunction.js\");\nconst chatCompletionUtils_1 = require(\"./chatCompletionUtils.js\");\nconst DEFAULT_MAX_CHAT_COMPLETIONS = 10;\nclass AbstractChatCompletionRunner {\n    constructor() {\n        _AbstractChatCompletionRunner_instances.add(this);\n        this.controller = new AbortController();\n        _AbstractChatCompletionRunner_connectedPromise.set(this, void 0);\n        _AbstractChatCompletionRunner_resolveConnectedPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_rejectConnectedPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_endPromise.set(this, void 0);\n        _AbstractChatCompletionRunner_resolveEndPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_rejectEndPromise.set(this, () => { });\n        _AbstractChatCompletionRunner_listeners.set(this, {});\n        this._chatCompletions = [];\n        this.messages = [];\n        _AbstractChatCompletionRunner_ended.set(this, false);\n        _AbstractChatCompletionRunner_errored.set(this, false);\n        _AbstractChatCompletionRunner_aborted.set(this, false);\n        
_AbstractChatCompletionRunner_catchingPromiseCreated.set(this, false);\n        _AbstractChatCompletionRunner_handleError.set(this, (error) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_errored, true, \"f\");\n            if (error instanceof Error && error.name === 'AbortError') {\n                error = new error_1.APIUserAbortError();\n            }\n            if (error instanceof error_1.APIUserAbortError) {\n                __classPrivateFieldSet(this, _AbstractChatCompletionRunner_aborted, true, \"f\");\n                return this._emit('abort', error);\n            }\n            if (error instanceof error_1.OpenAIError) {\n                return this._emit('error', error);\n            }\n            if (error instanceof Error) {\n                const openAIError = new error_1.OpenAIError(error.message);\n                // @ts-ignore\n                openAIError.cause = error;\n                return this._emit('error', openAIError);\n            }\n            return this._emit('error', new error_1.OpenAIError(String(error)));\n        });\n        __classPrivateFieldSet(this, _AbstractChatCompletionRunner_connectedPromise, new Promise((resolve, reject) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_resolveConnectedPromise, resolve, \"f\");\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_rejectConnectedPromise, reject, \"f\");\n        }), \"f\");\n        __classPrivateFieldSet(this, _AbstractChatCompletionRunner_endPromise, new Promise((resolve, reject) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_resolveEndPromise, resolve, \"f\");\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_rejectEndPromise, reject, \"f\");\n        }), \"f\");\n        // Don't let these promises cause unhandled rejection errors.\n        // we will manually cause an unhandled rejection error later\n        // if the user hasn't registered any error listener or called\n        // any promise-returning method.\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_connectedPromise, \"f\").catch(() => { });\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_endPromise, \"f\").catch(() => { });\n    }\n    _run(executor) {\n        // Unfortunately if we call `executor()` immediately we get runtime errors about\n        // references to `this` before the `super()` constructor call returns.\n        setTimeout(() => {\n            executor().then(() => {\n                this._emitFinal();\n                this._emit('end');\n            }, __classPrivateFieldGet(this, _AbstractChatCompletionRunner_handleError, \"f\"));\n        }, 0);\n    }\n    _addChatCompletion(chatCompletion) {\n        this._chatCompletions.push(chatCompletion);\n        this._emit('chatCompletion', chatCompletion);\n        const message = chatCompletion.choices[0]?.message;\n        if (message)\n            this._addMessage(message);\n        return chatCompletion;\n    }\n    _addMessage(message, emit = true) {\n        this.messages.push(message);\n        if (emit) {\n            this._emit('message', message);\n            if (((0, chatCompletionUtils_1.isFunctionMessage)(message) || (0, chatCompletionUtils_1.isToolMessage)(message)) && message.content) {\n                // Note, this assumes that {role: 'tool', content: …} is always the result of a call of tool of type=function.\n                this._emit('functionCallResult', message.content);\n       
     }\n            else if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.function_call) {\n                this._emit('functionCall', message.function_call);\n            }\n            else if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.tool_calls) {\n                for (const tool_call of message.tool_calls) {\n                    if (tool_call.type === 'function') {\n                        this._emit('functionCall', tool_call.function);\n                    }\n                }\n            }\n        }\n    }\n    _connected() {\n        if (this.ended)\n            return;\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_resolveConnectedPromise, \"f\").call(this);\n        this._emit('connect');\n    }\n    get ended() {\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_ended, \"f\");\n    }\n    get errored() {\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_errored, \"f\");\n    }\n    get aborted() {\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_aborted, \"f\");\n    }\n    abort() {\n        this.controller.abort();\n    }\n    /**\n     * Adds the listener function to the end of the listeners array for the event.\n     * No checks are made to see if the listener has already been added. Multiple calls passing\n     * the same combination of event and listener will result in the listener being added, and\n     * called, multiple times.\n     * @returns this ChatCompletionStream, so that calls can be chained\n     */\n    on(event, listener) {\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] || (__classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] = []);\n        listeners.push({ listener });\n        return this;\n    }\n    /**\n     * Removes the specified listener from the listener array for the event.\n     * off() will remove, at most, one instance of a listener from the listener array. If any single\n     * listener has been added multiple times to the listener array for the specified event, then\n     * off() must be called multiple times to remove each instance.\n     * @returns this ChatCompletionStream, so that calls can be chained\n     */\n    off(event, listener) {\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event];\n        if (!listeners)\n            return this;\n        const index = listeners.findIndex((l) => l.listener === listener);\n        if (index >= 0)\n            listeners.splice(index, 1);\n        return this;\n    }\n    /**\n     * Adds a one-time listener function for the event. The next time the event is triggered,\n     * this listener is removed and then invoked.\n     * @returns this ChatCompletionStream, so that calls can be chained\n     */\n    once(event, listener) {\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] || (__classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] = []);\n        listeners.push({ listener, once: true });\n        return this;\n    }\n    /**\n     * This is similar to `.once()`, but returns a Promise that resolves the next time\n     * the event is triggered, instead of calling a listener callback.\n     * @returns a Promise that resolves the next time given event is triggered,\n     * or rejects if an error is emitted.  
(If you request the 'error' event,\n     * returns a promise that resolves with the error).\n     *\n     * Example:\n     *\n     *   const message = await stream.emitted('message') // rejects if the stream errors\n     */\n    emitted(event) {\n        return new Promise((resolve, reject) => {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, true, \"f\");\n            if (event !== 'error')\n                this.once('error', reject);\n            this.once(event, resolve);\n        });\n    }\n    async done() {\n        __classPrivateFieldSet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, true, \"f\");\n        await __classPrivateFieldGet(this, _AbstractChatCompletionRunner_endPromise, \"f\");\n    }\n    /**\n     * @returns a promise that resolves with the final ChatCompletion, or rejects\n     * if an error occurred or the stream ended prematurely without producing a ChatCompletion.\n     */\n    async finalChatCompletion() {\n        await this.done();\n        const completion = this._chatCompletions[this._chatCompletions.length - 1];\n        if (!completion)\n            throw new error_1.OpenAIError('stream ended without producing a ChatCompletion');\n        return completion;\n    }\n    /**\n     * @returns a promise that resolves with the content of the final ChatCompletionMessage, or rejects\n     * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.\n     */\n    async finalContent() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalContent).call(this);\n    }\n    /**\n     * @returns a promise that resolves with the the final assistant ChatCompletionMessage response,\n     * or rejects if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.\n     */\n    async finalMessage() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalMessage).call(this);\n    }\n    /**\n     * @returns a promise that resolves with the content of the final FunctionCall, or rejects\n     * if an error occurred or the stream ended prematurely without producing a ChatCompletionMessage.\n     */\n    async finalFunctionCall() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this);\n    }\n    async finalFunctionCallResult() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this);\n    }\n    async totalUsage() {\n        await this.done();\n        return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_calculateTotalUsage).call(this);\n    }\n    allChatCompletions() {\n        return [...this._chatCompletions];\n    }\n    _emit(event, ...args) {\n        // make sure we don't emit any events after end\n        if (__classPrivateFieldGet(this, _AbstractChatCompletionRunner_ended, \"f\"))\n            return;\n        if (event === 'end') {\n            __classPrivateFieldSet(this, _AbstractChatCompletionRunner_ended, true, \"f\");\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_resolveEndPromise, 
\"f\").call(this);\n        }\n        const listeners = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event];\n        if (listeners) {\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_listeners, \"f\")[event] = listeners.filter((l) => !l.once);\n            listeners.forEach(({ listener }) => listener(...args));\n        }\n        if (event === 'abort') {\n            const error = args[0];\n            if (!__classPrivateFieldGet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, \"f\") && !listeners?.length) {\n                Promise.reject(error);\n            }\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectConnectedPromise, \"f\").call(this, error);\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectEndPromise, \"f\").call(this, error);\n            this._emit('end');\n            return;\n        }\n        if (event === 'error') {\n            // NOTE: _emit('error', error) should only be called from #handleError().\n            const error = args[0];\n            if (!__classPrivateFieldGet(this, _AbstractChatCompletionRunner_catchingPromiseCreated, \"f\") && !listeners?.length) {\n                // Trigger an unhandled rejection if the user hasn't registered any error handlers.\n                // If you are seeing stack traces here, make sure to handle errors via either:\n                // - runner.on('error', () => ...)\n                // - await runner.done()\n                // - await runner.finalChatCompletion()\n                // - etc.\n                Promise.reject(error);\n            }\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectConnectedPromise, \"f\").call(this, error);\n            __classPrivateFieldGet(this, _AbstractChatCompletionRunner_rejectEndPromise, \"f\").call(this, error);\n            this._emit('end');\n        }\n    }\n    _emitFinal() {\n        const completion = this._chatCompletions[this._chatCompletions.length - 1];\n        if (completion)\n            this._emit('finalChatCompletion', completion);\n        const finalMessage = this.messages[this.messages.length - 1];\n        if (finalMessage)\n            this._emit('finalMessage', finalMessage);\n        const finalContent = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalContent).call(this);\n        if (finalContent)\n            this._emit('finalContent', finalContent);\n        const finalFunctionCall = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCall).call(this);\n        if (finalFunctionCall)\n            this._emit('finalFunctionCall', finalFunctionCall);\n        const finalFunctionCallResult = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalFunctionCallResult).call(this);\n        if (finalFunctionCallResult != null)\n            this._emit('finalFunctionCallResult', finalFunctionCallResult);\n        if (this._chatCompletions.some((c) => c.usage)) {\n            this._emit('totalUsage', __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_calculateTotalUsage).call(this));\n        }\n    }\n    async _createChatCompletion(completions, params, options) {\n        const signal = options?.signal;\n        if (signal) {\n            if (signal.aborted)\n            
    this.controller.abort();\n            signal.addEventListener('abort', () => this.controller.abort());\n        }\n        __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_validateParams).call(this, params);\n        const chatCompletion = await completions.create({ ...params, stream: false }, { ...options, signal: this.controller.signal });\n        this._connected();\n        return this._addChatCompletion(chatCompletion);\n    }\n    async _runChatCompletion(completions, params, options) {\n        for (const message of params.messages) {\n            this._addMessage(message, false);\n        }\n        return await this._createChatCompletion(completions, params, options);\n    }\n    async _runFunctions(completions, params, options) {\n        const role = 'function';\n        const { function_call = 'auto', stream, ...restParams } = params;\n        const singleFunctionToCall = typeof function_call !== 'string' && function_call?.name;\n        const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {};\n        const functionsByName = {};\n        for (const f of params.functions) {\n            functionsByName[f.name || f.function.name] = f;\n        }\n        const functions = params.functions.map((f) => ({\n            name: f.name || f.function.name,\n            parameters: f.parameters,\n            description: f.description,\n        }));\n        for (const message of params.messages) {\n            this._addMessage(message, false);\n        }\n        for (let i = 0; i < maxChatCompletions; ++i) {\n            const chatCompletion = await this._createChatCompletion(completions, {\n                ...restParams,\n                function_call,\n                functions,\n                messages: [...this.messages],\n            }, options);\n            const message = chatCompletion.choices[0]?.message;\n            if (!message) {\n                throw new error_1.OpenAIError(`missing message in ChatCompletion response`);\n            }\n            if (!message.function_call)\n                return;\n            const { name, arguments: args } = message.function_call;\n            const fn = functionsByName[name];\n            if (!fn) {\n                const content = `Invalid function_call: ${JSON.stringify(name)}. Available options are: ${functions\n                    .map((f) => JSON.stringify(f.name))\n                    .join(', ')}. Please try again`;\n                this._addMessage({ role, name, content });\n                continue;\n            }\n            else if (singleFunctionToCall && singleFunctionToCall !== name) {\n                const content = `Invalid function_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`;\n                this._addMessage({ role, name, content });\n                continue;\n            }\n            let parsed;\n            try {\n                parsed = (0, RunnableFunction_1.isRunnableFunctionWithParse)(fn) ? await fn.parse(args) : args;\n            }\n            catch (error) {\n                this._addMessage({\n                    role,\n                    name,\n                    content: error instanceof Error ? 
error.message : String(error),\n                });\n                continue;\n            }\n            // @ts-expect-error it can't rule out `never` type.\n            const rawContent = await fn.function(parsed, this);\n            const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent);\n            this._addMessage({ role, name, content });\n            if (singleFunctionToCall)\n                return;\n        }\n    }\n    async _runTools(completions, params, options) {\n        const role = 'tool';\n        const { tool_choice = 'auto', stream, ...restParams } = params;\n        const singleFunctionToCall = typeof tool_choice !== 'string' && tool_choice?.function?.name;\n        const { maxChatCompletions = DEFAULT_MAX_CHAT_COMPLETIONS } = options || {};\n        const functionsByName = {};\n        for (const f of params.tools) {\n            if (f.type === 'function') {\n                functionsByName[f.function.name || f.function.function.name] = f.function;\n            }\n        }\n        const tools = 'tools' in params ?\n            params.tools.map((t) => t.type === 'function' ?\n                {\n                    type: 'function',\n                    function: {\n                        name: t.function.name || t.function.function.name,\n                        parameters: t.function.parameters,\n                        description: t.function.description,\n                    },\n                }\n                : t)\n            : undefined;\n        for (const message of params.messages) {\n            this._addMessage(message, false);\n        }\n        for (let i = 0; i < maxChatCompletions; ++i) {\n            const chatCompletion = await this._createChatCompletion(completions, {\n                ...restParams,\n                tool_choice,\n                tools,\n                messages: [...this.messages],\n            }, options);\n            const message = chatCompletion.choices[0]?.message;\n            if (!message) {\n                throw new error_1.OpenAIError(`missing message in ChatCompletion response`);\n            }\n            if (!message.tool_calls)\n                return;\n            for (const tool_call of message.tool_calls) {\n                if (tool_call.type !== 'function')\n                    continue;\n                const tool_call_id = tool_call.id;\n                const { name, arguments: args } = tool_call.function;\n                const fn = functionsByName[name];\n                if (!fn) {\n                    const content = `Invalid tool_call: ${JSON.stringify(name)}. Available options are: ${tools\n                        .map((f) => JSON.stringify(f.function.name))\n                        .join(', ')}. Please try again`;\n                    this._addMessage({ role, tool_call_id, content });\n                    continue;\n                }\n                else if (singleFunctionToCall && singleFunctionToCall !== name) {\n                    const content = `Invalid tool_call: ${JSON.stringify(name)}. ${JSON.stringify(singleFunctionToCall)} requested. Please try again`;\n                    this._addMessage({ role, tool_call_id, content });\n                    continue;\n                }\n                let parsed;\n                try {\n                    parsed = (0, RunnableFunction_1.isRunnableFunctionWithParse)(fn) ? 
await fn.parse(args) : args;\n                }\n                catch (error) {\n                    const content = error instanceof Error ? error.message : String(error);\n                    this._addMessage({ role, tool_call_id, content });\n                    continue;\n                }\n                // @ts-expect-error it can't rule out `never` type.\n                const rawContent = await fn.function(parsed, this);\n                const content = __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_stringifyFunctionCallResult).call(this, rawContent);\n                this._addMessage({ role, tool_call_id, content });\n                if (singleFunctionToCall)\n                    return;\n            }\n        }\n    }\n}\nexports.AbstractChatCompletionRunner = AbstractChatCompletionRunner;\n_AbstractChatCompletionRunner_connectedPromise = new WeakMap(), _AbstractChatCompletionRunner_resolveConnectedPromise = new WeakMap(), _AbstractChatCompletionRunner_rejectConnectedPromise = new WeakMap(), _AbstractChatCompletionRunner_endPromise = new WeakMap(), _AbstractChatCompletionRunner_resolveEndPromise = new WeakMap(), _AbstractChatCompletionRunner_rejectEndPromise = new WeakMap(), _AbstractChatCompletionRunner_listeners = new WeakMap(), _AbstractChatCompletionRunner_ended = new WeakMap(), _AbstractChatCompletionRunner_errored = new WeakMap(), _AbstractChatCompletionRunner_aborted = new WeakMap(), _AbstractChatCompletionRunner_catchingPromiseCreated = new WeakMap(), _AbstractChatCompletionRunner_handleError = new WeakMap(), _AbstractChatCompletionRunner_instances = new WeakSet(), _AbstractChatCompletionRunner_getFinalContent = function _AbstractChatCompletionRunner_getFinalContent() {\n    return __classPrivateFieldGet(this, _AbstractChatCompletionRunner_instances, \"m\", _AbstractChatCompletionRunner_getFinalMessage).call(this).content;\n}, _AbstractChatCompletionRunner_getFinalMessage = function _AbstractChatCompletionRunner_getFinalMessage() {\n    let i = this.messages.length;\n    while (i-- > 0) {\n        const message = this.messages[i];\n        if ((0, chatCompletionUtils_1.isAssistantMessage)(message)) {\n            return message;\n        }\n    }\n    throw new error_1.OpenAIError('stream ended without producing a ChatCompletionMessage with role=assistant');\n}, _AbstractChatCompletionRunner_getFinalFunctionCall = function _AbstractChatCompletionRunner_getFinalFunctionCall() {\n    for (let i = this.messages.length - 1; i >= 0; i--) {\n        const message = this.messages[i];\n        if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message?.function_call) {\n            return message.function_call;\n        }\n    }\n    return;\n}, _AbstractChatCompletionRunner_getFinalFunctionCallResult = function _AbstractChatCompletionRunner_getFinalFunctionCallResult() {\n    for (let i = this.messages.length - 1; i >= 0; i--) {\n        const message = this.messages[i];\n        if ((0, chatCompletionUtils_1.isFunctionMessage)(message) && message.content != null) {\n            return message.content;\n        }\n    }\n    return;\n}, _AbstractChatCompletionRunner_calculateTotalUsage = function _AbstractChatCompletionRunner_calculateTotalUsage() {\n    const total = {\n        completion_tokens: 0,\n        prompt_tokens: 0,\n        total_tokens: 0,\n    };\n    for (const { usage } of this._chatCompletions) {\n        if (usage) {\n            total.completion_tokens += usage.completion_tokens;\n        
    total.prompt_tokens += usage.prompt_tokens;\n            total.total_tokens += usage.total_tokens;\n        }\n    }\n    return total;\n}, _AbstractChatCompletionRunner_validateParams = function _AbstractChatCompletionRunner_validateParams(params) {\n    if (params.n != null && params.n > 1) {\n        throw new error_1.OpenAIError('ChatCompletion convenience helpers only support n=1 at this time. To use n>1, please use chat.completions.create() directly.');\n    }\n}, _AbstractChatCompletionRunner_stringifyFunctionCallResult = function _AbstractChatCompletionRunner_stringifyFunctionCallResult(rawContent) {\n    return (typeof rawContent === 'string' ? rawContent\n        : rawContent === undefined ? 'undefined'\n            : JSON.stringify(rawContent));\n};\n//# sourceMappingURL=AbstractChatCompletionRunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChatCompletionRunner = void 0;\nconst AbstractChatCompletionRunner_1 = require(\"./AbstractChatCompletionRunner.js\");\nconst chatCompletionUtils_1 = require(\"./chatCompletionUtils.js\");\nclass ChatCompletionRunner extends AbstractChatCompletionRunner_1.AbstractChatCompletionRunner {\n    static runFunctions(completions, params, options) {\n        const runner = new ChatCompletionRunner();\n        runner._run(() => runner._runFunctions(completions, params, options));\n        return runner;\n    }\n    static runTools(completions, params, options) {\n        const runner = new ChatCompletionRunner();\n        runner._run(() => runner._runTools(completions, params, options));\n        return runner;\n    }\n    _addMessage(message) {\n        super._addMessage(message);\n        if ((0, chatCompletionUtils_1.isAssistantMessage)(message) && message.content) {\n            this._emit('content', message.content);\n        }\n    }\n}\nexports.ChatCompletionRunner = ChatCompletionRunner;\n//# sourceMappingURL=ChatCompletionRunner.js.map","\"use strict\";\nvar __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a getter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot read private member from an object whose class did not declare it\");\n    return kind === \"m\" ? f : kind === \"a\" ? f.call(receiver) : f ? f.value : state.get(receiver);\n};\nvar __classPrivateFieldSet = (this && this.__classPrivateFieldSet) || function (receiver, state, value, kind, f) {\n    if (kind === \"m\") throw new TypeError(\"Private method is not writable\");\n    if (kind === \"a\" && !f) throw new TypeError(\"Private accessor was defined without a setter\");\n    if (typeof state === \"function\" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError(\"Cannot write private member to an object whose class did not declare it\");\n    return (kind === \"a\" ? f.call(receiver, value) : f ? 
f.value = value : state.set(receiver, value)), value;\n};\nvar _ChatCompletionStream_instances, _ChatCompletionStream_currentChatCompletionSnapshot, _ChatCompletionStream_beginRequest, _ChatCompletionStream_addChunk, _ChatCompletionStream_endRequest, _ChatCompletionStream_accumulateChatCompletion;\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChatCompletionStream = void 0;\nconst error_1 = require(\"openai/error\");\nconst AbstractChatCompletionRunner_1 = require(\"./AbstractChatCompletionRunner.js\");\nconst streaming_1 = require(\"openai/streaming\");\nclass ChatCompletionStream extends AbstractChatCompletionRunner_1.AbstractChatCompletionRunner {\n    constructor() {\n        super(...arguments);\n        _ChatCompletionStream_instances.add(this);\n        _ChatCompletionStream_currentChatCompletionSnapshot.set(this, void 0);\n    }\n    get currentChatCompletionSnapshot() {\n        return __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, \"f\");\n    }\n    /**\n     * Intended for use on the frontend, consuming a stream produced with\n     * `.toReadableStream()` on the backend.\n     *\n     * Note that messages sent to the model do not appear in `.on('message')`\n     * in this context.\n     */\n    static fromReadableStream(stream) {\n        const runner = new ChatCompletionStream();\n        runner._run(() => runner._fromReadableStream(stream));\n        return runner;\n    }\n    static createChatCompletion(completions, params, options) {\n        const runner = new ChatCompletionStream();\n        runner._run(() => runner._runChatCompletion(completions, { ...params, stream: true }, { ...options, headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'stream' } }));\n        return runner;\n    }\n    async _createChatCompletion(completions, params, options) {\n        const signal = options?.signal;\n        if (signal) {\n            if (signal.aborted)\n                this.controller.abort();\n            signal.addEventListener('abort', () => this.controller.abort());\n        }\n        __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_beginRequest).call(this);\n        const stream = await completions.create({ ...params, stream: true }, { ...options, signal: this.controller.signal });\n        this._connected();\n        for await (const chunk of stream) {\n            __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_addChunk).call(this, chunk);\n        }\n        if (stream.controller.signal?.aborted) {\n            throw new error_1.APIUserAbortError();\n        }\n        return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_endRequest).call(this));\n    }\n    async _fromReadableStream(readableStream, options) {\n        const signal = options?.signal;\n        if (signal) {\n            if (signal.aborted)\n                this.controller.abort();\n            signal.addEventListener('abort', () => this.controller.abort());\n        }\n        __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_beginRequest).call(this);\n        this._connected();\n        const stream = streaming_1.Stream.fromReadableStream(readableStream, this.controller);\n        let chatId;\n        for await (const chunk of stream) {\n            if (chatId && chatId !== chunk.id) {\n                // A new request has been made.\n        
        this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_endRequest).call(this));\n            }\n            __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_addChunk).call(this, chunk);\n            chatId = chunk.id;\n        }\n        if (stream.controller.signal?.aborted) {\n            throw new error_1.APIUserAbortError();\n        }\n        return this._addChatCompletion(__classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_endRequest).call(this));\n    }\n    [(_ChatCompletionStream_currentChatCompletionSnapshot = new WeakMap(), _ChatCompletionStream_instances = new WeakSet(), _ChatCompletionStream_beginRequest = function _ChatCompletionStream_beginRequest() {\n        if (this.ended)\n            return;\n        __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, \"f\");\n    }, _ChatCompletionStream_addChunk = function _ChatCompletionStream_addChunk(chunk) {\n        if (this.ended)\n            return;\n        const completion = __classPrivateFieldGet(this, _ChatCompletionStream_instances, \"m\", _ChatCompletionStream_accumulateChatCompletion).call(this, chunk);\n        this._emit('chunk', chunk, completion);\n        const delta = chunk.choices[0]?.delta?.content;\n        const snapshot = completion.choices[0]?.message;\n        if (delta != null && snapshot?.role === 'assistant' && snapshot?.content) {\n            this._emit('content', delta, snapshot.content);\n        }\n    }, _ChatCompletionStream_endRequest = function _ChatCompletionStream_endRequest() {\n        if (this.ended) {\n            throw new error_1.OpenAIError(`stream has ended, this shouldn't happen`);\n        }\n        const snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, \"f\");\n        if (!snapshot) {\n            throw new error_1.OpenAIError(`request ended without sending any chunks`);\n        }\n        __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, undefined, \"f\");\n        return finalizeChatCompletion(snapshot);\n    }, _ChatCompletionStream_accumulateChatCompletion = function _ChatCompletionStream_accumulateChatCompletion(chunk) {\n        var _a, _b;\n        let snapshot = __classPrivateFieldGet(this, _ChatCompletionStream_currentChatCompletionSnapshot, \"f\");\n        const { choices, ...rest } = chunk;\n        if (!snapshot) {\n            snapshot = __classPrivateFieldSet(this, _ChatCompletionStream_currentChatCompletionSnapshot, {\n                ...rest,\n                choices: [],\n            }, \"f\");\n        }\n        else {\n            Object.assign(snapshot, rest);\n        }\n        for (const { delta, finish_reason, index, ...other } of chunk.choices) {\n            let choice = snapshot.choices[index];\n            if (!choice) {\n                snapshot.choices[index] = { finish_reason, index, message: delta, ...other };\n                continue;\n            }\n            if (finish_reason)\n                choice.finish_reason = finish_reason;\n            Object.assign(choice, other);\n            if (!delta)\n                continue; // Shouldn't happen; just in case.\n            const { content, function_call, role, tool_calls } = delta;\n            if (content)\n                choice.message.content = (choice.message.content || '') + content;\n            if (role)\n                
choice.message.role = role;\n            if (function_call) {\n                if (!choice.message.function_call) {\n                    choice.message.function_call = function_call;\n                }\n                else {\n                    if (function_call.name)\n                        choice.message.function_call.name = function_call.name;\n                    if (function_call.arguments) {\n                        (_a = choice.message.function_call).arguments ?? (_a.arguments = '');\n                        choice.message.function_call.arguments += function_call.arguments;\n                    }\n                }\n            }\n            if (tool_calls) {\n                if (!choice.message.tool_calls)\n                    choice.message.tool_calls = [];\n                for (const { index, id, type, function: fn } of tool_calls) {\n                    const tool_call = ((_b = choice.message.tool_calls)[index] ?? (_b[index] = {}));\n                    if (id)\n                        tool_call.id = id;\n                    if (type)\n                        tool_call.type = type;\n                    if (fn)\n                        tool_call.function ?? (tool_call.function = { arguments: '' });\n                    if (fn?.name)\n                        tool_call.function.name = fn.name;\n                    if (fn?.arguments)\n                        tool_call.function.arguments += fn.arguments;\n                }\n            }\n        }\n        return snapshot;\n    }, Symbol.asyncIterator)]() {\n        const pushQueue = [];\n        const readQueue = [];\n        let done = false;\n        this.on('chunk', (chunk) => {\n            const reader = readQueue.shift();\n            if (reader) {\n                reader(chunk);\n            }\n            else {\n                pushQueue.push(chunk);\n            }\n        });\n        this.on('end', () => {\n            done = true;\n            for (const reader of readQueue) {\n                reader(undefined);\n            }\n            readQueue.length = 0;\n        });\n        return {\n            next: async () => {\n                if (!pushQueue.length) {\n                    if (done) {\n                        return { value: undefined, done: true };\n                    }\n                    return new Promise((resolve) => readQueue.push(resolve)).then((chunk) => (chunk ? 
{ value: chunk, done: false } : { value: undefined, done: true }));\n                }\n                const chunk = pushQueue.shift();\n                return { value: chunk, done: false };\n            },\n        };\n    }\n    toReadableStream() {\n        const stream = new streaming_1.Stream(this[Symbol.asyncIterator].bind(this), this.controller);\n        return stream.toReadableStream();\n    }\n}\nexports.ChatCompletionStream = ChatCompletionStream;\nfunction finalizeChatCompletion(snapshot) {\n    const { id, choices, created, model } = snapshot;\n    return {\n        id,\n        choices: choices.map(({ message, finish_reason, index }) => {\n            if (!finish_reason)\n                throw new error_1.OpenAIError(`missing finish_reason for choice ${index}`);\n            const { content = null, function_call, tool_calls } = message;\n            const role = message.role; // this is what we expect; in theory it could be different which would make our types a slight lie but would be fine.\n            if (!role)\n                throw new error_1.OpenAIError(`missing role for choice ${index}`);\n            if (function_call) {\n                const { arguments: args, name } = function_call;\n                if (args == null)\n                    throw new error_1.OpenAIError(`missing function_call.arguments for choice ${index}`);\n                if (!name)\n                    throw new error_1.OpenAIError(`missing function_call.name for choice ${index}`);\n                return { message: { content, function_call: { arguments: args, name }, role }, finish_reason, index };\n            }\n            if (tool_calls) {\n                return {\n                    index,\n                    finish_reason,\n                    message: {\n                        role,\n                        content,\n                        tool_calls: tool_calls.map((tool_call, i) => {\n                            const { function: fn, type, id } = tool_call;\n                            const { arguments: args, name } = fn || {};\n                            if (id == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].id\\n${str(snapshot)}`);\n                            if (type == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].type\\n${str(snapshot)}`);\n                            if (name == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].function.name\\n${str(snapshot)}`);\n                            if (args == null)\n                                throw new error_1.OpenAIError(`missing choices[${index}].tool_calls[${i}].function.arguments\\n${str(snapshot)}`);\n                            return { id, type, function: { name, arguments: args } };\n                        }),\n                    },\n                };\n            }\n            return { message: { content: content, role }, finish_reason, index };\n        }),\n        created,\n        model,\n        object: 'chat.completion',\n    };\n}\nfunction str(x) {\n    return JSON.stringify(x);\n}\n//# sourceMappingURL=ChatCompletionStream.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ChatCompletionStreamingRunner = void 0;\nconst ChatCompletionStream_1 = require(\"./ChatCompletionStream.js\");\nclass ChatCompletionStreamingRunner extends 
ChatCompletionStream_1.ChatCompletionStream {\n    static fromReadableStream(stream) {\n        const runner = new ChatCompletionStreamingRunner();\n        runner._run(() => runner._fromReadableStream(stream));\n        return runner;\n    }\n    static runFunctions(completions, params, options) {\n        const runner = new ChatCompletionStreamingRunner();\n        runner._run(() => runner._runFunctions(completions, params, {\n            ...options,\n            headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runFunctions' },\n        }));\n        return runner;\n    }\n    static runTools(completions, params, options) {\n        const runner = new ChatCompletionStreamingRunner();\n        runner._run(() => runner._runTools(completions, params, {\n            ...options,\n            headers: { ...options?.headers, 'X-Stainless-Helper-Method': 'runTools' },\n        }));\n        return runner;\n    }\n}\nexports.ChatCompletionStreamingRunner = ChatCompletionStreamingRunner;\n//# sourceMappingURL=ChatCompletionStreamingRunner.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ParsingFunction = exports.isRunnableFunctionWithParse = void 0;\nfunction isRunnableFunctionWithParse(fn) {\n    return typeof fn.parse === 'function';\n}\nexports.isRunnableFunctionWithParse = isRunnableFunctionWithParse;\n/**\n * This is helper class for passing a `function` and `parse` where the `function`\n * argument type matches the `parse` return type.\n */\nclass ParsingFunction {\n    constructor(input) {\n        this.function = input.function;\n        this.parse = input.parse;\n        this.parameters = input.parameters;\n        this.description = input.description;\n        this.name = input.name;\n    }\n}\nexports.ParsingFunction = ParsingFunction;\n//# sourceMappingURL=RunnableFunction.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.isPresent = exports.isToolMessage = exports.isFunctionMessage = exports.isAssistantMessage = void 0;\nconst isAssistantMessage = (message) => {\n    return message?.role === 'assistant';\n};\nexports.isAssistantMessage = isAssistantMessage;\nconst isFunctionMessage = (message) => {\n    return message?.role === 'function';\n};\nexports.isFunctionMessage = isFunctionMessage;\nconst isToolMessage = (message) => {\n    return message?.role === 'tool';\n};\nexports.isToolMessage = isToolMessage;\nfunction isPresent(obj) {\n    return obj != null;\n}\nexports.isPresent = isPresent;\n//# sourceMappingURL=chatCompletionUtils.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.CursorPage = exports.Page = void 0;\nconst core_1 = require(\"./core.js\");\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass Page extends core_1.AbstractPage {\n    constructor(client, response, body, options) {\n        super(client, response, body, options);\n        this.data = body.data;\n        this.object = body.object;\n    }\n    getPaginatedItems() {\n        return this.data;\n    }\n    // @deprecated Please use `nextPageInfo()` instead\n    /**\n     * This page represents a response that isn't actually paginated at the API level\n     * so there will never be any next page params.\n     */\n    nextPageParams() {\n        return null;\n    }\n    nextPageInfo() {\n        return null;\n    }\n}\nexports.Page = Page;\nclass 
CursorPage extends core_1.AbstractPage {\n    constructor(client, response, body, options) {\n        super(client, response, body, options);\n        this.data = body.data;\n    }\n    getPaginatedItems() {\n        return this.data;\n    }\n    // @deprecated Please use `nextPageInfo()` instead\n    nextPageParams() {\n        const info = this.nextPageInfo();\n        if (!info)\n            return null;\n        if ('params' in info)\n            return info.params;\n        const params = Object.fromEntries(info.url.searchParams);\n        if (!Object.keys(params).length)\n            return null;\n        return params;\n    }\n    nextPageInfo() {\n        if (!this.data?.length) {\n            return null;\n        }\n        const next = this.data[this.data.length - 1]?.id;\n        if (!next)\n            return null;\n        return { params: { after: next } };\n    }\n}\nexports.CursorPage = CursorPage;\n//# sourceMappingURL=pagination.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.APIResource = void 0;\nclass APIResource {\n    constructor(client) {\n        this._client = client;\n    }\n}\nexports.APIResource = APIResource;\n//# sourceMappingURL=resource.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Audio = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst SpeechAPI = __importStar(require(\"openai/resources/audio/speech\"));\nconst TranscriptionsAPI = __importStar(require(\"openai/resources/audio/transcriptions\"));\nconst TranslationsAPI = __importStar(require(\"openai/resources/audio/translations\"));\nclass Audio extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.transcriptions = new TranscriptionsAPI.Transcriptions(this._client);\n        this.translations = new TranslationsAPI.Translations(this._client);\n        this.speech = new SpeechAPI.Speech(this._client);\n    }\n}\nexports.Audio = Audio;\n(function (Audio) {\n    Audio.Transcriptions = TranscriptionsAPI.Transcriptions;\n    Audio.Translations = TranslationsAPI.Translations;\n    Audio.Speech = SpeechAPI.Speech;\n})(Audio = exports.Audio || (exports.Audio = {}));\n//# sourceMappingURL=audio.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Speech = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Speech extends resource_1.APIResource {\n    /**\n     * Generates audio from the input text.\n     */\n    create(body, options) {\n        return this._client.post('/audio/speech', { body, ...options, __binaryResponse: true });\n    }\n}\nexports.Speech = Speech;\n(function (Speech) {\n})(Speech = exports.Speech || (exports.Speech = {}));\n//# sourceMappingURL=speech.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Transcriptions = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nclass Transcriptions extends resource_1.APIResource {\n    /**\n     * Transcribes audio into the input language.\n     */\n    create(body, options) {\n        return this._client.post('/audio/transcriptions', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n}\nexports.Transcriptions = Transcriptions;\n(function (Transcriptions) {\n})(Transcriptions = exports.Transcriptions || (exports.Transcriptions = {}));\n//# sourceMappingURL=transcriptions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Translations = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nclass Translations extends resource_1.APIResource {\n    /**\n     * Translates audio into English.\n     */\n    create(body, options) {\n        return this._client.post('/audio/translations', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n}\nexports.Translations = Translations;\n(function (Translations) {\n})(Translations = exports.Translations || (exports.Translations = {}));\n//# 
sourceMappingURL=translations.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AssistantsPage = exports.Assistants = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst AssistantsAPI = __importStar(require(\"openai/resources/beta/assistants/assistants\"));\nconst FilesAPI = __importStar(require(\"openai/resources/beta/assistants/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Assistants extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.files = new FilesAPI.Files(this._client);\n    }\n    /**\n     * Create an assistant with a model and instructions.\n     */\n    create(body, options) {\n        return this._client.post('/assistants', {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves an assistant.\n     */\n    retrieve(assistantId, options) {\n        return this._client.get(`/assistants/${assistantId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies an assistant.\n     */\n    update(assistantId, body, options) {\n        return this._client.post(`/assistants/${assistantId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list({}, query);\n        }\n        return this._client.getAPIList('/assistants', AssistantsPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Delete an assistant.\n     */\n    del(assistantId, options) {\n        return this._client.delete(`/assistants/${assistantId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Assistants = Assistants;\nclass AssistantsPage extends pagination_1.CursorPage {\n}\nexports.AssistantsPage = AssistantsPage;\n(function (Assistants) {\n    Assistants.AssistantsPage = AssistantsAPI.AssistantsPage;\n    Assistants.Files = FilesAPI.Files;\n   
 Assistants.AssistantFilesPage = FilesAPI.AssistantFilesPage;\n})(Assistants = exports.Assistants || (exports.Assistants = {}));\n//# sourceMappingURL=assistants.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.AssistantFilesPage = exports.Files = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst FilesAPI = __importStar(require(\"openai/resources/beta/assistants/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Files extends resource_1.APIResource {\n    /**\n     * Create an assistant file by attaching a\n     * [File](https://platform.openai.com/docs/api-reference/files) to an\n     * [assistant](https://platform.openai.com/docs/api-reference/assistants).\n     */\n    create(assistantId, body, options) {\n        return this._client.post(`/assistants/${assistantId}/files`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves an AssistantFile.\n     */\n    retrieve(assistantId, fileId, options) {\n        return this._client.get(`/assistants/${assistantId}/files/${fileId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(assistantId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(assistantId, {}, query);\n        }\n        return this._client.getAPIList(`/assistants/${assistantId}/files`, AssistantFilesPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Delete an assistant file.\n     */\n    del(assistantId, fileId, options) {\n        return this._client.delete(`/assistants/${assistantId}/files/${fileId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Files = Files;\nclass AssistantFilesPage extends pagination_1.CursorPage {\n}\nexports.AssistantFilesPage = AssistantFilesPage;\n(function (Files) {\n    Files.AssistantFilesPage = FilesAPI.AssistantFilesPage;\n})(Files = exports.Files || (exports.Files = {}));\n//# sourceMappingURL=files.js.map","\"use strict\";\n// File generated from our OpenAPI spec 
by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Beta = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst AssistantsAPI = __importStar(require(\"openai/resources/beta/assistants/assistants\"));\nconst ChatAPI = __importStar(require(\"openai/resources/beta/chat/chat\"));\nconst ThreadsAPI = __importStar(require(\"openai/resources/beta/threads/threads\"));\nclass Beta extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.chat = new ChatAPI.Chat(this._client);\n        this.assistants = new AssistantsAPI.Assistants(this._client);\n        this.threads = new ThreadsAPI.Threads(this._client);\n    }\n}\nexports.Beta = Beta;\n(function (Beta) {\n    Beta.Chat = ChatAPI.Chat;\n    Beta.Assistants = AssistantsAPI.Assistants;\n    Beta.AssistantsPage = AssistantsAPI.AssistantsPage;\n    Beta.Threads = ThreadsAPI.Threads;\n})(Beta = exports.Beta || (exports.Beta = {}));\n//# sourceMappingURL=beta.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Chat = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst CompletionsAPI = __importStar(require(\"openai/resources/beta/chat/completions\"));\nclass Chat extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.completions = new CompletionsAPI.Completions(this._client);\n    }\n}\nexports.Chat = Chat;\n(function (Chat) {\n    Chat.Completions = CompletionsAPI.Completions;\n})(Chat = exports.Chat || (exports.Chat = {}));\n//# sourceMappingURL=chat.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = exports.ChatCompletionStream = exports.ParsingFunction = exports.ChatCompletionStreamingRunner = exports.ChatCompletionRunner = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst ChatCompletionRunner_1 = require(\"openai/lib/ChatCompletionRunner\");\nvar ChatCompletionRunner_2 = require(\"openai/lib/ChatCompletionRunner\");\nObject.defineProperty(exports, \"ChatCompletionRunner\", { enumerable: true, get: function () { return ChatCompletionRunner_2.ChatCompletionRunner; } });\nconst ChatCompletionStreamingRunner_1 = require(\"openai/lib/ChatCompletionStreamingRunner\");\nvar ChatCompletionStreamingRunner_2 = require(\"openai/lib/ChatCompletionStreamingRunner\");\nObject.defineProperty(exports, \"ChatCompletionStreamingRunner\", { enumerable: true, get: function () { return ChatCompletionStreamingRunner_2.ChatCompletionStreamingRunner; } });\nvar RunnableFunction_1 = require(\"openai/lib/RunnableFunction\");\nObject.defineProperty(exports, \"ParsingFunction\", { enumerable: true, get: function () { return RunnableFunction_1.ParsingFunction; } });\nconst ChatCompletionStream_1 = require(\"openai/lib/ChatCompletionStream\");\nvar ChatCompletionStream_2 = require(\"openai/lib/ChatCompletionStream\");\nObject.defineProperty(exports, \"ChatCompletionStream\", { enumerable: true, get: function () { return ChatCompletionStream_2.ChatCompletionStream; } });\nclass Completions extends resource_1.APIResource {\n    runFunctions(body, options) {\n        if (body.stream) {\n            return ChatCompletionStreamingRunner_1.ChatCompletionStreamingRunner.runFunctions(this._client.chat.completions, body, options);\n        }\n        return ChatCompletionRunner_1.ChatCompletionRunner.runFunctions(this._client.chat.completions, body, options);\n    }\n    runTools(body, options) {\n        if (body.stream) {\n            return ChatCompletionStreamingRunner_1.ChatCompletionStreamingRunner.runTools(this._client.chat.completions, body, options);\n        }\n        return ChatCompletionRunner_1.ChatCompletionRunner.runTools(this._client.chat.completions, body, options);\n    }\n    /**\n     * Creates a chat completion stream\n     */\n    stream(body, options) {\n        return ChatCompletionStream_1.ChatCompletionStream.createChatCompletion(this._client.chat.completions, body, 
options);\n    }\n}\nexports.Completions = Completions;\n//# sourceMappingURL=completions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.MessageFilesPage = exports.Files = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst FilesAPI = __importStar(require(\"openai/resources/beta/threads/messages/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Files extends resource_1.APIResource {\n    /**\n     * Retrieves a message file.\n     */\n    retrieve(threadId, messageId, fileId, options) {\n        return this._client.get(`/threads/${threadId}/messages/${messageId}/files/${fileId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, messageId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, messageId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/messages/${messageId}/files`, MessageFilesPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Files = Files;\nclass MessageFilesPage extends pagination_1.CursorPage {\n}\nexports.MessageFilesPage = MessageFilesPage;\n(function (Files) {\n    Files.MessageFilesPage = FilesAPI.MessageFilesPage;\n})(Files = exports.Files || (exports.Files = {}));\n//# sourceMappingURL=files.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ThreadMessagesPage = exports.Messages = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst MessagesAPI = __importStar(require(\"openai/resources/beta/threads/messages/messages\"));\nconst FilesAPI = __importStar(require(\"openai/resources/beta/threads/messages/files\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Messages extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.files = new FilesAPI.Files(this._client);\n    }\n    /**\n     * Create a message.\n     */\n    create(threadId, body, options) {\n        return this._client.post(`/threads/${threadId}/messages`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieve a message.\n     */\n    retrieve(threadId, messageId, options) {\n        return this._client.get(`/threads/${threadId}/messages/${messageId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies a message.\n     */\n    update(threadId, messageId, body, options) {\n        return this._client.post(`/threads/${threadId}/messages/${messageId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/messages`, ThreadMessagesPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Messages = Messages;\nclass ThreadMessagesPage extends pagination_1.CursorPage {\n}\nexports.ThreadMessagesPage = ThreadMessagesPage;\n(function (Messages) {\n    Messages.ThreadMessagesPage = MessagesAPI.ThreadMessagesPage;\n    Messages.Files = FilesAPI.Files;\n    Messages.MessageFilesPage = FilesAPI.MessageFilesPage;\n})(Messages = exports.Messages || (exports.Messages = {}));\n//# sourceMappingURL=messages.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.RunsPage = exports.Runs = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst RunsAPI = __importStar(require(\"openai/resources/beta/threads/runs/runs\"));\nconst StepsAPI = __importStar(require(\"openai/resources/beta/threads/runs/steps\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Runs extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.steps = new StepsAPI.Steps(this._client);\n    }\n    /**\n     * Create a run.\n     */\n    create(threadId, body, options) {\n        return this._client.post(`/threads/${threadId}/runs`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves a run.\n     */\n    retrieve(threadId, runId, options) {\n        return this._client.get(`/threads/${threadId}/runs/${runId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies a run.\n     */\n    update(threadId, runId, body, options) {\n        return this._client.post(`/threads/${threadId}/runs/${runId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/runs`, RunsPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Cancels a run that is `in_progress`.\n     */\n    cancel(threadId, runId, options) {\n        return this._client.post(`/threads/${threadId}/runs/${runId}/cancel`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * When a run has the `status: \"requires_action\"` and `required_action.type` is\n     * `submit_tool_outputs`, this endpoint can be used to submit the outputs from the\n     * tool calls once they're all completed. 
All outputs must be submitted in a single\n     * request.\n     */\n    submitToolOutputs(threadId, runId, body, options) {\n        return this._client.post(`/threads/${threadId}/runs/${runId}/submit_tool_outputs`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Runs = Runs;\nclass RunsPage extends pagination_1.CursorPage {\n}\nexports.RunsPage = RunsPage;\n(function (Runs) {\n    Runs.RunsPage = RunsAPI.RunsPage;\n    Runs.Steps = StepsAPI.Steps;\n    Runs.RunStepsPage = StepsAPI.RunStepsPage;\n})(Runs = exports.Runs || (exports.Runs = {}));\n//# sourceMappingURL=runs.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.RunStepsPage = exports.Steps = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst StepsAPI = __importStar(require(\"openai/resources/beta/threads/runs/steps\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Steps extends resource_1.APIResource {\n    /**\n     * Retrieves a run step.\n     */\n    retrieve(threadId, runId, stepId, options) {\n        return this._client.get(`/threads/${threadId}/runs/${runId}/steps/${stepId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    list(threadId, runId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list(threadId, runId, {}, query);\n        }\n        return this._client.getAPIList(`/threads/${threadId}/runs/${runId}/steps`, RunStepsPage, {\n            query,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Steps = Steps;\nclass RunStepsPage extends pagination_1.CursorPage {\n}\nexports.RunStepsPage = RunStepsPage;\n(function (Steps) {\n    Steps.RunStepsPage = StepsAPI.RunStepsPage;\n})(Steps = exports.Steps || (exports.Steps = {}));\n//# sourceMappingURL=steps.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Threads = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst MessagesAPI = __importStar(require(\"openai/resources/beta/threads/messages/messages\"));\nconst RunsAPI = __importStar(require(\"openai/resources/beta/threads/runs/runs\"));\nclass Threads extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.runs = new RunsAPI.Runs(this._client);\n        this.messages = new MessagesAPI.Messages(this._client);\n    }\n    create(body = {}, options) {\n        if ((0, core_1.isRequestOptions)(body)) {\n            return this.create({}, body);\n        }\n        return this._client.post('/threads', {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Retrieves a thread.\n     */\n    retrieve(threadId, options) {\n        return this._client.get(`/threads/${threadId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Modifies a thread.\n     */\n    update(threadId, body, options) {\n        return this._client.post(`/threads/${threadId}`, {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Delete a thread.\n     */\n    del(threadId, options) {\n        return this._client.delete(`/threads/${threadId}`, {\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n    /**\n     * Create a thread and run it in one request.\n     */\n    createAndRun(body, options) {\n        return this._client.post('/threads/runs', {\n            body,\n            ...options,\n            headers: { 'OpenAI-Beta': 'assistants=v1', ...options?.headers },\n        });\n    }\n}\nexports.Threads = Threads;\n(function (Threads) {\n    Threads.Runs = RunsAPI.Runs;\n    Threads.RunsPage = RunsAPI.RunsPage;\n    Threads.Messages = MessagesAPI.Messages;\n    Threads.ThreadMessagesPage = MessagesAPI.ThreadMessagesPage;\n})(Threads = exports.Threads || (exports.Threads = {}));\n//# sourceMappingURL=threads.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Chat = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst CompletionsAPI = __importStar(require(\"openai/resources/chat/completions\"));\nclass Chat extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.completions = new CompletionsAPI.Completions(this._client);\n    }\n}\nexports.Chat = Chat;\n(function (Chat) {\n    Chat.Completions = CompletionsAPI.Completions;\n})(Chat = exports.Chat || (exports.Chat = {}));\n//# sourceMappingURL=chat.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Completions extends resource_1.APIResource {\n    create(body, options) {\n        return this._client.post('/chat/completions', { body, ...options, stream: body.stream ?? false });\n    }\n}\nexports.Completions = Completions;\n(function (Completions) {\n})(Completions = exports.Completions || (exports.Completions = {}));\n//# sourceMappingURL=completions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = exports.Chat = void 0;\nvar chat_1 = require(\"./chat.js\");\nObject.defineProperty(exports, \"Chat\", { enumerable: true, get: function () { return chat_1.Chat; } });\nvar completions_1 = require(\"./completions.js\");\nObject.defineProperty(exports, \"Completions\", { enumerable: true, get: function () { return completions_1.Completions; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Completions = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Completions extends resource_1.APIResource {\n    create(body, options) {\n        return this._client.post('/completions', { body, ...options, stream: body.stream ?? 
false });\n    }\n}\nexports.Completions = Completions;\n(function (Completions) {\n})(Completions = exports.Completions || (exports.Completions = {}));\n//# sourceMappingURL=completions.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Edits = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Edits extends resource_1.APIResource {\n    /**\n     * Creates a new edit for the provided input, instruction, and parameters.\n     *\n     * @deprecated The Edits API is deprecated; please use Chat Completions instead.\n     *\n     * https://openai.com/blog/gpt-4-api-general-availability#deprecation-of-the-edits-api\n     */\n    create(body, options) {\n        return this._client.post('/edits', { body, ...options });\n    }\n}\nexports.Edits = Edits;\n(function (Edits) {\n})(Edits = exports.Edits || (exports.Edits = {}));\n//# sourceMappingURL=edits.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Embeddings = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Embeddings extends resource_1.APIResource {\n    /**\n     * Creates an embedding vector representing the input text.\n     */\n    create(body, options) {\n        return this._client.post('/embeddings', { body, ...options });\n    }\n}\nexports.Embeddings = Embeddings;\n(function (Embeddings) {\n})(Embeddings = exports.Embeddings || (exports.Embeddings = {}));\n//# sourceMappingURL=embeddings.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FileObjectsPage = exports.Files = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst core_2 = require(\"openai/core\");\nconst error_1 = require(\"openai/error\");\nconst FilesAPI = __importStar(require(\"openai/resources/files\"));\nconst core_3 = require(\"openai/core\");\nconst pagination_1 = require(\"openai/pagination\");\nclass Files extends resource_1.APIResource {\n    /**\n     * Upload a file that can be used across various endpoints/features. The size of\n     * all the files uploaded by one organization can be up to 100 GB.\n     *\n     * The size of individual files for can be a maximum of 512MB. 
See the\n     * [Assistants Tools guide](https://platform.openai.com/docs/assistants/tools) to\n     * learn more about the types of files supported. The Fine-tuning API only supports\n     * `.jsonl` files.\n     *\n     * Please [contact us](https://help.openai.com/) if you need to increase these\n     * storage limits.\n     */\n    create(body, options) {\n        return this._client.post('/files', (0, core_3.multipartFormRequestOptions)({ body, ...options }));\n    }\n    /**\n     * Returns information about a specific file.\n     */\n    retrieve(fileId, options) {\n        return this._client.get(`/files/${fileId}`, options);\n    }\n    list(query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list({}, query);\n        }\n        return this._client.getAPIList('/files', FileObjectsPage, { query, ...options });\n    }\n    /**\n     * Delete a file.\n     */\n    del(fileId, options) {\n        return this._client.delete(`/files/${fileId}`, options);\n    }\n    /**\n     * Returns the contents of the specified file.\n     */\n    content(fileId, options) {\n        return this._client.get(`/files/${fileId}/content`, { ...options, __binaryResponse: true });\n    }\n    /**\n     * Returns the contents of the specified file.\n     *\n     * @deprecated The `.content()` method should be used instead\n     */\n    retrieveContent(fileId, options) {\n        return this._client.get(`/files/${fileId}/content`, {\n            ...options,\n            headers: { Accept: 'application/json', ...options?.headers },\n        });\n    }\n    /**\n     * Waits for the given file to be processed, default timeout is 30 mins.\n     */\n    async waitForProcessing(id, { pollInterval = 5000, maxWait = 30 * 60 * 1000 } = {}) {\n        const TERMINAL_STATES = new Set(['processed', 'error', 'deleted']);\n        const start = Date.now();\n        let file = await this.retrieve(id);\n        while (!file.status || !TERMINAL_STATES.has(file.status)) {\n            await (0, core_2.sleep)(pollInterval);\n            file = await this.retrieve(id);\n            if (Date.now() - start > maxWait) {\n                throw new error_1.APIConnectionTimeoutError({\n                    message: `Giving up on waiting for file ${id} to finish processing after ${maxWait} milliseconds.`,\n                });\n            }\n        }\n        return file;\n    }\n}\nexports.Files = Files;\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass FileObjectsPage extends pagination_1.Page {\n}\nexports.FileObjectsPage = FileObjectsPage;\n(function (Files) {\n    Files.FileObjectsPage = FilesAPI.FileObjectsPage;\n})(Files = exports.Files || (exports.Files = {}));\n//# sourceMappingURL=files.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FineTunesPage = exports.FineTunes = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst FineTunesAPI = __importStar(require(\"openai/resources/fine-tunes\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass FineTunes extends resource_1.APIResource {\n    /**\n     * Creates a job that fine-tunes a specified model from a given dataset.\n     *\n     * Response includes details of the enqueued job including job status and the name\n     * of the fine-tuned models once complete.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/legacy-fine-tuning)\n     */\n    create(body, options) {\n        return this._client.post('/fine-tunes', { body, ...options });\n    }\n    /**\n     * Gets info about the fine-tune job.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/legacy-fine-tuning)\n     */\n    retrieve(fineTuneId, options) {\n        return this._client.get(`/fine-tunes/${fineTuneId}`, options);\n    }\n    /**\n     * List your organization's fine-tuning jobs\n     */\n    list(options) {\n        return this._client.getAPIList('/fine-tunes', FineTunesPage, options);\n    }\n    /**\n     * Immediately cancel a fine-tune job.\n     */\n    cancel(fineTuneId, options) {\n        return this._client.post(`/fine-tunes/${fineTuneId}/cancel`, options);\n    }\n    listEvents(fineTuneId, query, options) {\n        return this._client.get(`/fine-tunes/${fineTuneId}/events`, {\n            query,\n            timeout: 86400000,\n            ...options,\n            stream: query?.stream ?? false,\n        });\n    }\n}\nexports.FineTunes = FineTunes;\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass FineTunesPage extends pagination_1.Page {\n}\nexports.FineTunesPage = FineTunesPage;\n(function (FineTunes) {\n    FineTunes.FineTunesPage = FineTunesAPI.FineTunesPage;\n})(FineTunes = exports.FineTunes || (exports.FineTunes = {}));\n//# sourceMappingURL=fine-tunes.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FineTuning = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst JobsAPI = __importStar(require(\"openai/resources/fine-tuning/jobs\"));\nclass FineTuning extends resource_1.APIResource {\n    constructor() {\n        super(...arguments);\n        this.jobs = new JobsAPI.Jobs(this._client);\n    }\n}\nexports.FineTuning = FineTuning;\n(function (FineTuning) {\n    FineTuning.Jobs = JobsAPI.Jobs;\n    FineTuning.FineTuningJobsPage = JobsAPI.FineTuningJobsPage;\n    FineTuning.FineTuningJobEventsPage = JobsAPI.FineTuningJobEventsPage;\n})(FineTuning = exports.FineTuning || (exports.FineTuning = {}));\n//# sourceMappingURL=fine-tuning.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.FineTuningJobEventsPage = exports.FineTuningJobsPage = exports.Jobs = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nconst JobsAPI = __importStar(require(\"openai/resources/fine-tuning/jobs\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Jobs extends resource_1.APIResource {\n    /**\n     * Creates a job that fine-tunes a specified model from a given dataset.\n     *\n     * Response includes details of the enqueued job including job status and the name\n     * of the fine-tuned models once complete.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)\n     */\n    create(body, options) {\n        return this._client.post('/fine_tuning/jobs', { body, ...options });\n    }\n    /**\n     * Get info about a fine-tuning job.\n     *\n     * [Learn more about fine-tuning](https://platform.openai.com/docs/guides/fine-tuning)\n     */\n    retrieve(fineTuningJobId, options) {\n        return this._client.get(`/fine_tuning/jobs/${fineTuningJobId}`, options);\n    }\n    list(query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.list({}, query);\n        }\n        return this._client.getAPIList('/fine_tuning/jobs', FineTuningJobsPage, { query, ...options });\n    }\n    /**\n     * Immediately cancel a fine-tune job.\n     */\n    cancel(fineTuningJobId, options) {\n        return this._client.post(`/fine_tuning/jobs/${fineTuningJobId}/cancel`, options);\n    }\n    listEvents(fineTuningJobId, query = {}, options) {\n        if ((0, core_1.isRequestOptions)(query)) {\n            return this.listEvents(fineTuningJobId, {}, query);\n        }\n        return this._client.getAPIList(`/fine_tuning/jobs/${fineTuningJobId}/events`, FineTuningJobEventsPage, {\n            query,\n            ...options,\n        });\n    }\n}\nexports.Jobs = Jobs;\nclass FineTuningJobsPage extends pagination_1.CursorPage {\n}\nexports.FineTuningJobsPage = FineTuningJobsPage;\nclass FineTuningJobEventsPage extends pagination_1.CursorPage {\n}\nexports.FineTuningJobEventsPage = FineTuningJobEventsPage;\n(function (Jobs) {\n    Jobs.FineTuningJobsPage = JobsAPI.FineTuningJobsPage;\n    Jobs.FineTuningJobEventsPage = JobsAPI.FineTuningJobEventsPage;\n})(Jobs = exports.Jobs || (exports.Jobs = {}));\n//# sourceMappingURL=jobs.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Images = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst core_1 = require(\"openai/core\");\nclass Images extends resource_1.APIResource {\n    /**\n     * Creates a variation of a given image.\n     */\n    createVariation(body, options) {\n        return this._client.post('/images/variations', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n    /**\n     * Creates an edited or extended image 
given an original image and a prompt.\n     */\n    edit(body, options) {\n        return this._client.post('/images/edits', (0, core_1.multipartFormRequestOptions)({ body, ...options }));\n    }\n    /**\n     * Creates an image given a prompt.\n     */\n    generate(body, options) {\n        return this._client.post('/images/generations', { body, ...options });\n    }\n}\nexports.Images = Images;\n(function (Images) {\n})(Images = exports.Images || (exports.Images = {}));\n//# sourceMappingURL=images.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __exportStar = (this && this.__exportStar) || function(m, exports) {\n    for (var p in m) if (p !== \"default\" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Moderations = exports.Models = exports.ModelsPage = exports.Images = exports.FineTuning = exports.FineTunes = exports.FineTunesPage = exports.Files = exports.FileObjectsPage = exports.Edits = exports.Embeddings = exports.Completions = exports.Beta = exports.Audio = void 0;\n__exportStar(require(\"./chat/index.js\"), exports);\n__exportStar(require(\"./shared.js\"), exports);\nvar audio_1 = require(\"./audio/audio.js\");\nObject.defineProperty(exports, \"Audio\", { enumerable: true, get: function () { return audio_1.Audio; } });\nvar beta_1 = require(\"./beta/beta.js\");\nObject.defineProperty(exports, \"Beta\", { enumerable: true, get: function () { return beta_1.Beta; } });\nvar completions_1 = require(\"./completions.js\");\nObject.defineProperty(exports, \"Completions\", { enumerable: true, get: function () { return completions_1.Completions; } });\nvar embeddings_1 = require(\"./embeddings.js\");\nObject.defineProperty(exports, \"Embeddings\", { enumerable: true, get: function () { return embeddings_1.Embeddings; } });\nvar edits_1 = require(\"./edits.js\");\nObject.defineProperty(exports, \"Edits\", { enumerable: true, get: function () { return edits_1.Edits; } });\nvar files_1 = require(\"./files.js\");\nObject.defineProperty(exports, \"FileObjectsPage\", { enumerable: true, get: function () { return files_1.FileObjectsPage; } });\nObject.defineProperty(exports, \"Files\", { enumerable: true, get: function () { return files_1.Files; } });\nvar fine_tunes_1 = require(\"./fine-tunes.js\");\nObject.defineProperty(exports, \"FineTunesPage\", { enumerable: true, get: function () { return fine_tunes_1.FineTunesPage; } });\nObject.defineProperty(exports, \"FineTunes\", { enumerable: true, get: function () { return fine_tunes_1.FineTunes; } });\nvar fine_tuning_1 = require(\"./fine-tuning/fine-tuning.js\");\nObject.defineProperty(exports, \"FineTuning\", { enumerable: true, get: function () { return fine_tuning_1.FineTuning; } });\nvar images_1 = require(\"./images.js\");\nObject.defineProperty(exports, \"Images\", { enumerable: true, get: function () { return images_1.Images; } });\nvar models_1 = require(\"./models.js\");\nObject.defineProperty(exports, 
\"ModelsPage\", { enumerable: true, get: function () { return models_1.ModelsPage; } });\nObject.defineProperty(exports, \"Models\", { enumerable: true, get: function () { return models_1.Models; } });\nvar moderations_1 = require(\"./moderations.js\");\nObject.defineProperty(exports, \"Moderations\", { enumerable: true, get: function () { return moderations_1.Moderations; } });\n//# sourceMappingURL=index.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nvar __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    var desc = Object.getOwnPropertyDescriptor(m, k);\n    if (!desc || (\"get\" in desc ? !m.__esModule : desc.writable || desc.configurable)) {\n      desc = { enumerable: true, get: function() { return m[k]; } };\n    }\n    Object.defineProperty(o, k2, desc);\n}) : (function(o, m, k, k2) {\n    if (k2 === undefined) k2 = k;\n    o[k2] = m[k];\n}));\nvar __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {\n    Object.defineProperty(o, \"default\", { enumerable: true, value: v });\n}) : function(o, v) {\n    o[\"default\"] = v;\n});\nvar __importStar = (this && this.__importStar) || function (mod) {\n    if (mod && mod.__esModule) return mod;\n    var result = {};\n    if (mod != null) for (var k in mod) if (k !== \"default\" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);\n    __setModuleDefault(result, mod);\n    return result;\n};\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.ModelsPage = exports.Models = void 0;\nconst resource_1 = require(\"openai/resource\");\nconst ModelsAPI = __importStar(require(\"openai/resources/models\"));\nconst pagination_1 = require(\"openai/pagination\");\nclass Models extends resource_1.APIResource {\n    /**\n     * Retrieves a model instance, providing basic information about the model such as\n     * the owner and permissioning.\n     */\n    retrieve(model, options) {\n        return this._client.get(`/models/${model}`, options);\n    }\n    /**\n     * Lists the currently available models, and provides basic information about each\n     * one such as the owner and availability.\n     */\n    list(options) {\n        return this._client.getAPIList('/models', ModelsPage, options);\n    }\n    /**\n     * Delete a fine-tuned model. 
You must have the Owner role in your organization to\n     * delete a model.\n     */\n    del(model, options) {\n        return this._client.delete(`/models/${model}`, options);\n    }\n}\nexports.Models = Models;\n/**\n * Note: no pagination actually occurs yet, this is for forwards-compatibility.\n */\nclass ModelsPage extends pagination_1.Page {\n}\nexports.ModelsPage = ModelsPage;\n(function (Models) {\n    Models.ModelsPage = ModelsAPI.ModelsPage;\n})(Models = exports.Models || (exports.Models = {}));\n//# sourceMappingURL=models.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Moderations = void 0;\nconst resource_1 = require(\"openai/resource\");\nclass Moderations extends resource_1.APIResource {\n    /**\n     * Classifies if text violates OpenAI's Content Policy\n     */\n    create(body, options) {\n        return this._client.post('/moderations', { body, ...options });\n    }\n}\nexports.Moderations = Moderations;\n(function (Moderations) {\n})(Moderations = exports.Moderations || (exports.Moderations = {}));\n//# sourceMappingURL=moderations.js.map","\"use strict\";\n// File generated from our OpenAPI spec by Stainless.\nObject.defineProperty(exports, \"__esModule\", { value: true });\n//# sourceMappingURL=shared.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.Stream = void 0;\nconst index_1 = require(\"./_shims/index.js\");\nconst error_1 = require(\"./error.js\");\nconst error_2 = require(\"openai/error\");\nclass Stream {\n    constructor(iterator, controller) {\n        this.iterator = iterator;\n        this.controller = controller;\n    }\n    static fromSSEResponse(response, controller) {\n        let consumed = false;\n        const decoder = new SSEDecoder();\n        async function* iterMessages() {\n            if (!response.body) {\n                controller.abort();\n                throw new error_1.OpenAIError(`Attempted to iterate over a response with no body`);\n            }\n            const lineDecoder = new LineDecoder();\n            const iter = readableStreamAsyncIterable(response.body);\n            for await (const chunk of iter) {\n                for (const line of lineDecoder.decode(chunk)) {\n                    const sse = decoder.decode(line);\n                    if (sse)\n                        yield sse;\n                }\n            }\n            for (const line of lineDecoder.flush()) {\n                const sse = decoder.decode(line);\n                if (sse)\n                    yield sse;\n            }\n        }\n        async function* iterator() {\n            if (consumed) {\n                throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');\n            }\n            consumed = true;\n            let done = false;\n            try {\n                for await (const sse of iterMessages()) {\n                    if (done)\n                        continue;\n                    if (sse.data.startsWith('[DONE]')) {\n                        done = true;\n                        continue;\n                    }\n                    if (sse.event === null) {\n                        let data;\n                        try {\n                            data = JSON.parse(sse.data);\n                        }\n                        catch (e) {\n                            console.error(`Could not parse message into JSON:`, 
sse.data);\n                            console.error(`From chunk:`, sse.raw);\n                            throw e;\n                        }\n                        if (data && data.error) {\n                            throw new error_2.APIError(undefined, data.error, undefined, undefined);\n                        }\n                        yield data;\n                    }\n                }\n                done = true;\n            }\n            catch (e) {\n                // If the user calls `stream.controller.abort()`, we should exit without throwing.\n                if (e instanceof Error && e.name === 'AbortError')\n                    return;\n                throw e;\n            }\n            finally {\n                // If the user `break`s, abort the ongoing request.\n                if (!done)\n                    controller.abort();\n            }\n        }\n        return new Stream(iterator, controller);\n    }\n    /**\n     * Generates a Stream from a newline-separated ReadableStream\n     * where each item is a JSON value.\n     */\n    static fromReadableStream(readableStream, controller) {\n        let consumed = false;\n        async function* iterLines() {\n            const lineDecoder = new LineDecoder();\n            const iter = readableStreamAsyncIterable(readableStream);\n            for await (const chunk of iter) {\n                for (const line of lineDecoder.decode(chunk)) {\n                    yield line;\n                }\n            }\n            for (const line of lineDecoder.flush()) {\n                yield line;\n            }\n        }\n        async function* iterator() {\n            if (consumed) {\n                throw new Error('Cannot iterate over a consumed stream, use `.tee()` to split the stream.');\n            }\n            consumed = true;\n            let done = false;\n            try {\n                for await (const line of iterLines()) {\n                    if (done)\n                        continue;\n                    if (line)\n                        yield JSON.parse(line);\n                }\n                done = true;\n            }\n            catch (e) {\n                // If the user calls `stream.controller.abort()`, we should exit without throwing.\n                if (e instanceof Error && e.name === 'AbortError')\n                    return;\n                throw e;\n            }\n            finally {\n                // If the user `break`s, abort the ongoing request.\n                if (!done)\n                    controller.abort();\n            }\n        }\n        return new Stream(iterator, controller);\n    }\n    [Symbol.asyncIterator]() {\n        return this.iterator();\n    }\n    /**\n     * Splits the stream into two streams which can be\n     * independently read from at different speeds.\n     */\n    tee() {\n        const left = [];\n        const right = [];\n        const iterator = this.iterator();\n        const teeIterator = (queue) => {\n            return {\n                next: () => {\n                    if (queue.length === 0) {\n                        const result = iterator.next();\n                        left.push(result);\n                        right.push(result);\n                    }\n                    return queue.shift();\n                },\n            };\n        };\n        return [\n            new Stream(() => teeIterator(left), this.controller),\n            new Stream(() => teeIterator(right), this.controller),\n        ];\n    }\n  
  /**\n     * Converts this stream to a newline-separated ReadableStream of\n     * JSON stringified values in the stream\n     * which can be turned back into a Stream with `Stream.fromReadableStream()`.\n     */\n    toReadableStream() {\n        const self = this;\n        let iter;\n        const encoder = new TextEncoder();\n        return new index_1.ReadableStream({\n            async start() {\n                iter = self[Symbol.asyncIterator]();\n            },\n            async pull(ctrl) {\n                try {\n                    const { value, done } = await iter.next();\n                    if (done)\n                        return ctrl.close();\n                    const bytes = encoder.encode(JSON.stringify(value) + '\\n');\n                    ctrl.enqueue(bytes);\n                }\n                catch (err) {\n                    ctrl.error(err);\n                }\n            },\n            async cancel() {\n                await iter.return?.();\n            },\n        });\n    }\n}\nexports.Stream = Stream;\nclass SSEDecoder {\n    constructor() {\n        this.event = null;\n        this.data = [];\n        this.chunks = [];\n    }\n    decode(line) {\n        if (line.endsWith('\\r')) {\n            line = line.substring(0, line.length - 1);\n        }\n        if (!line) {\n            // empty line and we didn't previously encounter any messages\n            if (!this.event && !this.data.length)\n                return null;\n            const sse = {\n                event: this.event,\n                data: this.data.join('\\n'),\n                raw: this.chunks,\n            };\n            this.event = null;\n            this.data = [];\n            this.chunks = [];\n            return sse;\n        }\n        this.chunks.push(line);\n        if (line.startsWith(':')) {\n            return null;\n        }\n        let [fieldname, _, value] = partition(line, ':');\n        if (value.startsWith(' ')) {\n            value = value.substring(1);\n        }\n        if (fieldname === 'event') {\n            this.event = value;\n        }\n        else if (fieldname === 'data') {\n            this.data.push(value);\n        }\n        return null;\n    }\n}\n/**\n * A re-implementation of httpx's `LineDecoder` in Python that handles incrementally\n * reading lines from text.\n *\n * https://github.com/encode/httpx/blob/920333ea98118e9cf617f246905d7b202510941c/httpx/_decoders.py#L258\n */\nclass LineDecoder {\n    constructor() {\n        this.buffer = [];\n        this.trailingCR = false;\n    }\n    decode(chunk) {\n        let text = this.decodeText(chunk);\n        if (this.trailingCR) {\n            text = '\\r' + text;\n            this.trailingCR = false;\n        }\n        if (text.endsWith('\\r')) {\n            this.trailingCR = true;\n            text = text.slice(0, -1);\n        }\n        if (!text) {\n            return [];\n        }\n        const trailingNewline = LineDecoder.NEWLINE_CHARS.has(text[text.length - 1] || '');\n        let lines = text.split(LineDecoder.NEWLINE_REGEXP);\n        if (lines.length === 1 && !trailingNewline) {\n            this.buffer.push(lines[0]);\n            return [];\n        }\n        if (this.buffer.length > 0) {\n            lines = [this.buffer.join('') + lines[0], ...lines.slice(1)];\n            this.buffer = [];\n        }\n        if (!trailingNewline) {\n            this.buffer = [lines.pop() || ''];\n        }\n        return lines;\n    }\n    decodeText(bytes) {\n        if (bytes == null)\n  
          return '';\n        if (typeof bytes === 'string')\n            return bytes;\n        // Node:\n        if (typeof Buffer !== 'undefined') {\n            if (bytes instanceof Buffer) {\n                return bytes.toString();\n            }\n            if (bytes instanceof Uint8Array) {\n                return Buffer.from(bytes).toString();\n            }\n            throw new error_1.OpenAIError(`Unexpected: received non-Uint8Array (${bytes.constructor.name}) stream chunk in an environment with a global \"Buffer\" defined, which this library assumes to be Node. Please report this error.`);\n        }\n        // Browser\n        if (typeof TextDecoder !== 'undefined') {\n            if (bytes instanceof Uint8Array || bytes instanceof ArrayBuffer) {\n                this.textDecoder ?? (this.textDecoder = new TextDecoder('utf8'));\n                return this.textDecoder.decode(bytes);\n            }\n            throw new error_1.OpenAIError(`Unexpected: received non-Uint8Array/ArrayBuffer (${bytes.constructor.name}) in a web platform. Please report this error.`);\n        }\n        throw new error_1.OpenAIError(`Unexpected: neither Buffer nor TextDecoder are available as globals. Please report this error.`);\n    }\n    flush() {\n        if (!this.buffer.length && !this.trailingCR) {\n            return [];\n        }\n        const lines = [this.buffer.join('')];\n        this.buffer = [];\n        this.trailingCR = false;\n        return lines;\n    }\n}\n// prettier-ignore\nLineDecoder.NEWLINE_CHARS = new Set(['\\n', '\\r', '\\x0b', '\\x0c', '\\x1c', '\\x1d', '\\x1e', '\\x85', '\\u2028', '\\u2029']);\nLineDecoder.NEWLINE_REGEXP = /\\r\\n|[\\n\\r\\x0b\\x0c\\x1c\\x1d\\x1e\\x85\\u2028\\u2029]/g;\nfunction partition(str, delimiter) {\n    const index = str.indexOf(delimiter);\n    if (index !== -1) {\n        return [str.substring(0, index), delimiter, str.substring(index + delimiter.length)];\n    }\n    return [str, '', ''];\n}\n/**\n * Most browsers don't yet have async iterable support for ReadableStream,\n * and Node has a very different way of reading bytes from its \"ReadableStream\".\n *\n * This polyfill was pulled from https://github.com/MattiasBuelens/web-streams-polyfill/pull/122#issuecomment-1627354490\n */\nfunction readableStreamAsyncIterable(stream) {\n    if (stream[Symbol.asyncIterator])\n        return stream;\n    const reader = stream.getReader();\n    return {\n        async next() {\n            try {\n                const result = await reader.read();\n                if (result?.done)\n                    reader.releaseLock(); // release lock when stream becomes closed\n                return result;\n            }\n            catch (e) {\n                reader.releaseLock(); // release lock when stream becomes errored\n                throw e;\n            }\n        },\n        async return() {\n            const cancelPromise = reader.cancel();\n            reader.releaseLock();\n            await cancelPromise;\n            return { done: true, value: undefined };\n        },\n        [Symbol.asyncIterator]() {\n            return this;\n        },\n    };\n}\n//# sourceMappingURL=streaming.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.createForm = exports.multipartFormRequestOptions = exports.maybeMultipartFormRequestOptions = exports.isMultipartBody = exports.toFile = exports.isUploadable = exports.isBlobLike = exports.isFileLike = exports.isResponseLike = exports.fileFromPath = void 
0;\nconst index_1 = require(\"./_shims/index.js\");\nvar index_2 = require(\"./_shims/index.js\");\nObject.defineProperty(exports, \"fileFromPath\", { enumerable: true, get: function () { return index_2.fileFromPath; } });\nconst isResponseLike = (value) => value != null &&\n    typeof value === 'object' &&\n    typeof value.url === 'string' &&\n    typeof value.blob === 'function';\nexports.isResponseLike = isResponseLike;\nconst isFileLike = (value) => value != null &&\n    typeof value === 'object' &&\n    typeof value.name === 'string' &&\n    typeof value.lastModified === 'number' &&\n    (0, exports.isBlobLike)(value);\nexports.isFileLike = isFileLike;\n/**\n * The BlobLike type omits arrayBuffer() because @types/node-fetch@^2.6.4 lacks it; but this check\n * adds the arrayBuffer() method type because it is available and used at runtime\n */\nconst isBlobLike = (value) => value != null &&\n    typeof value === 'object' &&\n    typeof value.size === 'number' &&\n    typeof value.type === 'string' &&\n    typeof value.text === 'function' &&\n    typeof value.slice === 'function' &&\n    typeof value.arrayBuffer === 'function';\nexports.isBlobLike = isBlobLike;\nconst isUploadable = (value) => {\n    return (0, exports.isFileLike)(value) || (0, exports.isResponseLike)(value) || (0, index_1.isFsReadStream)(value);\n};\nexports.isUploadable = isUploadable;\n/**\n * Helper for creating a {@link File} to pass to an SDK upload method from a variety of different data formats\n * @param value the raw content of the file.  Can be an {@link Uploadable}, {@link BlobLikePart}, or {@link AsyncIterable} of {@link BlobLikePart}s\n * @param {string=} name the name of the file. If omitted, toFile will try to determine a file name from bits if possible\n * @param {Object=} options additional properties\n * @param {string=} options.type the MIME type of the content\n * @param {number=} options.lastModified the last modified timestamp\n * @returns a {@link File} with the given properties\n */\nasync function toFile(value, name, options = {}) {\n    // If it's a promise, resolve it.\n    value = await value;\n    if ((0, exports.isResponseLike)(value)) {\n        const blob = await value.blob();\n        name || (name = new URL(value.url).pathname.split(/[\\\\/]/).pop() ?? 'unknown_file');\n        return new index_1.File([blob], name, options);\n    }\n    const bits = await getBytes(value);\n    name || (name = getName(value) ?? 
'unknown_file');\n    if (!options.type) {\n        const type = bits[0]?.type;\n        if (typeof type === 'string') {\n            options = { ...options, type };\n        }\n    }\n    return new index_1.File(bits, name, options);\n}\nexports.toFile = toFile;\nasync function getBytes(value) {\n    let parts = [];\n    if (typeof value === 'string' ||\n        ArrayBuffer.isView(value) || // includes Uint8Array, Buffer, etc.\n        value instanceof ArrayBuffer) {\n        parts.push(value);\n    }\n    else if ((0, exports.isBlobLike)(value)) {\n        parts.push(await value.arrayBuffer());\n    }\n    else if (isAsyncIterableIterator(value) // includes Readable, ReadableStream, etc.\n    ) {\n        for await (const chunk of value) {\n            parts.push(chunk); // TODO, consider validating?\n        }\n    }\n    else {\n        throw new Error(`Unexpected data type: ${typeof value}; constructor: ${value?.constructor?.name}; props: ${propsForError(value)}`);\n    }\n    return parts;\n}\nfunction propsForError(value) {\n    const props = Object.getOwnPropertyNames(value);\n    return `[${props.map((p) => `\"${p}\"`).join(', ')}]`;\n}\nfunction getName(value) {\n    return (getStringFromMaybeBuffer(value.name) ||\n        getStringFromMaybeBuffer(value.filename) ||\n        // For fs.ReadStream\n        getStringFromMaybeBuffer(value.path)?.split(/[\\\\/]/).pop());\n}\nconst getStringFromMaybeBuffer = (x) => {\n    if (typeof x === 'string')\n        return x;\n    if (typeof Buffer !== 'undefined' && x instanceof Buffer)\n        return String(x);\n    return undefined;\n};\nconst isAsyncIterableIterator = (value) => value != null && typeof value === 'object' && typeof value[Symbol.asyncIterator] === 'function';\nconst isMultipartBody = (body) => body && typeof body === 'object' && body.body && body[Symbol.toStringTag] === 'MultipartBody';\nexports.isMultipartBody = isMultipartBody;\n/**\n * Returns a multipart/form-data request if any part of the given request body contains a File / Blob value.\n * Otherwise returns the request as is.\n */\nconst maybeMultipartFormRequestOptions = async (opts) => {\n    if (!hasUploadableValue(opts.body))\n        return opts;\n    const form = await (0, exports.createForm)(opts.body);\n    return (0, index_1.getMultipartRequestOptions)(form, opts);\n};\nexports.maybeMultipartFormRequestOptions = maybeMultipartFormRequestOptions;\nconst multipartFormRequestOptions = async (opts) => {\n    const form = await (0, exports.createForm)(opts.body);\n    return (0, index_1.getMultipartRequestOptions)(form, opts);\n};\nexports.multipartFormRequestOptions = multipartFormRequestOptions;\nconst createForm = async (body) => {\n    const form = new index_1.FormData();\n    await Promise.all(Object.entries(body || {}).map(([key, value]) => addFormValue(form, key, value)));\n    return form;\n};\nexports.createForm = createForm;\nconst hasUploadableValue = (value) => {\n    if ((0, exports.isUploadable)(value))\n        return true;\n    if (Array.isArray(value))\n        return value.some(hasUploadableValue);\n    if (value && typeof value === 'object') {\n        for (const k in value) {\n            if (hasUploadableValue(value[k]))\n                return true;\n        }\n    }\n    return false;\n};\nconst addFormValue = async (form, key, value) => {\n    if (value === undefined)\n        return;\n    if (value == null) {\n        throw new TypeError(`Received null for \"${key}\"; to pass null in FormData, you must use the string 'null'`);\n    }\n    
// TODO: make nested formats configurable\n    if (typeof value === 'string' || typeof value === 'number' || typeof value === 'boolean') {\n        form.append(key, String(value));\n    }\n    else if ((0, exports.isUploadable)(value)) {\n        const file = await toFile(value);\n        form.append(key, file);\n    }\n    else if (Array.isArray(value)) {\n        await Promise.all(value.map((entry) => addFormValue(form, key + '[]', entry)));\n    }\n    else if (typeof value === 'object') {\n        await Promise.all(Object.entries(value).map(([name, prop]) => addFormValue(form, `${key}[${name}]`, prop)));\n    }\n    else {\n        throw new TypeError(`Invalid value given to form, expected a string, number, boolean, object, Array, File or Blob but got ${value} instead`);\n    }\n};\n//# sourceMappingURL=uploads.js.map","\"use strict\";\nObject.defineProperty(exports, \"__esModule\", { value: true });\nexports.VERSION = void 0;\nexports.VERSION = '4.20.1'; // x-release-please-version\n//# sourceMappingURL=version.js.map","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveBlockMap = require('./resolve-block-map.js');\nvar resolveBlockSeq = require('./resolve-block-seq.js');\nvar resolveFlowCollection = require('./resolve-flow-collection.js');\n\nfunction resolveCollection(CN, ctx, token, onError, tagName, tag) {\n    const coll = token.type === 'block-map'\n        ? resolveBlockMap.resolveBlockMap(CN, ctx, token, onError, tag)\n        : token.type === 'block-seq'\n            ? resolveBlockSeq.resolveBlockSeq(CN, ctx, token, onError, tag)\n            : resolveFlowCollection.resolveFlowCollection(CN, ctx, token, onError, tag);\n    const Coll = coll.constructor;\n    // If we got a tagName matching the class, or the tag name is '!',\n    // then use the tagName from the node class used to create it.\n    if (tagName === '!' || tagName === Coll.tagName) {\n        coll.tag = Coll.tagName;\n        return coll;\n    }\n    if (tagName)\n        coll.tag = tagName;\n    return coll;\n}\nfunction composeCollection(CN, ctx, token, props, onError) {\n    const tagToken = props.tag;\n    const tagName = !tagToken\n        ? null\n        : ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg));\n    if (token.type === 'block-seq') {\n        const { anchor, newlineAfterProp: nl } = props;\n        const lastProp = anchor && tagToken\n            ? anchor.offset > tagToken.offset\n                ? anchor\n                : tagToken\n            : (anchor ?? tagToken);\n        if (lastProp && (!nl || nl.offset < lastProp.offset)) {\n            const message = 'Missing newline after block sequence props';\n            onError(lastProp, 'MISSING_CHAR', message);\n        }\n    }\n    const expType = token.type === 'block-map'\n        ? 'map'\n        : token.type === 'block-seq'\n            ? 'seq'\n            : token.start.source === '{'\n                ? 'map'\n                : 'seq';\n    // shortcut: check if it's a generic YAMLMap or YAMLSeq\n    // before jumping into the custom tag logic.\n    if (!tagToken ||\n        !tagName ||\n        tagName === '!' 
||\n        (tagName === YAMLMap.YAMLMap.tagName && expType === 'map') ||\n        (tagName === YAMLSeq.YAMLSeq.tagName && expType === 'seq')) {\n        return resolveCollection(CN, ctx, token, onError, tagName);\n    }\n    let tag = ctx.schema.tags.find(t => t.tag === tagName && t.collection === expType);\n    if (!tag) {\n        const kt = ctx.schema.knownTags[tagName];\n        if (kt && kt.collection === expType) {\n            ctx.schema.tags.push(Object.assign({}, kt, { default: false }));\n            tag = kt;\n        }\n        else {\n            if (kt?.collection) {\n                onError(tagToken, 'BAD_COLLECTION_TYPE', `${kt.tag} used for ${expType} collection, but expects ${kt.collection}`, true);\n            }\n            else {\n                onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, true);\n            }\n            return resolveCollection(CN, ctx, token, onError, tagName);\n        }\n    }\n    const coll = resolveCollection(CN, ctx, token, onError, tagName, tag);\n    const res = tag.resolve?.(coll, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg), ctx.options) ?? coll;\n    const node = identity.isNode(res)\n        ? res\n        : new Scalar.Scalar(res);\n    node.range = coll.range;\n    node.tag = tagName;\n    if (tag?.format)\n        node.format = tag.format;\n    return node;\n}\n\nexports.composeCollection = composeCollection;\n","'use strict';\n\nvar Document = require('../doc/Document.js');\nvar composeNode = require('./compose-node.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\n\nfunction composeDoc(options, directives, { offset, start, value, end }, onError) {\n    const opts = Object.assign({ _directives: directives }, options);\n    const doc = new Document.Document(undefined, opts);\n    const ctx = {\n        atKey: false,\n        atRoot: true,\n        directives: doc.directives,\n        options: doc.options,\n        schema: doc.schema\n    };\n    const props = resolveProps.resolveProps(start, {\n        indicator: 'doc-start',\n        next: value ?? end?.[0],\n        offset,\n        onError,\n        parentIndent: 0,\n        startOnNewline: true\n    });\n    if (props.found) {\n        doc.directives.docStart = true;\n        if (value &&\n            (value.type === 'block-map' || value.type === 'block-seq') &&\n            !props.hasNewline)\n            onError(props.end, 'MISSING_CHAR', 'Block collection cannot start on same line with directives-end marker');\n    }\n    // @ts-expect-error If Contents is set, let's trust the user\n    doc.contents = value\n        ? 
composeNode.composeNode(ctx, value, props, onError)\n        : composeNode.composeEmptyNode(ctx, props.end, start, null, props, onError);\n    const contentEnd = doc.contents.range[2];\n    const re = resolveEnd.resolveEnd(end, contentEnd, false, onError);\n    if (re.comment)\n        doc.comment = re.comment;\n    doc.range = [offset, contentEnd, re.offset];\n    return doc;\n}\n\nexports.composeDoc = composeDoc;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar identity = require('../nodes/identity.js');\nvar composeCollection = require('./compose-collection.js');\nvar composeScalar = require('./compose-scalar.js');\nvar resolveEnd = require('./resolve-end.js');\nvar utilEmptyScalarPosition = require('./util-empty-scalar-position.js');\n\nconst CN = { composeNode, composeEmptyNode };\nfunction composeNode(ctx, token, props, onError) {\n    const atKey = ctx.atKey;\n    const { spaceBefore, comment, anchor, tag } = props;\n    let node;\n    let isSrcToken = true;\n    switch (token.type) {\n        case 'alias':\n            node = composeAlias(ctx, token, onError);\n            if (anchor || tag)\n                onError(token, 'ALIAS_PROPS', 'An alias node must not specify any properties');\n            break;\n        case 'scalar':\n        case 'single-quoted-scalar':\n        case 'double-quoted-scalar':\n        case 'block-scalar':\n            node = composeScalar.composeScalar(ctx, token, tag, onError);\n            if (anchor)\n                node.anchor = anchor.source.substring(1);\n            break;\n        case 'block-map':\n        case 'block-seq':\n        case 'flow-collection':\n            node = composeCollection.composeCollection(CN, ctx, token, props, onError);\n            if (anchor)\n                node.anchor = anchor.source.substring(1);\n            break;\n        default: {\n            const message = token.type === 'error'\n                ? token.message\n                : `Unsupported token (type: ${token.type})`;\n            onError(token, 'UNEXPECTED_TOKEN', message);\n            node = composeEmptyNode(ctx, token.offset, undefined, null, props, onError);\n            isSrcToken = false;\n        }\n    }\n    if (anchor && node.anchor === '')\n        onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n    if (atKey &&\n        ctx.options.stringKeys &&\n        (!identity.isScalar(node) ||\n            typeof node.value !== 'string' ||\n            (node.tag && node.tag !== 'tag:yaml.org,2002:str'))) {\n        const msg = 'With stringKeys, all keys must be strings';\n        onError(tag ?? 
token, 'NON_STRING_KEY', msg);\n    }\n    if (spaceBefore)\n        node.spaceBefore = true;\n    if (comment) {\n        if (token.type === 'scalar' && token.source === '')\n            node.comment = comment;\n        else\n            node.commentBefore = comment;\n    }\n    // @ts-expect-error Type checking misses meaning of isSrcToken\n    if (ctx.options.keepSourceTokens && isSrcToken)\n        node.srcToken = token;\n    return node;\n}\nfunction composeEmptyNode(ctx, offset, before, pos, { spaceBefore, comment, anchor, tag, end }, onError) {\n    const token = {\n        type: 'scalar',\n        offset: utilEmptyScalarPosition.emptyScalarPosition(offset, before, pos),\n        indent: -1,\n        source: ''\n    };\n    const node = composeScalar.composeScalar(ctx, token, tag, onError);\n    if (anchor) {\n        node.anchor = anchor.source.substring(1);\n        if (node.anchor === '')\n            onError(anchor, 'BAD_ALIAS', 'Anchor cannot be an empty string');\n    }\n    if (spaceBefore)\n        node.spaceBefore = true;\n    if (comment) {\n        node.comment = comment;\n        node.range[2] = end;\n    }\n    return node;\n}\nfunction composeAlias({ options }, { offset, source, end }, onError) {\n    const alias = new Alias.Alias(source.substring(1));\n    if (alias.source === '')\n        onError(offset, 'BAD_ALIAS', 'Alias cannot be an empty string');\n    if (alias.source.endsWith(':'))\n        onError(offset + source.length - 1, 'BAD_ALIAS', 'Alias ending in : is ambiguous', true);\n    const valueEnd = offset + source.length;\n    const re = resolveEnd.resolveEnd(end, valueEnd, options.strict, onError);\n    alias.range = [offset, valueEnd, re.offset];\n    if (re.comment)\n        alias.comment = re.comment;\n    return alias;\n}\n\nexports.composeEmptyNode = composeEmptyNode;\nexports.composeNode = composeNode;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveBlockScalar = require('./resolve-block-scalar.js');\nvar resolveFlowScalar = require('./resolve-flow-scalar.js');\n\nfunction composeScalar(ctx, token, tagToken, onError) {\n    const { value, type, comment, range } = token.type === 'block-scalar'\n        ? resolveBlockScalar.resolveBlockScalar(ctx, token, onError)\n        : resolveFlowScalar.resolveFlowScalar(token, ctx.options.strict, onError);\n    const tagName = tagToken\n        ? ctx.directives.tagName(tagToken.source, msg => onError(tagToken, 'TAG_RESOLVE_FAILED', msg))\n        : null;\n    let tag;\n    if (ctx.options.stringKeys && ctx.atKey) {\n        tag = ctx.schema[identity.SCALAR];\n    }\n    else if (tagName)\n        tag = findScalarTagByName(ctx.schema, value, tagName, tagToken, onError);\n    else if (token.type === 'scalar')\n        tag = findScalarTagByTest(ctx, value, token, onError);\n    else\n        tag = ctx.schema[identity.SCALAR];\n    let scalar;\n    try {\n        const res = tag.resolve(value, msg => onError(tagToken ?? token, 'TAG_RESOLVE_FAILED', msg), ctx.options);\n        scalar = identity.isScalar(res) ? res : new Scalar.Scalar(res);\n    }\n    catch (error) {\n        const msg = error instanceof Error ? error.message : String(error);\n        onError(tagToken ?? 
token, 'TAG_RESOLVE_FAILED', msg);\n        scalar = new Scalar.Scalar(value);\n    }\n    scalar.range = range;\n    scalar.source = value;\n    if (type)\n        scalar.type = type;\n    if (tagName)\n        scalar.tag = tagName;\n    if (tag.format)\n        scalar.format = tag.format;\n    if (comment)\n        scalar.comment = comment;\n    return scalar;\n}\nfunction findScalarTagByName(schema, value, tagName, tagToken, onError) {\n    if (tagName === '!')\n        return schema[identity.SCALAR]; // non-specific tag\n    const matchWithTest = [];\n    for (const tag of schema.tags) {\n        if (!tag.collection && tag.tag === tagName) {\n            if (tag.default && tag.test)\n                matchWithTest.push(tag);\n            else\n                return tag;\n        }\n    }\n    for (const tag of matchWithTest)\n        if (tag.test?.test(value))\n            return tag;\n    const kt = schema.knownTags[tagName];\n    if (kt && !kt.collection) {\n        // Ensure that the known tag is available for stringifying,\n        // but does not get used by default.\n        schema.tags.push(Object.assign({}, kt, { default: false, test: undefined }));\n        return kt;\n    }\n    onError(tagToken, 'TAG_RESOLVE_FAILED', `Unresolved tag: ${tagName}`, tagName !== 'tag:yaml.org,2002:str');\n    return schema[identity.SCALAR];\n}\nfunction findScalarTagByTest({ atKey, directives, schema }, value, token, onError) {\n    const tag = schema.tags.find(tag => (tag.default === true || (atKey && tag.default === 'key')) &&\n        tag.test?.test(value)) || schema[identity.SCALAR];\n    if (schema.compat) {\n        const compat = schema.compat.find(tag => tag.default && tag.test?.test(value)) ??\n            schema[identity.SCALAR];\n        if (tag.tag !== compat.tag) {\n            const ts = directives.tagString(tag.tag);\n            const cs = directives.tagString(compat.tag);\n            const msg = `Value may be parsed as either ${ts} or ${cs}`;\n            onError(token, 'TAG_RESOLVE_FAILED', msg, true);\n        }\n    }\n    return tag;\n}\n\nexports.composeScalar = composeScalar;\n","'use strict';\n\nvar directives = require('../doc/directives.js');\nvar Document = require('../doc/Document.js');\nvar errors = require('../errors.js');\nvar identity = require('../nodes/identity.js');\nvar composeDoc = require('./compose-doc.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction getErrorPos(src) {\n    if (typeof src === 'number')\n        return [src, src + 1];\n    if (Array.isArray(src))\n        return src.length === 2 ? src : [src[0], src[1]];\n    const { offset, source } = src;\n    return [offset, offset + (typeof source === 'string' ? source.length : 1)];\n}\nfunction parsePrelude(prelude) {\n    let comment = '';\n    let atComment = false;\n    let afterEmptyLine = false;\n    for (let i = 0; i < prelude.length; ++i) {\n        const source = prelude[i];\n        switch (source[0]) {\n            case '#':\n                comment +=\n                    (comment === '' ? '' : afterEmptyLine ? 
'\\n\\n' : '\\n') +\n                        (source.substring(1) || ' ');\n                atComment = true;\n                afterEmptyLine = false;\n                break;\n            case '%':\n                if (prelude[i + 1]?.[0] !== '#')\n                    i += 1;\n                atComment = false;\n                break;\n            default:\n                // This may be wrong after doc-end, but in that case it doesn't matter\n                if (!atComment)\n                    afterEmptyLine = true;\n                atComment = false;\n        }\n    }\n    return { comment, afterEmptyLine };\n}\n/**\n * Compose a stream of CST nodes into a stream of YAML Documents.\n *\n * ```ts\n * import { Composer, Parser } from 'yaml'\n *\n * const src: string = ...\n * const tokens = new Parser().parse(src)\n * const docs = new Composer().compose(tokens)\n * ```\n */\nclass Composer {\n    constructor(options = {}) {\n        this.doc = null;\n        this.atDirectives = false;\n        this.prelude = [];\n        this.errors = [];\n        this.warnings = [];\n        this.onError = (source, code, message, warning) => {\n            const pos = getErrorPos(source);\n            if (warning)\n                this.warnings.push(new errors.YAMLWarning(pos, code, message));\n            else\n                this.errors.push(new errors.YAMLParseError(pos, code, message));\n        };\n        // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n        this.directives = new directives.Directives({ version: options.version || '1.2' });\n        this.options = options;\n    }\n    decorate(doc, afterDoc) {\n        const { comment, afterEmptyLine } = parsePrelude(this.prelude);\n        //console.log({ dc: doc.comment, prelude, comment })\n        if (comment) {\n            const dc = doc.contents;\n            if (afterDoc) {\n                doc.comment = doc.comment ? `${doc.comment}\\n${comment}` : comment;\n            }\n            else if (afterEmptyLine || doc.directives.docStart || !dc) {\n                doc.commentBefore = comment;\n            }\n            else if (identity.isCollection(dc) && !dc.flow && dc.items.length > 0) {\n                let it = dc.items[0];\n                if (identity.isPair(it))\n                    it = it.key;\n                const cb = it.commentBefore;\n                it.commentBefore = cb ? `${comment}\\n${cb}` : comment;\n            }\n            else {\n                const cb = dc.commentBefore;\n                dc.commentBefore = cb ? 
`${comment}\\n${cb}` : comment;\n            }\n        }\n        if (afterDoc) {\n            Array.prototype.push.apply(doc.errors, this.errors);\n            Array.prototype.push.apply(doc.warnings, this.warnings);\n        }\n        else {\n            doc.errors = this.errors;\n            doc.warnings = this.warnings;\n        }\n        this.prelude = [];\n        this.errors = [];\n        this.warnings = [];\n    }\n    /**\n     * Current stream status information.\n     *\n     * Mostly useful at the end of input for an empty stream.\n     */\n    streamInfo() {\n        return {\n            comment: parsePrelude(this.prelude).comment,\n            directives: this.directives,\n            errors: this.errors,\n            warnings: this.warnings\n        };\n    }\n    /**\n     * Compose tokens into documents.\n     *\n     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n     */\n    *compose(tokens, forceDoc = false, endOffset = -1) {\n        for (const token of tokens)\n            yield* this.next(token);\n        yield* this.end(forceDoc, endOffset);\n    }\n    /** Advance the composer by one CST token. */\n    *next(token) {\n        if (process.env.LOG_STREAM)\n            console.dir(token, { depth: null });\n        switch (token.type) {\n            case 'directive':\n                this.directives.add(token.source, (offset, message, warning) => {\n                    const pos = getErrorPos(token);\n                    pos[0] += offset;\n                    this.onError(pos, 'BAD_DIRECTIVE', message, warning);\n                });\n                this.prelude.push(token.source);\n                this.atDirectives = true;\n                break;\n            case 'document': {\n                const doc = composeDoc.composeDoc(this.options, this.directives, token, this.onError);\n                if (this.atDirectives && !doc.directives.docStart)\n                    this.onError(token, 'MISSING_CHAR', 'Missing directives-end/doc-start indicator line');\n                this.decorate(doc, false);\n                if (this.doc)\n                    yield this.doc;\n                this.doc = doc;\n                this.atDirectives = false;\n                break;\n            }\n            case 'byte-order-mark':\n            case 'space':\n                break;\n            case 'comment':\n            case 'newline':\n                this.prelude.push(token.source);\n                break;\n            case 'error': {\n                const msg = token.source\n                    ? 
`${token.message}: ${JSON.stringify(token.source)}`\n                    : token.message;\n                const error = new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg);\n                if (this.atDirectives || !this.doc)\n                    this.errors.push(error);\n                else\n                    this.doc.errors.push(error);\n                break;\n            }\n            case 'doc-end': {\n                if (!this.doc) {\n                    const msg = 'Unexpected doc-end without preceding document';\n                    this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', msg));\n                    break;\n                }\n                this.doc.directives.docEnd = true;\n                const end = resolveEnd.resolveEnd(token.end, token.offset + token.source.length, this.doc.options.strict, this.onError);\n                this.decorate(this.doc, true);\n                if (end.comment) {\n                    const dc = this.doc.comment;\n                    this.doc.comment = dc ? `${dc}\\n${end.comment}` : end.comment;\n                }\n                this.doc.range[2] = end.offset;\n                break;\n            }\n            default:\n                this.errors.push(new errors.YAMLParseError(getErrorPos(token), 'UNEXPECTED_TOKEN', `Unsupported token ${token.type}`));\n        }\n    }\n    /**\n     * Call at end of input to yield any remaining document.\n     *\n     * @param forceDoc - If the stream contains no document, still emit a final document including any comments and directives that would be applied to a subsequent document.\n     * @param endOffset - Should be set if `forceDoc` is also set, to set the document range end and to indicate errors correctly.\n     */\n    *end(forceDoc = false, endOffset = -1) {\n        if (this.doc) {\n            this.decorate(this.doc, true);\n            yield this.doc;\n            this.doc = null;\n        }\n        else if (forceDoc) {\n            const opts = Object.assign({ _directives: this.directives }, this.options);\n            const doc = new Document.Document(undefined, opts);\n            if (this.atDirectives)\n                this.onError(endOffset, 'MISSING_CHAR', 'Missing directives-end indicator line');\n            doc.range = [0, endOffset, endOffset];\n            this.decorate(doc, false);\n            yield doc;\n        }\n    }\n}\n\nexports.Composer = Composer;\n","'use strict';\n\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst startColMsg = 'All mapping items must start at the same column';\nfunction resolveBlockMap({ composeNode, composeEmptyNode }, ctx, bm, onError, tag) {\n    const NodeClass = tag?.nodeClass ?? YAMLMap.YAMLMap;\n    const map = new NodeClass(ctx.schema);\n    if (ctx.atRoot)\n        ctx.atRoot = false;\n    let offset = bm.offset;\n    let commentEnd = null;\n    for (const collItem of bm.items) {\n        const { start, key, sep, value } = collItem;\n        // key properties\n        const keyProps = resolveProps.resolveProps(start, {\n            indicator: 'explicit-key-ind',\n            next: key ?? 
sep?.[0],\n            offset,\n            onError,\n            parentIndent: bm.indent,\n            startOnNewline: true\n        });\n        const implicitKey = !keyProps.found;\n        if (implicitKey) {\n            if (key) {\n                if (key.type === 'block-seq')\n                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'A block sequence may not be used as an implicit map key');\n                else if ('indent' in key && key.indent !== bm.indent)\n                    onError(offset, 'BAD_INDENT', startColMsg);\n            }\n            if (!keyProps.anchor && !keyProps.tag && !sep) {\n                commentEnd = keyProps.end;\n                if (keyProps.comment) {\n                    if (map.comment)\n                        map.comment += '\\n' + keyProps.comment;\n                    else\n                        map.comment = keyProps.comment;\n                }\n                continue;\n            }\n            if (keyProps.newlineAfterProp || utilContainsNewline.containsNewline(key)) {\n                onError(key ?? start[start.length - 1], 'MULTILINE_IMPLICIT_KEY', 'Implicit keys need to be on a single line');\n            }\n        }\n        else if (keyProps.found?.indent !== bm.indent) {\n            onError(offset, 'BAD_INDENT', startColMsg);\n        }\n        // key value\n        ctx.atKey = true;\n        const keyStart = keyProps.end;\n        const keyNode = key\n            ? composeNode(ctx, key, keyProps, onError)\n            : composeEmptyNode(ctx, keyStart, start, null, keyProps, onError);\n        if (ctx.schema.compat)\n            utilFlowIndentCheck.flowIndentCheck(bm.indent, key, onError);\n        ctx.atKey = false;\n        if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n            onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n        // value properties\n        const valueProps = resolveProps.resolveProps(sep ?? [], {\n            indicator: 'map-value-ind',\n            next: value,\n            offset: keyNode.range[2],\n            onError,\n            parentIndent: bm.indent,\n            startOnNewline: !key || key.type === 'block-scalar'\n        });\n        offset = valueProps.end;\n        if (valueProps.found) {\n            if (implicitKey) {\n                if (value?.type === 'block-map' && !valueProps.hasNewline)\n                    onError(offset, 'BLOCK_AS_IMPLICIT_KEY', 'Nested mappings are not allowed in compact mappings');\n                if (ctx.options.strict &&\n                    keyProps.start < valueProps.found.offset - 1024)\n                    onError(keyNode.range, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit block mapping key');\n            }\n            // value value\n            const valueNode = value\n                ? 
composeNode(ctx, value, valueProps, onError)\n                : composeEmptyNode(ctx, offset, sep, null, valueProps, onError);\n            if (ctx.schema.compat)\n                utilFlowIndentCheck.flowIndentCheck(bm.indent, value, onError);\n            offset = valueNode.range[2];\n            const pair = new Pair.Pair(keyNode, valueNode);\n            if (ctx.options.keepSourceTokens)\n                pair.srcToken = collItem;\n            map.items.push(pair);\n        }\n        else {\n            // key with no value\n            if (implicitKey)\n                onError(keyNode.range, 'MISSING_CHAR', 'Implicit map keys need to be followed by map values');\n            if (valueProps.comment) {\n                if (keyNode.comment)\n                    keyNode.comment += '\\n' + valueProps.comment;\n                else\n                    keyNode.comment = valueProps.comment;\n            }\n            const pair = new Pair.Pair(keyNode);\n            if (ctx.options.keepSourceTokens)\n                pair.srcToken = collItem;\n            map.items.push(pair);\n        }\n    }\n    if (commentEnd && commentEnd < offset)\n        onError(commentEnd, 'IMPOSSIBLE', 'Map comment with trailing content');\n    map.range = [bm.offset, offset, commentEnd ?? offset];\n    return map;\n}\n\nexports.resolveBlockMap = resolveBlockMap;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\n\nfunction resolveBlockScalar(ctx, scalar, onError) {\n    const start = scalar.offset;\n    const header = parseBlockScalarHeader(scalar, ctx.options.strict, onError);\n    if (!header)\n        return { value: '', type: null, comment: '', range: [start, start, start] };\n    const type = header.mode === '>' ? Scalar.Scalar.BLOCK_FOLDED : Scalar.Scalar.BLOCK_LITERAL;\n    const lines = scalar.source ? splitLines(scalar.source) : [];\n    // determine the end of content & start of chomping\n    let chompStart = lines.length;\n    for (let i = lines.length - 1; i >= 0; --i) {\n        const content = lines[i][1];\n        if (content === '' || content === '\\r')\n            chompStart = i;\n        else\n            break;\n    }\n    // shortcut for empty contents\n    if (chompStart === 0) {\n        const value = header.chomp === '+' && lines.length > 0\n            ? 
'\\n'.repeat(Math.max(1, lines.length - 1))\n            : '';\n        let end = start + header.length;\n        if (scalar.source)\n            end += scalar.source.length;\n        return { value, type, comment: header.comment, range: [start, end, end] };\n    }\n    // find the indentation level to trim from start\n    let trimIndent = scalar.indent + header.indent;\n    let offset = scalar.offset + header.length;\n    let contentStart = 0;\n    for (let i = 0; i < chompStart; ++i) {\n        const [indent, content] = lines[i];\n        if (content === '' || content === '\\r') {\n            if (header.indent === 0 && indent.length > trimIndent)\n                trimIndent = indent.length;\n        }\n        else {\n            if (indent.length < trimIndent) {\n                const message = 'Block scalars with more-indented leading empty lines must use an explicit indentation indicator';\n                onError(offset + indent.length, 'MISSING_CHAR', message);\n            }\n            if (header.indent === 0)\n                trimIndent = indent.length;\n            contentStart = i;\n            if (trimIndent === 0 && !ctx.atRoot) {\n                const message = 'Block scalar values in collections must be indented';\n                onError(offset, 'BAD_INDENT', message);\n            }\n            break;\n        }\n        offset += indent.length + content.length + 1;\n    }\n    // include trailing more-indented empty lines in content\n    for (let i = lines.length - 1; i >= chompStart; --i) {\n        if (lines[i][0].length > trimIndent)\n            chompStart = i + 1;\n    }\n    let value = '';\n    let sep = '';\n    let prevMoreIndented = false;\n    // leading whitespace is kept intact\n    for (let i = 0; i < contentStart; ++i)\n        value += lines[i][0].slice(trimIndent) + '\\n';\n    for (let i = contentStart; i < chompStart; ++i) {\n        let [indent, content] = lines[i];\n        offset += indent.length + content.length + 1;\n        const crlf = content[content.length - 1] === '\\r';\n        if (crlf)\n            content = content.slice(0, -1);\n        /* istanbul ignore if already caught in lexer */\n        if (content && indent.length < trimIndent) {\n            const src = header.indent\n                ? 'explicit indentation indicator'\n                : 'first line';\n            const message = `Block scalar lines must not be less indented than their ${src}`;\n            onError(offset - content.length - (crlf ? 
2 : 1), 'BAD_INDENT', message);\n            indent = '';\n        }\n        if (type === Scalar.Scalar.BLOCK_LITERAL) {\n            value += sep + indent.slice(trimIndent) + content;\n            sep = '\\n';\n        }\n        else if (indent.length > trimIndent || content[0] === '\\t') {\n            // more-indented content within a folded block\n            if (sep === ' ')\n                sep = '\\n';\n            else if (!prevMoreIndented && sep === '\\n')\n                sep = '\\n\\n';\n            value += sep + indent.slice(trimIndent) + content;\n            sep = '\\n';\n            prevMoreIndented = true;\n        }\n        else if (content === '') {\n            // empty line\n            if (sep === '\\n')\n                value += '\\n';\n            else\n                sep = '\\n';\n        }\n        else {\n            value += sep + content;\n            sep = ' ';\n            prevMoreIndented = false;\n        }\n    }\n    switch (header.chomp) {\n        case '-':\n            break;\n        case '+':\n            for (let i = chompStart; i < lines.length; ++i)\n                value += '\\n' + lines[i][0].slice(trimIndent);\n            if (value[value.length - 1] !== '\\n')\n                value += '\\n';\n            break;\n        default:\n            value += '\\n';\n    }\n    const end = start + header.length + scalar.source.length;\n    return { value, type, comment: header.comment, range: [start, end, end] };\n}\nfunction parseBlockScalarHeader({ offset, props }, strict, onError) {\n    /* istanbul ignore if should not happen */\n    if (props[0].type !== 'block-scalar-header') {\n        onError(props[0], 'IMPOSSIBLE', 'Block scalar header not found');\n        return null;\n    }\n    const { source } = props[0];\n    const mode = source[0];\n    let indent = 0;\n    let chomp = '';\n    let error = -1;\n    for (let i = 1; i < source.length; ++i) {\n        const ch = source[i];\n        if (!chomp && (ch === '-' || ch === '+'))\n            chomp = ch;\n        else {\n            const n = Number(ch);\n            if (!indent && n)\n                indent = n;\n            else if (error === -1)\n                error = offset + i;\n        }\n    }\n    if (error !== -1)\n        onError(error, 'UNEXPECTED_TOKEN', `Block scalar header includes extra characters: ${source}`);\n    let hasSpace = false;\n    let comment = '';\n    let length = source.length;\n    for (let i = 1; i < props.length; ++i) {\n        const token = props[i];\n        switch (token.type) {\n            case 'space':\n                hasSpace = true;\n            // fallthrough\n            case 'newline':\n                length += token.source.length;\n                break;\n            case 'comment':\n                if (strict && !hasSpace) {\n                    const message = 'Comments must be separated from other tokens by white space characters';\n                    onError(token, 'MISSING_CHAR', message);\n                }\n                length += token.source.length;\n                comment = token.source.substring(1);\n                break;\n            case 'error':\n                onError(token, 'UNEXPECTED_TOKEN', token.message);\n                length += token.source.length;\n                break;\n            /* istanbul ignore next should not happen */\n            default: {\n                const message = `Unexpected token in block scalar header: ${token.type}`;\n                onError(token, 'UNEXPECTED_TOKEN', message);\n         
       const ts = token.source;\n                if (ts && typeof ts === 'string')\n                    length += ts.length;\n            }\n        }\n    }\n    return { mode, indent, chomp, comment, length };\n}\n/** @returns Array of lines split up as `[indent, content]` */\nfunction splitLines(source) {\n    const split = source.split(/\\n( *)/);\n    const first = split[0];\n    const m = first.match(/^( *)/);\n    const line0 = m?.[1]\n        ? [m[1], first.slice(m[1].length)]\n        : ['', first];\n    const lines = [line0];\n    for (let i = 1; i < split.length; i += 2)\n        lines.push([split[i], split[i + 1]]);\n    return lines;\n}\n\nexports.resolveBlockScalar = resolveBlockScalar;\n","'use strict';\n\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilFlowIndentCheck = require('./util-flow-indent-check.js');\n\nfunction resolveBlockSeq({ composeNode, composeEmptyNode }, ctx, bs, onError, tag) {\n    const NodeClass = tag?.nodeClass ?? YAMLSeq.YAMLSeq;\n    const seq = new NodeClass(ctx.schema);\n    if (ctx.atRoot)\n        ctx.atRoot = false;\n    if (ctx.atKey)\n        ctx.atKey = false;\n    let offset = bs.offset;\n    let commentEnd = null;\n    for (const { start, value } of bs.items) {\n        const props = resolveProps.resolveProps(start, {\n            indicator: 'seq-item-ind',\n            next: value,\n            offset,\n            onError,\n            parentIndent: bs.indent,\n            startOnNewline: true\n        });\n        if (!props.found) {\n            if (props.anchor || props.tag || value) {\n                if (value && value.type === 'block-seq')\n                    onError(props.end, 'BAD_INDENT', 'All sequence items must start at the same column');\n                else\n                    onError(offset, 'MISSING_CHAR', 'Sequence item without - indicator');\n            }\n            else {\n                commentEnd = props.end;\n                if (props.comment)\n                    seq.comment = props.comment;\n                continue;\n            }\n        }\n        const node = value\n            ? composeNode(ctx, value, props, onError)\n            : composeEmptyNode(ctx, props.end, start, null, props, onError);\n        if (ctx.schema.compat)\n            utilFlowIndentCheck.flowIndentCheck(bs.indent, value, onError);\n        offset = node.range[2];\n        seq.items.push(node);\n    }\n    seq.range = [bs.offset, offset, commentEnd ?? 
offset];\n    return seq;\n}\n\nexports.resolveBlockSeq = resolveBlockSeq;\n","'use strict';\n\nfunction resolveEnd(end, offset, reqSpace, onError) {\n    let comment = '';\n    if (end) {\n        let hasSpace = false;\n        let sep = '';\n        for (const token of end) {\n            const { source, type } = token;\n            switch (type) {\n                case 'space':\n                    hasSpace = true;\n                    break;\n                case 'comment': {\n                    if (reqSpace && !hasSpace)\n                        onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n                    const cb = source.substring(1) || ' ';\n                    if (!comment)\n                        comment = cb;\n                    else\n                        comment += sep + cb;\n                    sep = '';\n                    break;\n                }\n                case 'newline':\n                    if (comment)\n                        sep += source;\n                    hasSpace = true;\n                    break;\n                default:\n                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${type} at node end`);\n            }\n            offset += source.length;\n        }\n    }\n    return { comment, offset };\n}\n\nexports.resolveEnd = resolveEnd;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar Pair = require('../nodes/Pair.js');\nvar YAMLMap = require('../nodes/YAMLMap.js');\nvar YAMLSeq = require('../nodes/YAMLSeq.js');\nvar resolveEnd = require('./resolve-end.js');\nvar resolveProps = require('./resolve-props.js');\nvar utilContainsNewline = require('./util-contains-newline.js');\nvar utilMapIncludes = require('./util-map-includes.js');\n\nconst blockMsg = 'Block collections are not allowed within flow collections';\nconst isBlock = (token) => token && (token.type === 'block-map' || token.type === 'block-seq');\nfunction resolveFlowCollection({ composeNode, composeEmptyNode }, ctx, fc, onError, tag) {\n    const isMap = fc.start.source === '{';\n    const fcName = isMap ? 'flow map' : 'flow sequence';\n    const NodeClass = (tag?.nodeClass ?? (isMap ? YAMLMap.YAMLMap : YAMLSeq.YAMLSeq));\n    const coll = new NodeClass(ctx.schema);\n    coll.flow = true;\n    const atRoot = ctx.atRoot;\n    if (atRoot)\n        ctx.atRoot = false;\n    if (ctx.atKey)\n        ctx.atKey = false;\n    let offset = fc.offset + fc.start.source.length;\n    for (let i = 0; i < fc.items.length; ++i) {\n        const collItem = fc.items[i];\n        const { start, key, sep, value } = collItem;\n        const props = resolveProps.resolveProps(start, {\n            flow: fcName,\n            indicator: 'explicit-key-ind',\n            next: key ?? 
sep?.[0],\n            offset,\n            onError,\n            parentIndent: fc.indent,\n            startOnNewline: false\n        });\n        if (!props.found) {\n            if (!props.anchor && !props.tag && !sep && !value) {\n                if (i === 0 && props.comma)\n                    onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n                else if (i < fc.items.length - 1)\n                    onError(props.start, 'UNEXPECTED_TOKEN', `Unexpected empty item in ${fcName}`);\n                if (props.comment) {\n                    if (coll.comment)\n                        coll.comment += '\\n' + props.comment;\n                    else\n                        coll.comment = props.comment;\n                }\n                offset = props.end;\n                continue;\n            }\n            if (!isMap && ctx.options.strict && utilContainsNewline.containsNewline(key))\n                onError(key, // checked by containsNewline()\n                'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n        }\n        if (i === 0) {\n            if (props.comma)\n                onError(props.comma, 'UNEXPECTED_TOKEN', `Unexpected , in ${fcName}`);\n        }\n        else {\n            if (!props.comma)\n                onError(props.start, 'MISSING_CHAR', `Missing , between ${fcName} items`);\n            if (props.comment) {\n                let prevItemComment = '';\n                loop: for (const st of start) {\n                    switch (st.type) {\n                        case 'comma':\n                        case 'space':\n                            break;\n                        case 'comment':\n                            prevItemComment = st.source.substring(1);\n                            break loop;\n                        default:\n                            break loop;\n                    }\n                }\n                if (prevItemComment) {\n                    let prev = coll.items[coll.items.length - 1];\n                    if (identity.isPair(prev))\n                        prev = prev.value ?? prev.key;\n                    if (prev.comment)\n                        prev.comment += '\\n' + prevItemComment;\n                    else\n                        prev.comment = prevItemComment;\n                    props.comment = props.comment.substring(prevItemComment.length + 1);\n                }\n            }\n        }\n        if (!isMap && !sep && !props.found) {\n            // item is a value in a seq\n            // → key & sep are empty, start does not include ? or :\n            const valueNode = value\n                ? composeNode(ctx, value, props, onError)\n                : composeEmptyNode(ctx, props.end, sep, null, props, onError);\n            coll.items.push(valueNode);\n            offset = valueNode.range[2];\n            if (isBlock(value))\n                onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n        }\n        else {\n            // item is a key+value pair\n            // key value\n            ctx.atKey = true;\n            const keyStart = props.end;\n            const keyNode = key\n                ? 
composeNode(ctx, key, props, onError)\n                : composeEmptyNode(ctx, keyStart, start, null, props, onError);\n            if (isBlock(key))\n                onError(keyNode.range, 'BLOCK_IN_FLOW', blockMsg);\n            ctx.atKey = false;\n            // value properties\n            const valueProps = resolveProps.resolveProps(sep ?? [], {\n                flow: fcName,\n                indicator: 'map-value-ind',\n                next: value,\n                offset: keyNode.range[2],\n                onError,\n                parentIndent: fc.indent,\n                startOnNewline: false\n            });\n            if (valueProps.found) {\n                if (!isMap && !props.found && ctx.options.strict) {\n                    if (sep)\n                        for (const st of sep) {\n                            if (st === valueProps.found)\n                                break;\n                            if (st.type === 'newline') {\n                                onError(st, 'MULTILINE_IMPLICIT_KEY', 'Implicit keys of flow sequence pairs need to be on a single line');\n                                break;\n                            }\n                        }\n                    if (props.start < valueProps.found.offset - 1024)\n                        onError(valueProps.found, 'KEY_OVER_1024_CHARS', 'The : indicator must be at most 1024 chars after the start of an implicit flow sequence key');\n                }\n            }\n            else if (value) {\n                if ('source' in value && value.source && value.source[0] === ':')\n                    onError(value, 'MISSING_CHAR', `Missing space after : in ${fcName}`);\n                else\n                    onError(valueProps.start, 'MISSING_CHAR', `Missing , or : between ${fcName} items`);\n            }\n            // value value\n            const valueNode = value\n                ? composeNode(ctx, value, valueProps, onError)\n                : valueProps.found\n                    ? composeEmptyNode(ctx, valueProps.end, sep, null, valueProps, onError)\n                    : null;\n            if (valueNode) {\n                if (isBlock(value))\n                    onError(valueNode.range, 'BLOCK_IN_FLOW', blockMsg);\n            }\n            else if (valueProps.comment) {\n                if (keyNode.comment)\n                    keyNode.comment += '\\n' + valueProps.comment;\n                else\n                    keyNode.comment = valueProps.comment;\n            }\n            const pair = new Pair.Pair(keyNode, valueNode);\n            if (ctx.options.keepSourceTokens)\n                pair.srcToken = collItem;\n            if (isMap) {\n                const map = coll;\n                if (utilMapIncludes.mapIncludes(ctx, map.items, keyNode))\n                    onError(keyStart, 'DUPLICATE_KEY', 'Map keys must be unique');\n                map.items.push(pair);\n            }\n            else {\n                const map = new YAMLMap.YAMLMap(ctx.schema);\n                map.flow = true;\n                map.items.push(pair);\n                const endRange = (valueNode ?? keyNode).range;\n                map.range = [keyNode.range[0], endRange[1], endRange[2]];\n                coll.items.push(map);\n            }\n            offset = valueNode ? valueNode.range[2] : valueProps.end;\n        }\n    }\n    const expectedEnd = isMap ? 
'}' : ']';\n    const [ce, ...ee] = fc.end;\n    let cePos = offset;\n    if (ce && ce.source === expectedEnd)\n        cePos = ce.offset + ce.source.length;\n    else {\n        const name = fcName[0].toUpperCase() + fcName.substring(1);\n        const msg = atRoot\n            ? `${name} must end with a ${expectedEnd}`\n            : `${name} in block collection must be sufficiently indented and end with a ${expectedEnd}`;\n        onError(offset, atRoot ? 'MISSING_CHAR' : 'BAD_INDENT', msg);\n        if (ce && ce.source.length !== 1)\n            ee.unshift(ce);\n    }\n    if (ee.length > 0) {\n        const end = resolveEnd.resolveEnd(ee, cePos, ctx.options.strict, onError);\n        if (end.comment) {\n            if (coll.comment)\n                coll.comment += '\\n' + end.comment;\n            else\n                coll.comment = end.comment;\n        }\n        coll.range = [fc.offset, cePos, end.offset];\n    }\n    else {\n        coll.range = [fc.offset, cePos, cePos];\n    }\n    return coll;\n}\n\nexports.resolveFlowCollection = resolveFlowCollection;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar resolveEnd = require('./resolve-end.js');\n\nfunction resolveFlowScalar(scalar, strict, onError) {\n    const { offset, type, source, end } = scalar;\n    let _type;\n    let value;\n    const _onError = (rel, code, msg) => onError(offset + rel, code, msg);\n    switch (type) {\n        case 'scalar':\n            _type = Scalar.Scalar.PLAIN;\n            value = plainValue(source, _onError);\n            break;\n        case 'single-quoted-scalar':\n            _type = Scalar.Scalar.QUOTE_SINGLE;\n            value = singleQuotedValue(source, _onError);\n            break;\n        case 'double-quoted-scalar':\n            _type = Scalar.Scalar.QUOTE_DOUBLE;\n            value = doubleQuotedValue(source, _onError);\n            break;\n        /* istanbul ignore next should not happen */\n        default:\n            onError(scalar, 'UNEXPECTED_TOKEN', `Expected a flow scalar value, but found: ${type}`);\n            return {\n                value: '',\n                type: null,\n                comment: '',\n                range: [offset, offset + source.length, offset + source.length]\n            };\n    }\n    const valueEnd = offset + source.length;\n    const re = resolveEnd.resolveEnd(end, valueEnd, strict, onError);\n    return {\n        value,\n        type: _type,\n        comment: re.comment,\n        range: [offset, valueEnd, re.offset]\n    };\n}\nfunction plainValue(source, onError) {\n    let badChar = '';\n    switch (source[0]) {\n        /* istanbul ignore next should not happen */\n        case '\\t':\n            badChar = 'a tab character';\n            break;\n        case ',':\n            badChar = 'flow indicator character ,';\n            break;\n        case '%':\n            badChar = 'directive indicator character %';\n            break;\n        case '|':\n        case '>': {\n            badChar = `block scalar indicator ${source[0]}`;\n            break;\n        }\n        case '@':\n        case '`': {\n            badChar = `reserved character ${source[0]}`;\n            break;\n        }\n    }\n    if (badChar)\n        onError(0, 'BAD_SCALAR_START', `Plain value cannot start with ${badChar}`);\n    return foldLines(source);\n}\nfunction singleQuotedValue(source, onError) {\n    if (source[source.length - 1] !== \"'\" || source.length === 1)\n        onError(source.length, 'MISSING_CHAR', \"Missing closing 
'quote\");\n    return foldLines(source.slice(1, -1)).replace(/''/g, \"'\");\n}\nfunction foldLines(source) {\n    /**\n     * The negative lookbehind here and in the `re` RegExp is to\n     * prevent causing a polynomial search time in certain cases.\n     *\n     * The try-catch is for Safari, which doesn't support this yet:\n     * https://caniuse.com/js-regexp-lookbehind\n     */\n    let first, line;\n    try {\n        first = new RegExp('(.*?)(? wsStart ? source.slice(wsStart, i + 1) : ch;\n        }\n        else {\n            res += ch;\n        }\n    }\n    if (source[source.length - 1] !== '\"' || source.length === 1)\n        onError(source.length, 'MISSING_CHAR', 'Missing closing \"quote');\n    return res;\n}\n/**\n * Fold a single newline into a space, multiple newlines to N - 1 newlines.\n * Presumes `source[offset] === '\\n'`\n */\nfunction foldNewline(source, offset) {\n    let fold = '';\n    let ch = source[offset + 1];\n    while (ch === ' ' || ch === '\\t' || ch === '\\n' || ch === '\\r') {\n        if (ch === '\\r' && source[offset + 2] !== '\\n')\n            break;\n        if (ch === '\\n')\n            fold += '\\n';\n        offset += 1;\n        ch = source[offset + 1];\n    }\n    if (!fold)\n        fold = ' ';\n    return { fold, offset };\n}\nconst escapeCodes = {\n    '0': '\\0', // null character\n    a: '\\x07', // bell character\n    b: '\\b', // backspace\n    e: '\\x1b', // escape character\n    f: '\\f', // form feed\n    n: '\\n', // line feed\n    r: '\\r', // carriage return\n    t: '\\t', // horizontal tab\n    v: '\\v', // vertical tab\n    N: '\\u0085', // Unicode next line\n    _: '\\u00a0', // Unicode non-breaking space\n    L: '\\u2028', // Unicode line separator\n    P: '\\u2029', // Unicode paragraph separator\n    ' ': ' ',\n    '\"': '\"',\n    '/': '/',\n    '\\\\': '\\\\',\n    '\\t': '\\t'\n};\nfunction parseCharCode(source, offset, length, onError) {\n    const cc = source.substr(offset, length);\n    const ok = cc.length === length && /^[0-9a-fA-F]+$/.test(cc);\n    const code = ok ? 
parseInt(cc, 16) : NaN;\n    if (isNaN(code)) {\n        const raw = source.substr(offset - 2, length + 2);\n        onError(offset - 2, 'BAD_DQ_ESCAPE', `Invalid escape sequence ${raw}`);\n        return raw;\n    }\n    return String.fromCodePoint(code);\n}\n\nexports.resolveFlowScalar = resolveFlowScalar;\n","'use strict';\n\nfunction resolveProps(tokens, { flow, indicator, next, offset, onError, parentIndent, startOnNewline }) {\n    let spaceBefore = false;\n    let atNewline = startOnNewline;\n    let hasSpace = startOnNewline;\n    let comment = '';\n    let commentSep = '';\n    let hasNewline = false;\n    let reqSpace = false;\n    let tab = null;\n    let anchor = null;\n    let tag = null;\n    let newlineAfterProp = null;\n    let comma = null;\n    let found = null;\n    let start = null;\n    for (const token of tokens) {\n        if (reqSpace) {\n            if (token.type !== 'space' &&\n                token.type !== 'newline' &&\n                token.type !== 'comma')\n                onError(token.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n            reqSpace = false;\n        }\n        if (tab) {\n            if (atNewline && token.type !== 'comment' && token.type !== 'newline') {\n                onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n            }\n            tab = null;\n        }\n        switch (token.type) {\n            case 'space':\n                // At the doc level, tabs at line start may be parsed\n                // as leading white space rather than indentation.\n                // In a flow collection, only the parser handles indent.\n                if (!flow &&\n                    (indicator !== 'doc-start' || next?.type !== 'flow-collection') &&\n                    token.source.includes('\\t')) {\n                    tab = token;\n                }\n                hasSpace = true;\n                break;\n            case 'comment': {\n                if (!hasSpace)\n                    onError(token, 'MISSING_CHAR', 'Comments must be separated from other tokens by white space characters');\n                const cb = token.source.substring(1) || ' ';\n                if (!comment)\n                    comment = cb;\n                else\n                    comment += commentSep + cb;\n                commentSep = '';\n                atNewline = false;\n                break;\n            }\n            case 'newline':\n                if (atNewline) {\n                    if (comment)\n                        comment += token.source;\n                    else\n                        spaceBefore = true;\n                }\n                else\n                    commentSep += token.source;\n                atNewline = true;\n                hasNewline = true;\n                if (anchor || tag)\n                    newlineAfterProp = token;\n                hasSpace = true;\n                break;\n            case 'anchor':\n                if (anchor)\n                    onError(token, 'MULTIPLE_ANCHORS', 'A node can have at most one anchor');\n                if (token.source.endsWith(':'))\n                    onError(token.offset + token.source.length - 1, 'BAD_ALIAS', 'Anchor ending in : is ambiguous', true);\n                anchor = token;\n                if (start === null)\n                    start = token.offset;\n                atNewline = false;\n                hasSpace = false;\n                reqSpace = true;\n                
break;\n            case 'tag': {\n                if (tag)\n                    onError(token, 'MULTIPLE_TAGS', 'A node can have at most one tag');\n                tag = token;\n                if (start === null)\n                    start = token.offset;\n                atNewline = false;\n                hasSpace = false;\n                reqSpace = true;\n                break;\n            }\n            case indicator:\n                // Could here handle preceding comments differently\n                if (anchor || tag)\n                    onError(token, 'BAD_PROP_ORDER', `Anchors and tags must be after the ${token.source} indicator`);\n                if (found)\n                    onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.source} in ${flow ?? 'collection'}`);\n                found = token;\n                atNewline =\n                    indicator === 'seq-item-ind' || indicator === 'explicit-key-ind';\n                hasSpace = false;\n                break;\n            case 'comma':\n                if (flow) {\n                    if (comma)\n                        onError(token, 'UNEXPECTED_TOKEN', `Unexpected , in ${flow}`);\n                    comma = token;\n                    atNewline = false;\n                    hasSpace = false;\n                    break;\n                }\n            // else fallthrough\n            default:\n                onError(token, 'UNEXPECTED_TOKEN', `Unexpected ${token.type} token`);\n                atNewline = false;\n                hasSpace = false;\n        }\n    }\n    const last = tokens[tokens.length - 1];\n    const end = last ? last.offset + last.source.length : offset;\n    if (reqSpace &&\n        next &&\n        next.type !== 'space' &&\n        next.type !== 'newline' &&\n        next.type !== 'comma' &&\n        (next.type !== 'scalar' || next.source !== '')) {\n        onError(next.offset, 'MISSING_CHAR', 'Tags and anchors must be separated from the next token by white space');\n    }\n    if (tab &&\n        ((atNewline && tab.indent <= parentIndent) ||\n            next?.type === 'block-map' ||\n            next?.type === 'block-seq'))\n        onError(tab, 'TAB_AS_INDENT', 'Tabs are not allowed as indentation');\n    return {\n        comma,\n        found,\n        spaceBefore,\n        comment,\n        hasNewline,\n        anchor,\n        tag,\n        newlineAfterProp,\n        end,\n        start: start ?? 
end\n    };\n}\n\nexports.resolveProps = resolveProps;\n","'use strict';\n\nfunction containsNewline(key) {\n    if (!key)\n        return null;\n    switch (key.type) {\n        case 'alias':\n        case 'scalar':\n        case 'double-quoted-scalar':\n        case 'single-quoted-scalar':\n            if (key.source.includes('\\n'))\n                return true;\n            if (key.end)\n                for (const st of key.end)\n                    if (st.type === 'newline')\n                        return true;\n            return false;\n        case 'flow-collection':\n            for (const it of key.items) {\n                for (const st of it.start)\n                    if (st.type === 'newline')\n                        return true;\n                if (it.sep)\n                    for (const st of it.sep)\n                        if (st.type === 'newline')\n                            return true;\n                if (containsNewline(it.key) || containsNewline(it.value))\n                    return true;\n            }\n            return false;\n        default:\n            return true;\n    }\n}\n\nexports.containsNewline = containsNewline;\n","'use strict';\n\nfunction emptyScalarPosition(offset, before, pos) {\n    if (before) {\n        if (pos === null)\n            pos = before.length;\n        for (let i = pos - 1; i >= 0; --i) {\n            let st = before[i];\n            switch (st.type) {\n                case 'space':\n                case 'comment':\n                case 'newline':\n                    offset -= st.source.length;\n                    continue;\n            }\n            // Technically, an empty scalar is immediately after the last non-empty\n            // node, but it's more useful to place it after any whitespace.\n            st = before[++i];\n            while (st?.type === 'space') {\n                offset += st.source.length;\n                st = before[++i];\n            }\n            break;\n        }\n    }\n    return offset;\n}\n\nexports.emptyScalarPosition = emptyScalarPosition;\n","'use strict';\n\nvar utilContainsNewline = require('./util-contains-newline.js');\n\nfunction flowIndentCheck(indent, fc, onError) {\n    if (fc?.type === 'flow-collection') {\n        const end = fc.end[0];\n        if (end.indent === indent &&\n            (end.source === ']' || end.source === '}') &&\n            utilContainsNewline.containsNewline(fc)) {\n            const msg = 'Flow end indicator should be more indented than parent';\n            onError(end, 'BAD_INDENT', msg, true);\n        }\n    }\n}\n\nexports.flowIndentCheck = flowIndentCheck;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\n\nfunction mapIncludes(ctx, items, search) {\n    const { uniqueKeys } = ctx.options;\n    if (uniqueKeys === false)\n        return false;\n    const isEqual = typeof uniqueKeys === 'function'\n        ? 
uniqueKeys\n        : (a, b) => a === b || (identity.isScalar(a) && identity.isScalar(b) && a.value === b.value);\n    return items.some(pair => isEqual(pair.key, search));\n}\n\nexports.mapIncludes = mapIncludes;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar Collection = require('../nodes/Collection.js');\nvar identity = require('../nodes/identity.js');\nvar Pair = require('../nodes/Pair.js');\nvar toJS = require('../nodes/toJS.js');\nvar Schema = require('../schema/Schema.js');\nvar stringifyDocument = require('../stringify/stringifyDocument.js');\nvar anchors = require('./anchors.js');\nvar applyReviver = require('./applyReviver.js');\nvar createNode = require('./createNode.js');\nvar directives = require('./directives.js');\n\nclass Document {\n    constructor(value, replacer, options) {\n        /** A comment before this Document */\n        this.commentBefore = null;\n        /** A comment immediately after this Document */\n        this.comment = null;\n        /** Errors encountered during parsing. */\n        this.errors = [];\n        /** Warnings encountered during parsing. */\n        this.warnings = [];\n        Object.defineProperty(this, identity.NODE_TYPE, { value: identity.DOC });\n        let _replacer = null;\n        if (typeof replacer === 'function' || Array.isArray(replacer)) {\n            _replacer = replacer;\n        }\n        else if (options === undefined && replacer) {\n            options = replacer;\n            replacer = undefined;\n        }\n        const opt = Object.assign({\n            intAsBigInt: false,\n            keepSourceTokens: false,\n            logLevel: 'warn',\n            prettyErrors: true,\n            strict: true,\n            stringKeys: false,\n            uniqueKeys: true,\n            version: '1.2'\n        }, options);\n        this.options = opt;\n        let { version } = opt;\n        if (options?._directives) {\n            this.directives = options._directives.atDocument();\n            if (this.directives.yaml.explicit)\n                version = this.directives.yaml.version;\n        }\n        else\n            this.directives = new directives.Directives({ version });\n        this.setSchema(version, options);\n        // @ts-expect-error We can't really know that this matches Contents.\n        this.contents =\n            value === undefined ? null : this.createNode(value, _replacer, options);\n    }\n    /**\n     * Create a deep copy of this Document and its contents.\n     *\n     * Custom Node values that inherit from `Object` still refer to their original instances.\n     */\n    clone() {\n        const copy = Object.create(Document.prototype, {\n            [identity.NODE_TYPE]: { value: identity.DOC }\n        });\n        copy.commentBefore = this.commentBefore;\n        copy.comment = this.comment;\n        copy.errors = this.errors.slice();\n        copy.warnings = this.warnings.slice();\n        copy.options = Object.assign({}, this.options);\n        if (this.directives)\n            copy.directives = this.directives.clone();\n        copy.schema = this.schema.clone();\n        // @ts-expect-error We can't really know that this matches Contents.\n        copy.contents = identity.isNode(this.contents)\n            ? this.contents.clone(copy.schema)\n            : this.contents;\n        if (this.range)\n            copy.range = this.range.slice();\n        return copy;\n    }\n    /** Adds a value to the document. 
*/\n    add(value) {\n        if (assertCollection(this.contents))\n            this.contents.add(value);\n    }\n    /** Adds a value to the document. */\n    addIn(path, value) {\n        if (assertCollection(this.contents))\n            this.contents.addIn(path, value);\n    }\n    /**\n     * Create a new `Alias` node, ensuring that the target `node` has the required anchor.\n     *\n     * If `node` already has an anchor, `name` is ignored.\n     * Otherwise, the `node.anchor` value will be set to `name`,\n     * or if an anchor with that name is already present in the document,\n     * `name` will be used as a prefix for a new unique anchor.\n     * If `name` is undefined, the generated anchor will use 'a' as a prefix.\n     */\n    createAlias(node, name) {\n        if (!node.anchor) {\n            const prev = anchors.anchorNames(this);\n            node.anchor =\n                // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n                !name || prev.has(name) ? anchors.findNewAnchor(name || 'a', prev) : name;\n        }\n        return new Alias.Alias(node.anchor);\n    }\n    createNode(value, replacer, options) {\n        let _replacer = undefined;\n        if (typeof replacer === 'function') {\n            value = replacer.call({ '': value }, '', value);\n            _replacer = replacer;\n        }\n        else if (Array.isArray(replacer)) {\n            const keyToStr = (v) => typeof v === 'number' || v instanceof String || v instanceof Number;\n            const asStr = replacer.filter(keyToStr).map(String);\n            if (asStr.length > 0)\n                replacer = replacer.concat(asStr);\n            _replacer = replacer;\n        }\n        else if (options === undefined && replacer) {\n            options = replacer;\n            replacer = undefined;\n        }\n        const { aliasDuplicateObjects, anchorPrefix, flow, keepUndefined, onTagObj, tag } = options ?? {};\n        const { onAnchor, setAnchors, sourceObjects } = anchors.createNodeAnchors(this, \n        // eslint-disable-next-line @typescript-eslint/prefer-nullish-coalescing\n        anchorPrefix || 'a');\n        const ctx = {\n            aliasDuplicateObjects: aliasDuplicateObjects ?? true,\n            keepUndefined: keepUndefined ?? false,\n            onAnchor,\n            onTagObj,\n            replacer: _replacer,\n            schema: this.schema,\n            sourceObjects\n        };\n        const node = createNode.createNode(value, tag, ctx);\n        if (flow && identity.isCollection(node))\n            node.flow = true;\n        setAnchors();\n        return node;\n    }\n    /**\n     * Convert a key and a value into a `Pair` using the current schema,\n     * recursively wrapping all values as `Scalar` or `Collection` nodes.\n     */\n    createPair(key, value, options = {}) {\n        const k = this.createNode(key, null, options);\n        const v = this.createNode(value, null, options);\n        return new Pair.Pair(k, v);\n    }\n    /**\n     * Removes a value from the document.\n     * @returns `true` if the item was found and removed.\n     */\n    delete(key) {\n        return assertCollection(this.contents) ? 
this.contents.delete(key) : false;\n    }\n    /**\n     * Removes a value from the document.\n     * @returns `true` if the item was found and removed.\n     */\n    deleteIn(path) {\n        if (Collection.isEmptyPath(path)) {\n            if (this.contents == null)\n                return false;\n            // @ts-expect-error Presumed impossible if Strict extends false\n            this.contents = null;\n            return true;\n        }\n        return assertCollection(this.contents)\n            ? this.contents.deleteIn(path)\n            : false;\n    }\n    /**\n     * Returns item at `key`, or `undefined` if not found. By default unwraps\n     * scalar values from their surrounding node; to disable set `keepScalar` to\n     * `true` (collections are always returned intact).\n     */\n    get(key, keepScalar) {\n        return identity.isCollection(this.contents)\n            ? this.contents.get(key, keepScalar)\n            : undefined;\n    }\n    /**\n     * Returns item at `path`, or `undefined` if not found. By default unwraps\n     * scalar values from their surrounding node; to disable set `keepScalar` to\n     * `true` (collections are always returned intact).\n     */\n    getIn(path, keepScalar) {\n        if (Collection.isEmptyPath(path))\n            return !keepScalar && identity.isScalar(this.contents)\n                ? this.contents.value\n                : this.contents;\n        return identity.isCollection(this.contents)\n            ? this.contents.getIn(path, keepScalar)\n            : undefined;\n    }\n    /**\n     * Checks if the document includes a value with the key `key`.\n     */\n    has(key) {\n        return identity.isCollection(this.contents) ? this.contents.has(key) : false;\n    }\n    /**\n     * Checks if the document includes a value at `path`.\n     */\n    hasIn(path) {\n        if (Collection.isEmptyPath(path))\n            return this.contents !== undefined;\n        return identity.isCollection(this.contents) ? this.contents.hasIn(path) : false;\n    }\n    /**\n     * Sets a value in this document. For `!!set`, `value` needs to be a\n     * boolean to add/remove the item from the set.\n     */\n    set(key, value) {\n        if (this.contents == null) {\n            // @ts-expect-error We can't really know that this matches Contents.\n            this.contents = Collection.collectionFromPath(this.schema, [key], value);\n        }\n        else if (assertCollection(this.contents)) {\n            this.contents.set(key, value);\n        }\n    }\n    /**\n     * Sets a value in this document. 
For `!!set`, `value` needs to be a\n     * boolean to add/remove the item from the set.\n     */\n    setIn(path, value) {\n        if (Collection.isEmptyPath(path)) {\n            // @ts-expect-error We can't really know that this matches Contents.\n            this.contents = value;\n        }\n        else if (this.contents == null) {\n            // @ts-expect-error We can't really know that this matches Contents.\n            this.contents = Collection.collectionFromPath(this.schema, Array.from(path), value);\n        }\n        else if (assertCollection(this.contents)) {\n            this.contents.setIn(path, value);\n        }\n    }\n    /**\n     * Change the YAML version and schema used by the document.\n     * A `null` version disables support for directives, explicit tags, anchors, and aliases.\n     * It also requires the `schema` option to be given as a `Schema` instance value.\n     *\n     * Overrides all previously set schema options.\n     */\n    setSchema(version, options = {}) {\n        if (typeof version === 'number')\n            version = String(version);\n        let opt;\n        switch (version) {\n            case '1.1':\n                if (this.directives)\n                    this.directives.yaml.version = '1.1';\n                else\n                    this.directives = new directives.Directives({ version: '1.1' });\n                opt = { resolveKnownTags: false, schema: 'yaml-1.1' };\n                break;\n            case '1.2':\n            case 'next':\n                if (this.directives)\n                    this.directives.yaml.version = version;\n                else\n                    this.directives = new directives.Directives({ version });\n                opt = { resolveKnownTags: true, schema: 'core' };\n                break;\n            case null:\n                if (this.directives)\n                    delete this.directives;\n                opt = null;\n                break;\n            default: {\n                const sv = JSON.stringify(version);\n                throw new Error(`Expected '1.1', '1.2' or null as first argument, but found: ${sv}`);\n            }\n        }\n        // Not using `instanceof Schema` to allow for duck typing\n        if (options.schema instanceof Object)\n            this.schema = options.schema;\n        else if (opt)\n            this.schema = new Schema.Schema(Object.assign(opt, options));\n        else\n            throw new Error(`With a null YAML version, the { schema: Schema } option is required`);\n    }\n    // json & jsonArg are only used from toJSON()\n    toJS({ json, jsonArg, mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n        const ctx = {\n            anchors: new Map(),\n            doc: this,\n            keep: !json,\n            mapAsMap: mapAsMap === true,\n            mapKeyWarned: false,\n            maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100\n        };\n        const res = toJS.toJS(this.contents, jsonArg ?? '', ctx);\n        if (typeof onAnchor === 'function')\n            for (const { count, res } of ctx.anchors.values())\n                onAnchor(res, count);\n        return typeof reviver === 'function'\n            ? 
applyReviver.applyReviver(reviver, { '': res }, '', res)\n            : res;\n    }\n    /**\n     * A JSON representation of the document `contents`.\n     *\n     * @param jsonArg Used by `JSON.stringify` to indicate the array index or\n     *   property name.\n     */\n    toJSON(jsonArg, onAnchor) {\n        return this.toJS({ json: true, jsonArg, mapAsMap: false, onAnchor });\n    }\n    /** A YAML representation of the document. */\n    toString(options = {}) {\n        if (this.errors.length > 0)\n            throw new Error('Document with errors cannot be stringified');\n        if ('indent' in options &&\n            (!Number.isInteger(options.indent) || Number(options.indent) <= 0)) {\n            const s = JSON.stringify(options.indent);\n            throw new Error(`\"indent\" option must be a positive integer, not ${s}`);\n        }\n        return stringifyDocument.stringifyDocument(this, options);\n    }\n}\nfunction assertCollection(contents) {\n    if (identity.isCollection(contents))\n        return true;\n    throw new Error('Expected a YAML collection as document contents');\n}\n\nexports.Document = Document;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar visit = require('../visit.js');\n\n/**\n * Verify that the input string is a valid anchor.\n *\n * Will throw on errors.\n */\nfunction anchorIsValid(anchor) {\n    if (/[\\x00-\\x19\\s,[\\]{}]/.test(anchor)) {\n        const sa = JSON.stringify(anchor);\n        const msg = `Anchor must not contain whitespace or control characters: ${sa}`;\n        throw new Error(msg);\n    }\n    return true;\n}\nfunction anchorNames(root) {\n    const anchors = new Set();\n    visit.visit(root, {\n        Value(_key, node) {\n            if (node.anchor)\n                anchors.add(node.anchor);\n        }\n    });\n    return anchors;\n}\n/** Find a new anchor name with the given `prefix` and a one-indexed suffix. */\nfunction findNewAnchor(prefix, exclude) {\n    for (let i = 1; true; ++i) {\n        const name = `${prefix}${i}`;\n        if (!exclude.has(name))\n            return name;\n    }\n}\nfunction createNodeAnchors(doc, prefix) {\n    const aliasObjects = [];\n    const sourceObjects = new Map();\n    let prevAnchors = null;\n    return {\n        onAnchor: (source) => {\n            aliasObjects.push(source);\n            if (!prevAnchors)\n                prevAnchors = anchorNames(doc);\n            const anchor = findNewAnchor(prefix, prevAnchors);\n            prevAnchors.add(anchor);\n            return anchor;\n        },\n        /**\n         * With circular references, the source node is only resolved after all\n         * of its child nodes are. 
This is why anchors are set only after all of\n         * the nodes have been created.\n         */\n        setAnchors: () => {\n            for (const source of aliasObjects) {\n                const ref = sourceObjects.get(source);\n                if (typeof ref === 'object' &&\n                    ref.anchor &&\n                    (identity.isScalar(ref.node) || identity.isCollection(ref.node))) {\n                    ref.node.anchor = ref.anchor;\n                }\n                else {\n                    const error = new Error('Failed to resolve repeated object (this should not happen)');\n                    error.source = source;\n                    throw error;\n                }\n            }\n        },\n        sourceObjects\n    };\n}\n\nexports.anchorIsValid = anchorIsValid;\nexports.anchorNames = anchorNames;\nexports.createNodeAnchors = createNodeAnchors;\nexports.findNewAnchor = findNewAnchor;\n","'use strict';\n\n/**\n * Applies the JSON.parse reviver algorithm as defined in the ECMA-262 spec,\n * in section 24.5.1.1 \"Runtime Semantics: InternalizeJSONProperty\" of the\n * 2021 edition: https://tc39.es/ecma262/#sec-json.parse\n *\n * Includes extensions for handling Map and Set objects.\n */\nfunction applyReviver(reviver, obj, key, val) {\n    if (val && typeof val === 'object') {\n        if (Array.isArray(val)) {\n            for (let i = 0, len = val.length; i < len; ++i) {\n                const v0 = val[i];\n                const v1 = applyReviver(reviver, val, String(i), v0);\n                // eslint-disable-next-line @typescript-eslint/no-array-delete\n                if (v1 === undefined)\n                    delete val[i];\n                else if (v1 !== v0)\n                    val[i] = v1;\n            }\n        }\n        else if (val instanceof Map) {\n            for (const k of Array.from(val.keys())) {\n                const v0 = val.get(k);\n                const v1 = applyReviver(reviver, val, k, v0);\n                if (v1 === undefined)\n                    val.delete(k);\n                else if (v1 !== v0)\n                    val.set(k, v1);\n            }\n        }\n        else if (val instanceof Set) {\n            for (const v0 of Array.from(val)) {\n                const v1 = applyReviver(reviver, val, v0, v0);\n                if (v1 === undefined)\n                    val.delete(v0);\n                else if (v1 !== v0) {\n                    val.delete(v0);\n                    val.add(v1);\n                }\n            }\n        }\n        else {\n            for (const [k, v0] of Object.entries(val)) {\n                const v1 = applyReviver(reviver, val, k, v0);\n                if (v1 === undefined)\n                    delete val[k];\n                else if (v1 !== v0)\n                    val[k] = v1;\n            }\n        }\n    }\n    return reviver.call(obj, key, val);\n}\n\nexports.applyReviver = applyReviver;\n","'use strict';\n\nvar Alias = require('../nodes/Alias.js');\nvar identity = require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\n\nconst defaultTagPrefix = 'tag:yaml.org,2002:';\nfunction findTagObject(value, tagName, tags) {\n    if (tagName) {\n        const match = tags.filter(t => t.tag === tagName);\n        const tagObj = match.find(t => !t.format) ?? 
match[0];\n        if (!tagObj)\n            throw new Error(`Tag ${tagName} not found`);\n        return tagObj;\n    }\n    return tags.find(t => t.identify?.(value) && !t.format);\n}\nfunction createNode(value, tagName, ctx) {\n    if (identity.isDocument(value))\n        value = value.contents;\n    if (identity.isNode(value))\n        return value;\n    if (identity.isPair(value)) {\n        const map = ctx.schema[identity.MAP].createNode?.(ctx.schema, null, ctx);\n        map.items.push(value);\n        return map;\n    }\n    if (value instanceof String ||\n        value instanceof Number ||\n        value instanceof Boolean ||\n        (typeof BigInt !== 'undefined' && value instanceof BigInt) // not supported everywhere\n    ) {\n        // https://tc39.es/ecma262/#sec-serializejsonproperty\n        value = value.valueOf();\n    }\n    const { aliasDuplicateObjects, onAnchor, onTagObj, schema, sourceObjects } = ctx;\n    // Detect duplicate references to the same object & use Alias nodes for all\n    // after first. The `ref` wrapper allows for circular references to resolve.\n    let ref = undefined;\n    if (aliasDuplicateObjects && value && typeof value === 'object') {\n        ref = sourceObjects.get(value);\n        if (ref) {\n            if (!ref.anchor)\n                ref.anchor = onAnchor(value);\n            return new Alias.Alias(ref.anchor);\n        }\n        else {\n            ref = { anchor: null, node: null };\n            sourceObjects.set(value, ref);\n        }\n    }\n    if (tagName?.startsWith('!!'))\n        tagName = defaultTagPrefix + tagName.slice(2);\n    let tagObj = findTagObject(value, tagName, schema.tags);\n    if (!tagObj) {\n        if (value && typeof value.toJSON === 'function') {\n            // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n            value = value.toJSON();\n        }\n        if (!value || typeof value !== 'object') {\n            const node = new Scalar.Scalar(value);\n            if (ref)\n                ref.node = node;\n            return node;\n        }\n        tagObj =\n            value instanceof Map\n                ? schema[identity.MAP]\n                : Symbol.iterator in Object(value)\n                    ? schema[identity.SEQ]\n                    : schema[identity.MAP];\n    }\n    if (onTagObj) {\n        onTagObj(tagObj);\n        delete ctx.onTagObj;\n    }\n    const node = tagObj?.createNode\n        ? tagObj.createNode(ctx.schema, value, ctx)\n        : typeof tagObj?.nodeClass?.from === 'function'\n            ? tagObj.nodeClass.from(ctx.schema, value, ctx)\n            : new Scalar.Scalar(value);\n    if (tagName)\n        node.tag = tagName;\n    else if (!tagObj.default)\n        node.tag = tagObj.tag;\n    if (ref)\n        ref.node = node;\n    return node;\n}\n\nexports.createNode = createNode;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar visit = require('../visit.js');\n\nconst escapeChars = {\n    '!': '%21',\n    ',': '%2C',\n    '[': '%5B',\n    ']': '%5D',\n    '{': '%7B',\n    '}': '%7D'\n};\nconst escapeTagName = (tn) => tn.replace(/[!,[\\]{}]/g, ch => escapeChars[ch]);\nclass Directives {\n    constructor(yaml, tags) {\n        /**\n         * The directives-end/doc-start marker `---`. If `null`, a marker may still be\n         * included in the document's stringified representation.\n         */\n        this.docStart = null;\n        /** The doc-end marker `...`.  
*/\n        this.docEnd = false;\n        this.yaml = Object.assign({}, Directives.defaultYaml, yaml);\n        this.tags = Object.assign({}, Directives.defaultTags, tags);\n    }\n    clone() {\n        const copy = new Directives(this.yaml, this.tags);\n        copy.docStart = this.docStart;\n        return copy;\n    }\n    /**\n     * During parsing, get a Directives instance for the current document and\n     * update the stream state according to the current version's spec.\n     */\n    atDocument() {\n        const res = new Directives(this.yaml, this.tags);\n        switch (this.yaml.version) {\n            case '1.1':\n                this.atNextDocument = true;\n                break;\n            case '1.2':\n                this.atNextDocument = false;\n                this.yaml = {\n                    explicit: Directives.defaultYaml.explicit,\n                    version: '1.2'\n                };\n                this.tags = Object.assign({}, Directives.defaultTags);\n                break;\n        }\n        return res;\n    }\n    /**\n     * @param onError - May be called even if the action was successful\n     * @returns `true` on success\n     */\n    add(line, onError) {\n        if (this.atNextDocument) {\n            this.yaml = { explicit: Directives.defaultYaml.explicit, version: '1.1' };\n            this.tags = Object.assign({}, Directives.defaultTags);\n            this.atNextDocument = false;\n        }\n        const parts = line.trim().split(/[ \\t]+/);\n        const name = parts.shift();\n        switch (name) {\n            case '%TAG': {\n                if (parts.length !== 2) {\n                    onError(0, '%TAG directive should contain exactly two parts');\n                    if (parts.length < 2)\n                        return false;\n                }\n                const [handle, prefix] = parts;\n                this.tags[handle] = prefix;\n                return true;\n            }\n            case '%YAML': {\n                this.yaml.explicit = true;\n                if (parts.length !== 1) {\n                    onError(0, '%YAML directive should contain exactly one part');\n                    return false;\n                }\n                const [version] = parts;\n                if (version === '1.1' || version === '1.2') {\n                    this.yaml.version = version;\n                    return true;\n                }\n                else {\n                    const isValid = /^\\d+\\.\\d+$/.test(version);\n                    onError(6, `Unsupported YAML version ${version}`, isValid);\n                    return false;\n                }\n            }\n            default:\n                onError(0, `Unknown directive ${name}`, true);\n                return false;\n        }\n    }\n    /**\n     * Resolves a tag, matching handles to those defined in %TAG directives.\n     *\n     * @returns Resolved tag, which may also be the non-specific tag `'!'` or a\n     *   `'!local'` tag, or `null` if unresolvable.\n     */\n    tagName(source, onError) {\n        if (source === '!')\n            return '!'; // non-specific tag\n        if (source[0] !== '!') {\n            onError(`Not a valid tag: ${source}`);\n            return null;\n        }\n        if (source[1] === '<') {\n            const verbatim = source.slice(2, -1);\n            if (verbatim === '!' 
|| verbatim === '!!') {\n                onError(`Verbatim tags aren't resolved, so ${source} is invalid.`);\n                return null;\n            }\n            if (source[source.length - 1] !== '>')\n                onError('Verbatim tags must end with a >');\n            return verbatim;\n        }\n        const [, handle, suffix] = source.match(/^(.*!)([^!]*)$/s);\n        if (!suffix)\n            onError(`The ${source} tag has no suffix`);\n        const prefix = this.tags[handle];\n        if (prefix) {\n            try {\n                return prefix + decodeURIComponent(suffix);\n            }\n            catch (error) {\n                onError(String(error));\n                return null;\n            }\n        }\n        if (handle === '!')\n            return source; // local tag\n        onError(`Could not resolve tag: ${source}`);\n        return null;\n    }\n    /**\n     * Given a fully resolved tag, returns its printable string form,\n     * taking into account current tag prefixes and defaults.\n     */\n    tagString(tag) {\n        for (const [handle, prefix] of Object.entries(this.tags)) {\n            if (tag.startsWith(prefix))\n                return handle + escapeTagName(tag.substring(prefix.length));\n        }\n        return tag[0] === '!' ? tag : `!<${tag}>`;\n    }\n    toString(doc) {\n        const lines = this.yaml.explicit\n            ? [`%YAML ${this.yaml.version || '1.2'}`]\n            : [];\n        const tagEntries = Object.entries(this.tags);\n        let tagNames;\n        if (doc && tagEntries.length > 0 && identity.isNode(doc.contents)) {\n            const tags = {};\n            visit.visit(doc.contents, (_key, node) => {\n                if (identity.isNode(node) && node.tag)\n                    tags[node.tag] = true;\n            });\n            tagNames = Object.keys(tags);\n        }\n        else\n            tagNames = [];\n        for (const [handle, prefix] of tagEntries) {\n            if (handle === '!!' 
&& prefix === 'tag:yaml.org,2002:')\n                continue;\n            if (!doc || tagNames.some(tn => tn.startsWith(prefix)))\n                lines.push(`%TAG ${handle} ${prefix}`);\n        }\n        return lines.join('\\n');\n    }\n}\nDirectives.defaultYaml = { explicit: false, version: '1.2' };\nDirectives.defaultTags = { '!!': 'tag:yaml.org,2002:' };\n\nexports.Directives = Directives;\n","'use strict';\n\nclass YAMLError extends Error {\n    constructor(name, pos, code, message) {\n        super();\n        this.name = name;\n        this.code = code;\n        this.message = message;\n        this.pos = pos;\n    }\n}\nclass YAMLParseError extends YAMLError {\n    constructor(pos, code, message) {\n        super('YAMLParseError', pos, code, message);\n    }\n}\nclass YAMLWarning extends YAMLError {\n    constructor(pos, code, message) {\n        super('YAMLWarning', pos, code, message);\n    }\n}\nconst prettifyError = (src, lc) => (error) => {\n    if (error.pos[0] === -1)\n        return;\n    error.linePos = error.pos.map(pos => lc.linePos(pos));\n    const { line, col } = error.linePos[0];\n    error.message += ` at line ${line}, column ${col}`;\n    let ci = col - 1;\n    let lineStr = src\n        .substring(lc.lineStarts[line - 1], lc.lineStarts[line])\n        .replace(/[\\n\\r]+$/, '');\n    // Trim to max 80 chars, keeping col position near the middle\n    if (ci >= 60 && lineStr.length > 80) {\n        const trimStart = Math.min(ci - 39, lineStr.length - 79);\n        lineStr = '…' + lineStr.substring(trimStart);\n        ci -= trimStart - 1;\n    }\n    if (lineStr.length > 80)\n        lineStr = lineStr.substring(0, 79) + '…';\n    // Include previous line in context if pointing at line start\n    if (line > 1 && /^ *$/.test(lineStr.substring(0, ci))) {\n        // Regexp won't match if start is trimmed\n        let prev = src.substring(lc.lineStarts[line - 2], lc.lineStarts[line - 1]);\n        if (prev.length > 80)\n            prev = prev.substring(0, 79) + '…\\n';\n        lineStr = prev + lineStr;\n    }\n    if (/[^ ]/.test(lineStr)) {\n        let count = 1;\n        const end = error.linePos[1];\n        if (end && end.line === line && end.col > col) {\n            count = Math.max(1, Math.min(end.col - col, 80 - ci));\n        }\n        const pointer = ' '.repeat(ci) + '^'.repeat(count);\n        error.message += `:\\n\\n${lineStr}\\n${pointer}\\n`;\n    }\n};\n\nexports.YAMLError = YAMLError;\nexports.YAMLParseError = YAMLParseError;\nexports.YAMLWarning = YAMLWarning;\nexports.prettifyError = prettifyError;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar Schema = require('./schema/Schema.js');\nvar errors = require('./errors.js');\nvar Alias = require('./nodes/Alias.js');\nvar identity = require('./nodes/identity.js');\nvar Pair = require('./nodes/Pair.js');\nvar Scalar = require('./nodes/Scalar.js');\nvar YAMLMap = require('./nodes/YAMLMap.js');\nvar YAMLSeq = require('./nodes/YAMLSeq.js');\nvar cst = require('./parse/cst.js');\nvar lexer = require('./parse/lexer.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\nvar publicApi = require('./public-api.js');\nvar visit = require('./visit.js');\n\n\n\nexports.Composer = composer.Composer;\nexports.Document = Document.Document;\nexports.Schema = Schema.Schema;\nexports.YAMLError = errors.YAMLError;\nexports.YAMLParseError = errors.YAMLParseError;\nexports.YAMLWarning = 
errors.YAMLWarning;\nexports.Alias = Alias.Alias;\nexports.isAlias = identity.isAlias;\nexports.isCollection = identity.isCollection;\nexports.isDocument = identity.isDocument;\nexports.isMap = identity.isMap;\nexports.isNode = identity.isNode;\nexports.isPair = identity.isPair;\nexports.isScalar = identity.isScalar;\nexports.isSeq = identity.isSeq;\nexports.Pair = Pair.Pair;\nexports.Scalar = Scalar.Scalar;\nexports.YAMLMap = YAMLMap.YAMLMap;\nexports.YAMLSeq = YAMLSeq.YAMLSeq;\nexports.CST = cst;\nexports.Lexer = lexer.Lexer;\nexports.LineCounter = lineCounter.LineCounter;\nexports.Parser = parser.Parser;\nexports.parse = publicApi.parse;\nexports.parseAllDocuments = publicApi.parseAllDocuments;\nexports.parseDocument = publicApi.parseDocument;\nexports.stringify = publicApi.stringify;\nexports.visit = visit.visit;\nexports.visitAsync = visit.visitAsync;\n","'use strict';\n\nfunction debug(logLevel, ...messages) {\n    if (logLevel === 'debug')\n        console.log(...messages);\n}\nfunction warn(logLevel, warning) {\n    if (logLevel === 'debug' || logLevel === 'warn') {\n        if (typeof process !== 'undefined' && process.emitWarning)\n            process.emitWarning(warning);\n        else\n            console.warn(warning);\n    }\n}\n\nexports.debug = debug;\nexports.warn = warn;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar visit = require('../visit.js');\nvar identity = require('./identity.js');\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nclass Alias extends Node.NodeBase {\n    constructor(source) {\n        super(identity.ALIAS);\n        this.source = source;\n        Object.defineProperty(this, 'tag', {\n            set() {\n                throw new Error('Alias nodes cannot have tags');\n            }\n        });\n    }\n    /**\n     * Resolve the value of this alias within `doc`, finding the last\n     * instance of the `source` anchor before this node.\n     */\n    resolve(doc) {\n        let found = undefined;\n        visit.visit(doc, {\n            Node: (_key, node) => {\n                if (node === this)\n                    return visit.visit.BREAK;\n                if (node.anchor === this.source)\n                    found = node;\n            }\n        });\n        return found;\n    }\n    toJSON(_arg, ctx) {\n        if (!ctx)\n            return { source: this.source };\n        const { anchors, doc, maxAliasCount } = ctx;\n        const source = this.resolve(doc);\n        if (!source) {\n            const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n            throw new ReferenceError(msg);\n        }\n        let data = anchors.get(source);\n        if (!data) {\n            // Resolve anchors for Node.prototype.toJS()\n            toJS.toJS(source, null, ctx);\n            data = anchors.get(source);\n        }\n        /* istanbul ignore if */\n        if (!data || data.res === undefined) {\n            const msg = 'This should not happen: Alias anchor was not resolved?';\n            throw new ReferenceError(msg);\n        }\n        if (maxAliasCount >= 0) {\n            data.count += 1;\n            if (data.aliasCount === 0)\n                data.aliasCount = getAliasCount(doc, source, anchors);\n            if (data.count * data.aliasCount > maxAliasCount) {\n                const msg = 'Excessive alias count indicates a resource exhaustion attack';\n                throw new ReferenceError(msg);\n            }\n        }\n        return data.res;\n    
}\n    toString(ctx, _onComment, _onChompKeep) {\n        const src = `*${this.source}`;\n        if (ctx) {\n            anchors.anchorIsValid(this.source);\n            if (ctx.options.verifyAliasOrder && !ctx.anchors.has(this.source)) {\n                const msg = `Unresolved alias (the anchor must be set before the alias): ${this.source}`;\n                throw new Error(msg);\n            }\n            if (ctx.implicitKey)\n                return `${src} `;\n        }\n        return src;\n    }\n}\nfunction getAliasCount(doc, node, anchors) {\n    if (identity.isAlias(node)) {\n        const source = node.resolve(doc);\n        const anchor = anchors && source && anchors.get(source);\n        return anchor ? anchor.count * anchor.aliasCount : 0;\n    }\n    else if (identity.isCollection(node)) {\n        let count = 0;\n        for (const item of node.items) {\n            const c = getAliasCount(doc, item, anchors);\n            if (c > count)\n                count = c;\n        }\n        return count;\n    }\n    else if (identity.isPair(node)) {\n        const kc = getAliasCount(doc, node.key, anchors);\n        const vc = getAliasCount(doc, node.value, anchors);\n        return Math.max(kc, vc);\n    }\n    return 1;\n}\n\nexports.Alias = Alias;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar identity = require('./identity.js');\nvar Node = require('./Node.js');\n\nfunction collectionFromPath(schema, path, value) {\n    let v = value;\n    for (let i = path.length - 1; i >= 0; --i) {\n        const k = path[i];\n        if (typeof k === 'number' && Number.isInteger(k) && k >= 0) {\n            const a = [];\n            a[k] = v;\n            v = a;\n        }\n        else {\n            v = new Map([[k, v]]);\n        }\n    }\n    return createNode.createNode(v, undefined, {\n        aliasDuplicateObjects: false,\n        keepUndefined: false,\n        onAnchor: () => {\n            throw new Error('This should not happen, please report a bug.');\n        },\n        schema,\n        sourceObjects: new Map()\n    });\n}\n// Type guard is intentionally a little wrong so as to be more useful,\n// as it does not cover untypable empty non-string iterables (e.g. []).\nconst isEmptyPath = (path) => path == null ||\n    (typeof path === 'object' && !!path[Symbol.iterator]().next().done);\nclass Collection extends Node.NodeBase {\n    constructor(type, schema) {\n        super(type);\n        Object.defineProperty(this, 'schema', {\n            value: schema,\n            configurable: true,\n            enumerable: false,\n            writable: true\n        });\n    }\n    /**\n     * Create a copy of this collection.\n     *\n     * @param schema - If defined, overwrites the original's schema\n     */\n    clone(schema) {\n        const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n        if (schema)\n            copy.schema = schema;\n        copy.items = copy.items.map(it => identity.isNode(it) || identity.isPair(it) ? it.clone(schema) : it);\n        if (this.range)\n            copy.range = this.range.slice();\n        return copy;\n    }\n    /**\n     * Adds a value to the collection. 
For `!!map` and `!!omap` the value must\n     * be a Pair instance or a `{ key, value }` object, which may not have a key\n     * that already exists in the map.\n     */\n    addIn(path, value) {\n        if (isEmptyPath(path))\n            this.add(value);\n        else {\n            const [key, ...rest] = path;\n            const node = this.get(key, true);\n            if (identity.isCollection(node))\n                node.addIn(rest, value);\n            else if (node === undefined && this.schema)\n                this.set(key, collectionFromPath(this.schema, rest, value));\n            else\n                throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n        }\n    }\n    /**\n     * Removes a value from the collection.\n     * @returns `true` if the item was found and removed.\n     */\n    deleteIn(path) {\n        const [key, ...rest] = path;\n        if (rest.length === 0)\n            return this.delete(key);\n        const node = this.get(key, true);\n        if (identity.isCollection(node))\n            return node.deleteIn(rest);\n        else\n            throw new Error(`Expected YAML collection at ${key}. Remaining path: ${rest}`);\n    }\n    /**\n     * Returns item at `key`, or `undefined` if not found. By default unwraps\n     * scalar values from their surrounding node; to disable set `keepScalar` to\n     * `true` (collections are always returned intact).\n     */\n    getIn(path, keepScalar) {\n        const [key, ...rest] = path;\n        const node = this.get(key, true);\n        if (rest.length === 0)\n            return !keepScalar && identity.isScalar(node) ? node.value : node;\n        else\n            return identity.isCollection(node) ? node.getIn(rest, keepScalar) : undefined;\n    }\n    hasAllNullValues(allowScalar) {\n        return this.items.every(node => {\n            if (!identity.isPair(node))\n                return false;\n            const n = node.value;\n            return (n == null ||\n                (allowScalar &&\n                    identity.isScalar(n) &&\n                    n.value == null &&\n                    !n.commentBefore &&\n                    !n.comment &&\n                    !n.tag));\n        });\n    }\n    /**\n     * Checks if the collection includes a value with the key `key`.\n     */\n    hasIn(path) {\n        const [key, ...rest] = path;\n        if (rest.length === 0)\n            return this.has(key);\n        const node = this.get(key, true);\n        return identity.isCollection(node) ? node.hasIn(rest) : false;\n    }\n    /**\n     * Sets a value in this collection. For `!!set`, `value` needs to be a\n     * boolean to add/remove the item from the set.\n     */\n    setIn(path, value) {\n        const [key, ...rest] = path;\n        if (rest.length === 0) {\n            this.set(key, value);\n        }\n        else {\n            const node = this.get(key, true);\n            if (identity.isCollection(node))\n                node.setIn(rest, value);\n            else if (node === undefined && this.schema)\n                this.set(key, collectionFromPath(this.schema, rest, value));\n            else\n                throw new Error(`Expected YAML collection at ${key}. 
Remaining path: ${rest}`);\n        }\n    }\n}\n\nexports.Collection = Collection;\nexports.collectionFromPath = collectionFromPath;\nexports.isEmptyPath = isEmptyPath;\n","'use strict';\n\nvar applyReviver = require('../doc/applyReviver.js');\nvar identity = require('./identity.js');\nvar toJS = require('./toJS.js');\n\nclass NodeBase {\n    constructor(type) {\n        Object.defineProperty(this, identity.NODE_TYPE, { value: type });\n    }\n    /** Create a copy of this node.  */\n    clone() {\n        const copy = Object.create(Object.getPrototypeOf(this), Object.getOwnPropertyDescriptors(this));\n        if (this.range)\n            copy.range = this.range.slice();\n        return copy;\n    }\n    /** A plain JavaScript representation of this node. */\n    toJS(doc, { mapAsMap, maxAliasCount, onAnchor, reviver } = {}) {\n        if (!identity.isDocument(doc))\n            throw new TypeError('A document argument is required');\n        const ctx = {\n            anchors: new Map(),\n            doc,\n            keep: true,\n            mapAsMap: mapAsMap === true,\n            mapKeyWarned: false,\n            maxAliasCount: typeof maxAliasCount === 'number' ? maxAliasCount : 100\n        };\n        const res = toJS.toJS(this, '', ctx);\n        if (typeof onAnchor === 'function')\n            for (const { count, res } of ctx.anchors.values())\n                onAnchor(res, count);\n        return typeof reviver === 'function'\n            ? applyReviver.applyReviver(reviver, { '': res }, '', res)\n            : res;\n    }\n}\n\nexports.NodeBase = NodeBase;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyPair = require('../stringify/stringifyPair.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar identity = require('./identity.js');\n\nfunction createPair(key, value, ctx) {\n    const k = createNode.createNode(key, undefined, ctx);\n    const v = createNode.createNode(value, undefined, ctx);\n    return new Pair(k, v);\n}\nclass Pair {\n    constructor(key, value = null) {\n        Object.defineProperty(this, identity.NODE_TYPE, { value: identity.PAIR });\n        this.key = key;\n        this.value = value;\n    }\n    clone(schema) {\n        let { key, value } = this;\n        if (identity.isNode(key))\n            key = key.clone(schema);\n        if (identity.isNode(value))\n            value = value.clone(schema);\n        return new Pair(key, value);\n    }\n    toJSON(_, ctx) {\n        const pair = ctx?.mapAsMap ? new Map() : {};\n        return addPairToJSMap.addPairToJSMap(ctx, pair, this);\n    }\n    toString(ctx, onComment, onChompKeep) {\n        return ctx?.doc\n            ? stringifyPair.stringifyPair(this, ctx, onComment, onChompKeep)\n            : JSON.stringify(this);\n    }\n}\n\nexports.Pair = Pair;\nexports.createPair = createPair;\n","'use strict';\n\nvar identity = require('./identity.js');\nvar Node = require('./Node.js');\nvar toJS = require('./toJS.js');\n\nconst isScalarValue = (value) => !value || (typeof value !== 'function' && typeof value !== 'object');\nclass Scalar extends Node.NodeBase {\n    constructor(value) {\n        super(identity.SCALAR);\n        this.value = value;\n    }\n    toJSON(arg, ctx) {\n        return ctx?.keep ? 
this.value : toJS.toJS(this.value, arg, ctx);\n    }\n    toString() {\n        return String(this.value);\n    }\n}\nScalar.BLOCK_FOLDED = 'BLOCK_FOLDED';\nScalar.BLOCK_LITERAL = 'BLOCK_LITERAL';\nScalar.PLAIN = 'PLAIN';\nScalar.QUOTE_DOUBLE = 'QUOTE_DOUBLE';\nScalar.QUOTE_SINGLE = 'QUOTE_SINGLE';\n\nexports.Scalar = Scalar;\nexports.isScalarValue = isScalarValue;\n","'use strict';\n\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar addPairToJSMap = require('./addPairToJSMap.js');\nvar Collection = require('./Collection.js');\nvar identity = require('./identity.js');\nvar Pair = require('./Pair.js');\nvar Scalar = require('./Scalar.js');\n\nfunction findPair(items, key) {\n    const k = identity.isScalar(key) ? key.value : key;\n    for (const it of items) {\n        if (identity.isPair(it)) {\n            if (it.key === key || it.key === k)\n                return it;\n            if (identity.isScalar(it.key) && it.key.value === k)\n                return it;\n        }\n    }\n    return undefined;\n}\nclass YAMLMap extends Collection.Collection {\n    static get tagName() {\n        return 'tag:yaml.org,2002:map';\n    }\n    constructor(schema) {\n        super(identity.MAP, schema);\n        this.items = [];\n    }\n    /**\n     * A generic collection parsing method that can be extended\n     * to other node classes that inherit from YAMLMap\n     */\n    static from(schema, obj, ctx) {\n        const { keepUndefined, replacer } = ctx;\n        const map = new this(schema);\n        const add = (key, value) => {\n            if (typeof replacer === 'function')\n                value = replacer.call(obj, key, value);\n            else if (Array.isArray(replacer) && !replacer.includes(key))\n                return;\n            if (value !== undefined || keepUndefined)\n                map.items.push(Pair.createPair(key, value, ctx));\n        };\n        if (obj instanceof Map) {\n            for (const [key, value] of obj)\n                add(key, value);\n        }\n        else if (obj && typeof obj === 'object') {\n            for (const key of Object.keys(obj))\n                add(key, obj[key]);\n        }\n        if (typeof schema.sortMapEntries === 'function') {\n            map.items.sort(schema.sortMapEntries);\n        }\n        return map;\n    }\n    /**\n     * Adds a value to the collection.\n     *\n     * @param overwrite - If not set `true`, using a key that is already in the\n     *   collection will throw. 
Otherwise, overwrites the previous value.\n     */\n    add(pair, overwrite) {\n        let _pair;\n        if (identity.isPair(pair))\n            _pair = pair;\n        else if (!pair || typeof pair !== 'object' || !('key' in pair)) {\n            // In TypeScript, this never happens.\n            _pair = new Pair.Pair(pair, pair?.value);\n        }\n        else\n            _pair = new Pair.Pair(pair.key, pair.value);\n        const prev = findPair(this.items, _pair.key);\n        const sortEntries = this.schema?.sortMapEntries;\n        if (prev) {\n            if (!overwrite)\n                throw new Error(`Key ${_pair.key} already set`);\n            // For scalars, keep the old node & its comments and anchors\n            if (identity.isScalar(prev.value) && Scalar.isScalarValue(_pair.value))\n                prev.value.value = _pair.value;\n            else\n                prev.value = _pair.value;\n        }\n        else if (sortEntries) {\n            const i = this.items.findIndex(item => sortEntries(_pair, item) < 0);\n            if (i === -1)\n                this.items.push(_pair);\n            else\n                this.items.splice(i, 0, _pair);\n        }\n        else {\n            this.items.push(_pair);\n        }\n    }\n    delete(key) {\n        const it = findPair(this.items, key);\n        if (!it)\n            return false;\n        const del = this.items.splice(this.items.indexOf(it), 1);\n        return del.length > 0;\n    }\n    get(key, keepScalar) {\n        const it = findPair(this.items, key);\n        const node = it?.value;\n        return (!keepScalar && identity.isScalar(node) ? node.value : node) ?? undefined;\n    }\n    has(key) {\n        return !!findPair(this.items, key);\n    }\n    set(key, value) {\n        this.add(new Pair.Pair(key, value), true);\n    }\n    /**\n     * @param ctx - Conversion context, originally set in Document#toJS()\n     * @param {Class} Type - If set, forces the returned collection type\n     * @returns Instance of Type, Map, or Object\n     */\n    toJSON(_, ctx, Type) {\n        const map = Type ? new Type() : ctx?.mapAsMap ? 
new Map() : {};\n        if (ctx?.onCreate)\n            ctx.onCreate(map);\n        for (const item of this.items)\n            addPairToJSMap.addPairToJSMap(ctx, map, item);\n        return map;\n    }\n    toString(ctx, onComment, onChompKeep) {\n        if (!ctx)\n            return JSON.stringify(this);\n        for (const item of this.items) {\n            if (!identity.isPair(item))\n                throw new Error(`Map items must all be pairs; found ${JSON.stringify(item)} instead`);\n        }\n        if (!ctx.allNullValues && this.hasAllNullValues(false))\n            ctx = Object.assign({}, ctx, { allNullValues: true });\n        return stringifyCollection.stringifyCollection(this, ctx, {\n            blockItemPrefix: '',\n            flowChars: { start: '{', end: '}' },\n            itemIndent: ctx.indent || '',\n            onChompKeep,\n            onComment\n        });\n    }\n}\n\nexports.YAMLMap = YAMLMap;\nexports.findPair = findPair;\n","'use strict';\n\nvar createNode = require('../doc/createNode.js');\nvar stringifyCollection = require('../stringify/stringifyCollection.js');\nvar Collection = require('./Collection.js');\nvar identity = require('./identity.js');\nvar Scalar = require('./Scalar.js');\nvar toJS = require('./toJS.js');\n\nclass YAMLSeq extends Collection.Collection {\n    static get tagName() {\n        return 'tag:yaml.org,2002:seq';\n    }\n    constructor(schema) {\n        super(identity.SEQ, schema);\n        this.items = [];\n    }\n    add(value) {\n        this.items.push(value);\n    }\n    /**\n     * Removes a value from the collection.\n     *\n     * `key` must contain a representation of an integer for this to succeed.\n     * It may be wrapped in a `Scalar`.\n     *\n     * @returns `true` if the item was found and removed.\n     */\n    delete(key) {\n        const idx = asItemIndex(key);\n        if (typeof idx !== 'number')\n            return false;\n        const del = this.items.splice(idx, 1);\n        return del.length > 0;\n    }\n    get(key, keepScalar) {\n        const idx = asItemIndex(key);\n        if (typeof idx !== 'number')\n            return undefined;\n        const it = this.items[idx];\n        return !keepScalar && identity.isScalar(it) ? it.value : it;\n    }\n    /**\n     * Checks if the collection includes a value with the key `key`.\n     *\n     * `key` must contain a representation of an integer for this to succeed.\n     * It may be wrapped in a `Scalar`.\n     */\n    has(key) {\n        const idx = asItemIndex(key);\n        return typeof idx === 'number' && idx < this.items.length;\n    }\n    /**\n     * Sets a value in this collection. 
For `!!set`, `value` needs to be a\n     * boolean to add/remove the item from the set.\n     *\n     * If `key` does not contain a representation of an integer, this will throw.\n     * It may be wrapped in a `Scalar`.\n     */\n    set(key, value) {\n        const idx = asItemIndex(key);\n        if (typeof idx !== 'number')\n            throw new Error(`Expected a valid index, not ${key}.`);\n        const prev = this.items[idx];\n        if (identity.isScalar(prev) && Scalar.isScalarValue(value))\n            prev.value = value;\n        else\n            this.items[idx] = value;\n    }\n    toJSON(_, ctx) {\n        const seq = [];\n        if (ctx?.onCreate)\n            ctx.onCreate(seq);\n        let i = 0;\n        for (const item of this.items)\n            seq.push(toJS.toJS(item, String(i++), ctx));\n        return seq;\n    }\n    toString(ctx, onComment, onChompKeep) {\n        if (!ctx)\n            return JSON.stringify(this);\n        return stringifyCollection.stringifyCollection(this, ctx, {\n            blockItemPrefix: '- ',\n            flowChars: { start: '[', end: ']' },\n            itemIndent: (ctx.indent || '') + '  ',\n            onChompKeep,\n            onComment\n        });\n    }\n    static from(schema, obj, ctx) {\n        const { replacer } = ctx;\n        const seq = new this(schema);\n        if (obj && Symbol.iterator in Object(obj)) {\n            let i = 0;\n            for (let it of obj) {\n                if (typeof replacer === 'function') {\n                    const key = obj instanceof Set ? it : String(i++);\n                    it = replacer.call(obj, key, it);\n                }\n                seq.items.push(createNode.createNode(it, undefined, ctx));\n            }\n        }\n        return seq;\n    }\n}\nfunction asItemIndex(key) {\n    let idx = identity.isScalar(key) ? key.value : key;\n    if (idx && typeof idx === 'string')\n        idx = Number(idx);\n    return typeof idx === 'number' && Number.isInteger(idx) && idx >= 0\n        ? 
idx\n        : null;\n}\n\nexports.YAMLSeq = YAMLSeq;\n","'use strict';\n\nvar log = require('../log.js');\nvar merge = require('../schema/yaml-1.1/merge.js');\nvar stringify = require('../stringify/stringify.js');\nvar identity = require('./identity.js');\nvar toJS = require('./toJS.js');\n\nfunction addPairToJSMap(ctx, map, { key, value }) {\n    if (identity.isNode(key) && key.addToJSMap)\n        key.addToJSMap(ctx, map, value);\n    // TODO: Should drop this special case for bare << handling\n    else if (merge.isMergeKey(ctx, key))\n        merge.addMergeToJSMap(ctx, map, value);\n    else {\n        const jsKey = toJS.toJS(key, '', ctx);\n        if (map instanceof Map) {\n            map.set(jsKey, toJS.toJS(value, jsKey, ctx));\n        }\n        else if (map instanceof Set) {\n            map.add(jsKey);\n        }\n        else {\n            const stringKey = stringifyKey(key, jsKey, ctx);\n            const jsValue = toJS.toJS(value, stringKey, ctx);\n            if (stringKey in map)\n                Object.defineProperty(map, stringKey, {\n                    value: jsValue,\n                    writable: true,\n                    enumerable: true,\n                    configurable: true\n                });\n            else\n                map[stringKey] = jsValue;\n        }\n    }\n    return map;\n}\nfunction stringifyKey(key, jsKey, ctx) {\n    if (jsKey === null)\n        return '';\n    if (typeof jsKey !== 'object')\n        return String(jsKey);\n    if (identity.isNode(key) && ctx?.doc) {\n        const strCtx = stringify.createStringifyContext(ctx.doc, {});\n        strCtx.anchors = new Set();\n        for (const node of ctx.anchors.keys())\n            strCtx.anchors.add(node.anchor);\n        strCtx.inFlow = true;\n        strCtx.inStringifyKey = true;\n        const strKey = key.toString(strCtx);\n        if (!ctx.mapKeyWarned) {\n            let jsonStr = JSON.stringify(strKey);\n            if (jsonStr.length > 40)\n                jsonStr = jsonStr.substring(0, 36) + '...\"';\n            log.warn(ctx.doc.options.logLevel, `Keys with collection values will be stringified due to JS Object restrictions: ${jsonStr}. 
Set mapAsMap: true to use object keys.`);\n            ctx.mapKeyWarned = true;\n        }\n        return strKey;\n    }\n    return JSON.stringify(jsKey);\n}\n\nexports.addPairToJSMap = addPairToJSMap;\n","'use strict';\n\nconst ALIAS = Symbol.for('yaml.alias');\nconst DOC = Symbol.for('yaml.document');\nconst MAP = Symbol.for('yaml.map');\nconst PAIR = Symbol.for('yaml.pair');\nconst SCALAR = Symbol.for('yaml.scalar');\nconst SEQ = Symbol.for('yaml.seq');\nconst NODE_TYPE = Symbol.for('yaml.node.type');\nconst isAlias = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === ALIAS;\nconst isDocument = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === DOC;\nconst isMap = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === MAP;\nconst isPair = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === PAIR;\nconst isScalar = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SCALAR;\nconst isSeq = (node) => !!node && typeof node === 'object' && node[NODE_TYPE] === SEQ;\nfunction isCollection(node) {\n    if (node && typeof node === 'object')\n        switch (node[NODE_TYPE]) {\n            case MAP:\n            case SEQ:\n                return true;\n        }\n    return false;\n}\nfunction isNode(node) {\n    if (node && typeof node === 'object')\n        switch (node[NODE_TYPE]) {\n            case ALIAS:\n            case MAP:\n            case SCALAR:\n            case SEQ:\n                return true;\n        }\n    return false;\n}\nconst hasAnchor = (node) => (isScalar(node) || isCollection(node)) && !!node.anchor;\n\nexports.ALIAS = ALIAS;\nexports.DOC = DOC;\nexports.MAP = MAP;\nexports.NODE_TYPE = NODE_TYPE;\nexports.PAIR = PAIR;\nexports.SCALAR = SCALAR;\nexports.SEQ = SEQ;\nexports.hasAnchor = hasAnchor;\nexports.isAlias = isAlias;\nexports.isCollection = isCollection;\nexports.isDocument = isDocument;\nexports.isMap = isMap;\nexports.isNode = isNode;\nexports.isPair = isPair;\nexports.isScalar = isScalar;\nexports.isSeq = isSeq;\n","'use strict';\n\nvar identity = require('./identity.js');\n\n/**\n * Recursively convert any node or its contents to native JavaScript\n *\n * @param value - The input value\n * @param arg - If `value` defines a `toJSON()` method, use this\n *   as its first argument\n * @param ctx - Conversion context, originally set in Document#toJS(). 
If\n *   `{ keep: true }` is not set, output should be suitable for JSON\n *   stringification.\n */\nfunction toJS(value, arg, ctx) {\n    // eslint-disable-next-line @typescript-eslint/no-unsafe-return\n    if (Array.isArray(value))\n        return value.map((v, i) => toJS(v, String(i), ctx));\n    if (value && typeof value.toJSON === 'function') {\n        // eslint-disable-next-line @typescript-eslint/no-unsafe-call\n        if (!ctx || !identity.hasAnchor(value))\n            return value.toJSON(arg, ctx);\n        const data = { aliasCount: 0, count: 1, res: undefined };\n        ctx.anchors.set(value, data);\n        ctx.onCreate = res => {\n            data.res = res;\n            delete ctx.onCreate;\n        };\n        const res = value.toJSON(arg, ctx);\n        if (ctx.onCreate)\n            ctx.onCreate(res);\n        return res;\n    }\n    if (typeof value === 'bigint' && !ctx?.keep)\n        return Number(value);\n    return value;\n}\n\nexports.toJS = toJS;\n","'use strict';\n\nvar resolveBlockScalar = require('../compose/resolve-block-scalar.js');\nvar resolveFlowScalar = require('../compose/resolve-flow-scalar.js');\nvar errors = require('../errors.js');\nvar stringifyString = require('../stringify/stringifyString.js');\n\nfunction resolveAsScalar(token, strict = true, onError) {\n    if (token) {\n        const _onError = (pos, code, message) => {\n            const offset = typeof pos === 'number' ? pos : Array.isArray(pos) ? pos[0] : pos.offset;\n            if (onError)\n                onError(offset, code, message);\n            else\n                throw new errors.YAMLParseError([offset, offset + 1], code, message);\n        };\n        switch (token.type) {\n            case 'scalar':\n            case 'single-quoted-scalar':\n            case 'double-quoted-scalar':\n                return resolveFlowScalar.resolveFlowScalar(token, strict, _onError);\n            case 'block-scalar':\n                return resolveBlockScalar.resolveBlockScalar({ options: { strict } }, token, _onError);\n        }\n    }\n    return null;\n}\n/**\n * Create a new scalar token with `value`\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.end Comments and whitespace after the end of the value, or after the block scalar header. If undefined, a newline will be added.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.indent The indent level of the token.\n * @param context.inFlow Is this scalar within a flow collection? This may affect the resolved type of the token's value.\n * @param context.offset The offset position of the token.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction createScalarToken(value, context) {\n    const { implicitKey = false, indent, inFlow = false, offset = -1, type = 'PLAIN' } = context;\n    const source = stringifyString.stringifyString({ type, value }, {\n        implicitKey,\n        indent: indent > 0 ? ' '.repeat(indent) : '',\n        inFlow,\n        options: { blockQuote: true, lineWidth: -1 }\n    });\n    const end = context.end ?? 
[\n        { type: 'newline', offset: -1, indent, source: '\\n' }\n    ];\n    switch (source[0]) {\n        case '|':\n        case '>': {\n            const he = source.indexOf('\\n');\n            const head = source.substring(0, he);\n            const body = source.substring(he + 1) + '\\n';\n            const props = [\n                { type: 'block-scalar-header', offset, indent, source: head }\n            ];\n            if (!addEndtoBlockProps(props, end))\n                props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n            return { type: 'block-scalar', offset, indent, props, source: body };\n        }\n        case '\"':\n            return { type: 'double-quoted-scalar', offset, indent, source, end };\n        case \"'\":\n            return { type: 'single-quoted-scalar', offset, indent, source, end };\n        default:\n            return { type: 'scalar', offset, indent, source, end };\n    }\n}\n/**\n * Set the value of `token` to the given string `value`, overwriting any previous contents and type that it may have.\n *\n * Best efforts are made to retain any comments previously associated with the `token`,\n * though all contents within a collection's `items` will be overwritten.\n *\n * Values that represent an actual string but may be parsed as a different type should use a `type` other than `'PLAIN'`,\n * as this function does not support any schema operations and won't check for such conflicts.\n *\n * @param token Any token. If it does not include an `indent` value, the value will be stringified as if it were an implicit key.\n * @param value The string representation of the value, which will have its content properly indented.\n * @param context.afterKey In most cases, values after a key should have an additional level of indentation.\n * @param context.implicitKey Being within an implicit key may affect the resolved type of the token's value.\n * @param context.inFlow Being within a flow collection may affect the resolved type of the token's value.\n * @param context.type The preferred type of the scalar token. If undefined, the previous type of the `token` will be used, defaulting to `'PLAIN'`.\n */\nfunction setScalarValue(token, value, context = {}) {\n    let { afterKey = false, implicitKey = false, inFlow = false, type } = context;\n    let indent = 'indent' in token ? token.indent : null;\n    if (afterKey && typeof indent === 'number')\n        indent += 2;\n    if (!type)\n        switch (token.type) {\n            case 'single-quoted-scalar':\n                type = 'QUOTE_SINGLE';\n                break;\n            case 'double-quoted-scalar':\n                type = 'QUOTE_DOUBLE';\n                break;\n            case 'block-scalar': {\n                const header = token.props[0];\n                if (header.type !== 'block-scalar-header')\n                    throw new Error('Invalid block scalar header');\n                type = header.source[0] === '>' ? 'BLOCK_FOLDED' : 'BLOCK_LITERAL';\n                break;\n            }\n            default:\n                type = 'PLAIN';\n        }\n    const source = stringifyString.stringifyString({ type, value }, {\n        implicitKey: implicitKey || indent === null,\n        indent: indent !== null && indent > 0 ? 
' '.repeat(indent) : '',\n        inFlow,\n        options: { blockQuote: true, lineWidth: -1 }\n    });\n    switch (source[0]) {\n        case '|':\n        case '>':\n            setBlockScalarValue(token, source);\n            break;\n        case '\"':\n            setFlowScalarValue(token, source, 'double-quoted-scalar');\n            break;\n        case \"'\":\n            setFlowScalarValue(token, source, 'single-quoted-scalar');\n            break;\n        default:\n            setFlowScalarValue(token, source, 'scalar');\n    }\n}\nfunction setBlockScalarValue(token, source) {\n    const he = source.indexOf('\\n');\n    const head = source.substring(0, he);\n    const body = source.substring(he + 1) + '\\n';\n    if (token.type === 'block-scalar') {\n        const header = token.props[0];\n        if (header.type !== 'block-scalar-header')\n            throw new Error('Invalid block scalar header');\n        header.source = head;\n        token.source = body;\n    }\n    else {\n        const { offset } = token;\n        const indent = 'indent' in token ? token.indent : -1;\n        const props = [\n            { type: 'block-scalar-header', offset, indent, source: head }\n        ];\n        if (!addEndtoBlockProps(props, 'end' in token ? token.end : undefined))\n            props.push({ type: 'newline', offset: -1, indent, source: '\\n' });\n        for (const key of Object.keys(token))\n            if (key !== 'type' && key !== 'offset')\n                delete token[key];\n        Object.assign(token, { type: 'block-scalar', indent, props, source: body });\n    }\n}\n/** @returns `true` if last token is a newline */\nfunction addEndtoBlockProps(props, end) {\n    if (end)\n        for (const st of end)\n            switch (st.type) {\n                case 'space':\n                case 'comment':\n                    props.push(st);\n                    break;\n                case 'newline':\n                    props.push(st);\n                    return true;\n            }\n    return false;\n}\nfunction setFlowScalarValue(token, source, type) {\n    switch (token.type) {\n        case 'scalar':\n        case 'double-quoted-scalar':\n        case 'single-quoted-scalar':\n            token.type = type;\n            token.source = source;\n            break;\n        case 'block-scalar': {\n            const end = token.props.slice(1);\n            let oa = source.length;\n            if (token.props[0].type === 'block-scalar-header')\n                oa -= token.props[0].source.length;\n            for (const tok of end)\n                tok.offset += oa;\n            delete token.props;\n            Object.assign(token, { type, source, end });\n            break;\n        }\n        case 'block-map':\n        case 'block-seq': {\n            const offset = token.offset + source.length;\n            const nl = { type: 'newline', offset, indent: token.indent, source: '\\n' };\n            delete token.items;\n            Object.assign(token, { type, source, end: [nl] });\n            break;\n        }\n        default: {\n            const indent = 'indent' in token ? token.indent : -1;\n            const end = 'end' in token && Array.isArray(token.end)\n                ? 
token.end.filter(st => st.type === 'space' ||\n                    st.type === 'comment' ||\n                    st.type === 'newline')\n                : [];\n            for (const key of Object.keys(token))\n                if (key !== 'type' && key !== 'offset')\n                    delete token[key];\n            Object.assign(token, { type, indent, source, end });\n        }\n    }\n}\n\nexports.createScalarToken = createScalarToken;\nexports.resolveAsScalar = resolveAsScalar;\nexports.setScalarValue = setScalarValue;\n","'use strict';\n\n/**\n * Stringify a CST document, token, or collection item\n *\n * Fair warning: This applies no validation whatsoever, and\n * simply concatenates the sources in their logical order.\n */\nconst stringify = (cst) => 'type' in cst ? stringifyToken(cst) : stringifyItem(cst);\nfunction stringifyToken(token) {\n    switch (token.type) {\n        case 'block-scalar': {\n            let res = '';\n            for (const tok of token.props)\n                res += stringifyToken(tok);\n            return res + token.source;\n        }\n        case 'block-map':\n        case 'block-seq': {\n            let res = '';\n            for (const item of token.items)\n                res += stringifyItem(item);\n            return res;\n        }\n        case 'flow-collection': {\n            let res = token.start.source;\n            for (const item of token.items)\n                res += stringifyItem(item);\n            for (const st of token.end)\n                res += st.source;\n            return res;\n        }\n        case 'document': {\n            let res = stringifyItem(token);\n            if (token.end)\n                for (const st of token.end)\n                    res += st.source;\n            return res;\n        }\n        default: {\n            let res = token.source;\n            if ('end' in token && token.end)\n                for (const st of token.end)\n                    res += st.source;\n            return res;\n        }\n    }\n}\nfunction stringifyItem({ start, key, sep, value }) {\n    let res = '';\n    for (const st of start)\n        res += st.source;\n    if (key)\n        res += stringifyToken(key);\n    if (sep)\n        for (const st of sep)\n            res += st.source;\n    if (value)\n        res += stringifyToken(value);\n    return res;\n}\n\nexports.stringify = stringify;\n","'use strict';\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove item');\n/**\n * Apply a visitor to a CST document or item.\n *\n * Walks through the tree (depth-first) starting from the root, calling a\n * `visitor` function with two arguments when entering each item:\n *   - `item`: The current item, which included the following members:\n *     - `start: SourceToken[]` – Source tokens before the key or value,\n *       possibly including its anchor or tag.\n *     - `key?: Token | null` – Set for pair values. 
May then be `null`, if\n *       the key before the `:` separator is empty.\n *     - `sep?: SourceToken[]` – Source tokens between the key and the value,\n *       which should include the `:` map value indicator if `value` is set.\n *     - `value?: Token` – The value of a sequence item, or of a map pair.\n *   - `path`: The steps from the root to the current node, as an array of\n *     `['key' | 'value', number]` tuples.\n *\n * The return value of the visitor may be used to control the traversal:\n *   - `undefined` (default): Do nothing and continue\n *   - `visit.SKIP`: Do not visit the children of this token, continue with\n *      next sibling\n *   - `visit.BREAK`: Terminate traversal completely\n *   - `visit.REMOVE`: Remove the current item, then continue with the next one\n *   - `number`: Set the index of the next step. This is useful especially if\n *     the index of the current token has changed.\n *   - `function`: Define the next visitor for this item. After the original\n *     visitor is called on item entry, next visitors are called after handling\n *     a non-empty `key` and when exiting the item.\n */\nfunction visit(cst, visitor) {\n    if ('type' in cst && cst.type === 'document')\n        cst = { start: cst.start, value: cst.value };\n    _visit(Object.freeze([]), cst, visitor);\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current item */\nvisit.SKIP = SKIP;\n/** Remove the current item */\nvisit.REMOVE = REMOVE;\n/** Find the item at `path` from `cst` as the root */\nvisit.itemAtPath = (cst, path) => {\n    let item = cst;\n    for (const [field, index] of path) {\n        const tok = item?.[field];\n        if (tok && 'items' in tok) {\n            item = tok.items[index];\n        }\n        else\n            return undefined;\n    }\n    return item;\n};\n/**\n * Get the immediate parent collection of the item at `path` from `cst` as the root.\n *\n * Throws an error if the collection is not found, which should never happen if the item itself exists.\n */\nvisit.parentCollection = (cst, path) => {\n    const parent = visit.itemAtPath(cst, path.slice(0, -1));\n    const field = path[path.length - 1][0];\n    const coll = parent?.[field];\n    if (coll && 'items' in coll)\n        return coll;\n    throw new Error('Parent collection not found');\n};\nfunction _visit(path, item, visitor) {\n    let ctrl = visitor(item, path);\n    if (typeof ctrl === 'symbol')\n        return ctrl;\n    for (const field of ['key', 'value']) {\n        const token = item[field];\n        if (token && 'items' in token) {\n            for (let i = 0; i < token.items.length; ++i) {\n                const ci = _visit(Object.freeze(path.concat([[field, i]])), token.items[i], visitor);\n                if (typeof ci === 'number')\n                    i = ci - 1;\n                else if (ci === BREAK)\n                    return BREAK;\n                else if (ci === REMOVE) {\n                    token.items.splice(i, 1);\n                    i -= 1;\n                }\n            }\n            if (typeof ctrl === 'function' && field === 'key')\n                ctrl = ctrl(item, path);\n        }\n    }\n    return typeof ctrl === 'function' ? 
ctrl(item, path) : ctrl;\n}\n\nexports.visit = visit;\n","'use strict';\n\nvar cstScalar = require('./cst-scalar.js');\nvar cstStringify = require('./cst-stringify.js');\nvar cstVisit = require('./cst-visit.js');\n\n/** The byte order mark */\nconst BOM = '\\u{FEFF}';\n/** Start of doc-mode */\nconst DOCUMENT = '\\x02'; // C0: Start of Text\n/** Unexpected end of flow-mode */\nconst FLOW_END = '\\x18'; // C0: Cancel\n/** Next token is a scalar value */\nconst SCALAR = '\\x1f'; // C0: Unit Separator\n/** @returns `true` if `token` is a flow or block collection */\nconst isCollection = (token) => !!token && 'items' in token;\n/** @returns `true` if `token` is a flow or block scalar; not an alias */\nconst isScalar = (token) => !!token &&\n    (token.type === 'scalar' ||\n        token.type === 'single-quoted-scalar' ||\n        token.type === 'double-quoted-scalar' ||\n        token.type === 'block-scalar');\n/* istanbul ignore next */\n/** Get a printable representation of a lexer token */\nfunction prettyToken(token) {\n    switch (token) {\n        case BOM:\n            return '';\n        case DOCUMENT:\n            return '';\n        case FLOW_END:\n            return '';\n        case SCALAR:\n            return '';\n        default:\n            return JSON.stringify(token);\n    }\n}\n/** Identify the type of a lexer token. May return `null` for unknown tokens. */\nfunction tokenType(source) {\n    switch (source) {\n        case BOM:\n            return 'byte-order-mark';\n        case DOCUMENT:\n            return 'doc-mode';\n        case FLOW_END:\n            return 'flow-error-end';\n        case SCALAR:\n            return 'scalar';\n        case '---':\n            return 'doc-start';\n        case '...':\n            return 'doc-end';\n        case '':\n        case '\\n':\n        case '\\r\\n':\n            return 'newline';\n        case '-':\n            return 'seq-item-ind';\n        case '?':\n            return 'explicit-key-ind';\n        case ':':\n            return 'map-value-ind';\n        case '{':\n            return 'flow-map-start';\n        case '}':\n            return 'flow-map-end';\n        case '[':\n            return 'flow-seq-start';\n        case ']':\n            return 'flow-seq-end';\n        case ',':\n            return 'comma';\n    }\n    switch (source[0]) {\n        case ' ':\n        case '\\t':\n            return 'space';\n        case '#':\n            return 'comment';\n        case '%':\n            return 'directive-line';\n        case '*':\n            return 'alias';\n        case '&':\n            return 'anchor';\n        case '!':\n            return 'tag';\n        case \"'\":\n            return 'single-quoted-scalar';\n        case '\"':\n            return 'double-quoted-scalar';\n        case '|':\n        case '>':\n            return 'block-scalar-header';\n    }\n    return null;\n}\n\nexports.createScalarToken = cstScalar.createScalarToken;\nexports.resolveAsScalar = cstScalar.resolveAsScalar;\nexports.setScalarValue = cstScalar.setScalarValue;\nexports.stringify = cstStringify.stringify;\nexports.visit = cstVisit.visit;\nexports.BOM = BOM;\nexports.DOCUMENT = DOCUMENT;\nexports.FLOW_END = FLOW_END;\nexports.SCALAR = SCALAR;\nexports.isCollection = isCollection;\nexports.isScalar = isScalar;\nexports.prettyToken = prettyToken;\nexports.tokenType = tokenType;\n","'use strict';\n\nvar cst = require('./cst.js');\n\n/*\nSTART -> stream\n\nstream\n  directive -> line-end -> stream\n  indent + line-end -> stream\n  [else] 
-> line-start\n\nline-end\n  comment -> line-end\n  newline -> .\n  input-end -> END\n\nline-start\n  doc-start -> doc\n  doc-end -> stream\n  [else] -> indent -> block-start\n\nblock-start\n  seq-item-start -> block-start\n  explicit-key-start -> block-start\n  map-value-start -> block-start\n  [else] -> doc\n\ndoc\n  line-end -> line-start\n  spaces -> doc\n  anchor -> doc\n  tag -> doc\n  flow-start -> flow -> doc\n  flow-end -> error -> doc\n  seq-item-start -> error -> doc\n  explicit-key-start -> error -> doc\n  map-value-start -> doc\n  alias -> doc\n  quote-start -> quoted-scalar -> doc\n  block-scalar-header -> line-end -> block-scalar(min) -> line-start\n  [else] -> plain-scalar(false, min) -> doc\n\nflow\n  line-end -> flow\n  spaces -> flow\n  anchor -> flow\n  tag -> flow\n  flow-start -> flow -> flow\n  flow-end -> .\n  seq-item-start -> error -> flow\n  explicit-key-start -> flow\n  map-value-start -> flow\n  alias -> flow\n  quote-start -> quoted-scalar -> flow\n  comma -> flow\n  [else] -> plain-scalar(true, 0) -> flow\n\nquoted-scalar\n  quote-end -> .\n  [else] -> quoted-scalar\n\nblock-scalar(min)\n  newline + peek(indent < min) -> .\n  [else] -> block-scalar(min)\n\nplain-scalar(is-flow, min)\n  scalar-end(is-flow) -> .\n  peek(newline + (indent < min)) -> .\n  [else] -> plain-scalar(min)\n*/\nfunction isEmpty(ch) {\n    switch (ch) {\n        case undefined:\n        case ' ':\n        case '\\n':\n        case '\\r':\n        case '\\t':\n            return true;\n        default:\n            return false;\n    }\n}\nconst hexDigits = new Set('0123456789ABCDEFabcdef');\nconst tagChars = new Set(\"0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz-#;/?:@&=+$_.!~*'()\");\nconst flowIndicatorChars = new Set(',[]{}');\nconst invalidAnchorChars = new Set(' ,[]{}\\n\\r\\t');\nconst isNotAnchorChar = (ch) => !ch || invalidAnchorChars.has(ch);\n/**\n * Splits an input string into lexical tokens, i.e. smaller strings that are\n * easily identifiable by `tokens.tokenType()`.\n *\n * Lexing starts always in a \"stream\" context. Incomplete input may be buffered\n * until a complete token can be emitted.\n *\n * In addition to slices of the original input, the following control characters\n * may also be emitted:\n *\n * - `\\x02` (Start of Text): A document starts with the next token\n * - `\\x18` (Cancel): Unexpected end of flow-mode (indicates an error)\n * - `\\x1f` (Unit Separator): Next token is a scalar value\n * - `\\u{FEFF}` (Byte order mark): Emitted separately outside documents\n */\nclass Lexer {\n    constructor() {\n        /**\n         * Flag indicating whether the end of the current buffer marks the end of\n         * all input\n         */\n        this.atEnd = false;\n        /**\n         * Explicit indent set in block scalar header, as an offset from the current\n         * minimum indent, so e.g. set to 1 from a header `|2+`. 
Set to -1 if not\n         * explicitly set.\n         */\n        this.blockScalarIndent = -1;\n        /**\n         * Block scalars that include a + (keep) chomping indicator in their header\n         * include trailing empty lines, which are otherwise excluded from the\n         * scalar's contents.\n         */\n        this.blockScalarKeep = false;\n        /** Current input */\n        this.buffer = '';\n        /**\n         * Flag noting whether the map value indicator : can immediately follow this\n         * node within a flow context.\n         */\n        this.flowKey = false;\n        /** Count of surrounding flow collection levels. */\n        this.flowLevel = 0;\n        /**\n         * Minimum level of indentation required for next lines to be parsed as a\n         * part of the current scalar value.\n         */\n        this.indentNext = 0;\n        /** Indentation level of the current line. */\n        this.indentValue = 0;\n        /** Position of the next \\n character. */\n        this.lineEndPos = null;\n        /** Stores the state of the lexer if reaching the end of incpomplete input */\n        this.next = null;\n        /** A pointer to `buffer`; the current position of the lexer. */\n        this.pos = 0;\n    }\n    /**\n     * Generate YAML tokens from the `source` string. If `incomplete`,\n     * a part of the last line may be left as a buffer for the next call.\n     *\n     * @returns A generator of lexical tokens\n     */\n    *lex(source, incomplete = false) {\n        if (source) {\n            if (typeof source !== 'string')\n                throw TypeError('source is not a string');\n            this.buffer = this.buffer ? this.buffer + source : source;\n            this.lineEndPos = null;\n        }\n        this.atEnd = !incomplete;\n        let next = this.next ?? 'stream';\n        while (next && (incomplete || this.hasChars(1)))\n            next = yield* this.parseNext(next);\n    }\n    atLineEnd() {\n        let i = this.pos;\n        let ch = this.buffer[i];\n        while (ch === ' ' || ch === '\\t')\n            ch = this.buffer[++i];\n        if (!ch || ch === '#' || ch === '\\n')\n            return true;\n        if (ch === '\\r')\n            return this.buffer[i + 1] === '\\n';\n        return false;\n    }\n    charAt(n) {\n        return this.buffer[this.pos + n];\n    }\n    continueScalar(offset) {\n        let ch = this.buffer[offset];\n        if (this.indentNext > 0) {\n            let indent = 0;\n            while (ch === ' ')\n                ch = this.buffer[++indent + offset];\n            if (ch === '\\r') {\n                const next = this.buffer[indent + offset + 1];\n                if (next === '\\n' || (!next && !this.atEnd))\n                    return offset + indent + 1;\n            }\n            return ch === '\\n' || indent >= this.indentNext || (!ch && !this.atEnd)\n                ? offset + indent\n                : -1;\n        }\n        if (ch === '-' || ch === '.') {\n            const dt = this.buffer.substr(offset, 3);\n            if ((dt === '---' || dt === '...') && isEmpty(this.buffer[offset + 3]))\n                return -1;\n        }\n        return offset;\n    }\n    getLine() {\n        let end = this.lineEndPos;\n        if (typeof end !== 'number' || (end !== -1 && end < this.pos)) {\n            end = this.buffer.indexOf('\\n', this.pos);\n            this.lineEndPos = end;\n        }\n        if (end === -1)\n            return this.atEnd ? 
this.buffer.substring(this.pos) : null;\n        if (this.buffer[end - 1] === '\\r')\n            end -= 1;\n        return this.buffer.substring(this.pos, end);\n    }\n    hasChars(n) {\n        return this.pos + n <= this.buffer.length;\n    }\n    setNext(state) {\n        this.buffer = this.buffer.substring(this.pos);\n        this.pos = 0;\n        this.lineEndPos = null;\n        this.next = state;\n        return null;\n    }\n    peek(n) {\n        return this.buffer.substr(this.pos, n);\n    }\n    *parseNext(next) {\n        switch (next) {\n            case 'stream':\n                return yield* this.parseStream();\n            case 'line-start':\n                return yield* this.parseLineStart();\n            case 'block-start':\n                return yield* this.parseBlockStart();\n            case 'doc':\n                return yield* this.parseDocument();\n            case 'flow':\n                return yield* this.parseFlowCollection();\n            case 'quoted-scalar':\n                return yield* this.parseQuotedScalar();\n            case 'block-scalar':\n                return yield* this.parseBlockScalar();\n            case 'plain-scalar':\n                return yield* this.parsePlainScalar();\n        }\n    }\n    *parseStream() {\n        let line = this.getLine();\n        if (line === null)\n            return this.setNext('stream');\n        if (line[0] === cst.BOM) {\n            yield* this.pushCount(1);\n            line = line.substring(1);\n        }\n        if (line[0] === '%') {\n            let dirEnd = line.length;\n            let cs = line.indexOf('#');\n            while (cs !== -1) {\n                const ch = line[cs - 1];\n                if (ch === ' ' || ch === '\\t') {\n                    dirEnd = cs - 1;\n                    break;\n                }\n                else {\n                    cs = line.indexOf('#', cs + 1);\n                }\n            }\n            while (true) {\n                const ch = line[dirEnd - 1];\n                if (ch === ' ' || ch === '\\t')\n                    dirEnd -= 1;\n                else\n                    break;\n            }\n            const n = (yield* this.pushCount(dirEnd)) + (yield* this.pushSpaces(true));\n            yield* this.pushCount(line.length - n); // possible comment\n            this.pushNewline();\n            return 'stream';\n        }\n        if (this.atLineEnd()) {\n            const sp = yield* this.pushSpaces(true);\n            yield* this.pushCount(line.length - sp);\n            yield* this.pushNewline();\n            return 'stream';\n        }\n        yield cst.DOCUMENT;\n        return yield* this.parseLineStart();\n    }\n    *parseLineStart() {\n        const ch = this.charAt(0);\n        if (!ch && !this.atEnd)\n            return this.setNext('line-start');\n        if (ch === '-' || ch === '.') {\n            if (!this.atEnd && !this.hasChars(4))\n                return this.setNext('line-start');\n            const s = this.peek(3);\n            if ((s === '---' || s === '...') && isEmpty(this.charAt(3))) {\n                yield* this.pushCount(3);\n                this.indentValue = 0;\n                this.indentNext = 0;\n                return s === '---' ? 
'doc' : 'stream';\n            }\n        }\n        this.indentValue = yield* this.pushSpaces(false);\n        if (this.indentNext > this.indentValue && !isEmpty(this.charAt(1)))\n            this.indentNext = this.indentValue;\n        return yield* this.parseBlockStart();\n    }\n    *parseBlockStart() {\n        const [ch0, ch1] = this.peek(2);\n        if (!ch1 && !this.atEnd)\n            return this.setNext('block-start');\n        if ((ch0 === '-' || ch0 === '?' || ch0 === ':') && isEmpty(ch1)) {\n            const n = (yield* this.pushCount(1)) + (yield* this.pushSpaces(true));\n            this.indentNext = this.indentValue + 1;\n            this.indentValue += n;\n            return yield* this.parseBlockStart();\n        }\n        return 'doc';\n    }\n    *parseDocument() {\n        yield* this.pushSpaces(true);\n        const line = this.getLine();\n        if (line === null)\n            return this.setNext('doc');\n        let n = yield* this.pushIndicators();\n        switch (line[n]) {\n            case '#':\n                yield* this.pushCount(line.length - n);\n            // fallthrough\n            case undefined:\n                yield* this.pushNewline();\n                return yield* this.parseLineStart();\n            case '{':\n            case '[':\n                yield* this.pushCount(1);\n                this.flowKey = false;\n                this.flowLevel = 1;\n                return 'flow';\n            case '}':\n            case ']':\n                // this is an error\n                yield* this.pushCount(1);\n                return 'doc';\n            case '*':\n                yield* this.pushUntil(isNotAnchorChar);\n                return 'doc';\n            case '\"':\n            case \"'\":\n                return yield* this.parseQuotedScalar();\n            case '|':\n            case '>':\n                n += yield* this.parseBlockScalarHeader();\n                n += yield* this.pushSpaces(true);\n                yield* this.pushCount(line.length - n);\n                yield* this.pushNewline();\n                return yield* this.parseBlockScalar();\n            default:\n                return yield* this.parsePlainScalar();\n        }\n    }\n    *parseFlowCollection() {\n        let nl, sp;\n        let indent = -1;\n        do {\n            nl = yield* this.pushNewline();\n            if (nl > 0) {\n                sp = yield* this.pushSpaces(false);\n                this.indentValue = indent = sp;\n            }\n            else {\n                sp = 0;\n            }\n            sp += yield* this.pushSpaces(true);\n        } while (nl + sp > 0);\n        const line = this.getLine();\n        if (line === null)\n            return this.setNext('flow');\n        if ((indent !== -1 && indent < this.indentNext && line[0] !== '#') ||\n            (indent === 0 &&\n                (line.startsWith('---') || line.startsWith('...')) &&\n                isEmpty(line[3]))) {\n            // Allowing for the terminal ] or } at the same (rather than greater)\n            // indent level as the initial [ or { is technically invalid, but\n            // failing here would be surprising to users.\n            const atFlowEndMarker = indent === this.indentNext - 1 &&\n                this.flowLevel === 1 &&\n                (line[0] === ']' || line[0] === '}');\n            if (!atFlowEndMarker) {\n                // this is an error\n                this.flowLevel = 0;\n                yield cst.FLOW_END;\n                return yield* 
this.parseLineStart();\n            }\n        }\n        let n = 0;\n        while (line[n] === ',') {\n            n += yield* this.pushCount(1);\n            n += yield* this.pushSpaces(true);\n            this.flowKey = false;\n        }\n        n += yield* this.pushIndicators();\n        switch (line[n]) {\n            case undefined:\n                return 'flow';\n            case '#':\n                yield* this.pushCount(line.length - n);\n                return 'flow';\n            case '{':\n            case '[':\n                yield* this.pushCount(1);\n                this.flowKey = false;\n                this.flowLevel += 1;\n                return 'flow';\n            case '}':\n            case ']':\n                yield* this.pushCount(1);\n                this.flowKey = true;\n                this.flowLevel -= 1;\n                return this.flowLevel ? 'flow' : 'doc';\n            case '*':\n                yield* this.pushUntil(isNotAnchorChar);\n                return 'flow';\n            case '\"':\n            case \"'\":\n                this.flowKey = true;\n                return yield* this.parseQuotedScalar();\n            case ':': {\n                const next = this.charAt(1);\n                if (this.flowKey || isEmpty(next) || next === ',') {\n                    this.flowKey = false;\n                    yield* this.pushCount(1);\n                    yield* this.pushSpaces(true);\n                    return 'flow';\n                }\n            }\n            // fallthrough\n            default:\n                this.flowKey = false;\n                return yield* this.parsePlainScalar();\n        }\n    }\n    *parseQuotedScalar() {\n        const quote = this.charAt(0);\n        let end = this.buffer.indexOf(quote, this.pos + 1);\n        if (quote === \"'\") {\n            while (end !== -1 && this.buffer[end + 1] === \"'\")\n                end = this.buffer.indexOf(\"'\", end + 2);\n        }\n        else {\n            // double-quote\n            while (end !== -1) {\n                let n = 0;\n                while (this.buffer[end - 1 - n] === '\\\\')\n                    n += 1;\n                if (n % 2 === 0)\n                    break;\n                end = this.buffer.indexOf('\"', end + 1);\n            }\n        }\n        // Only looking for newlines within the quotes\n        const qb = this.buffer.substring(0, end);\n        let nl = qb.indexOf('\\n', this.pos);\n        if (nl !== -1) {\n            while (nl !== -1) {\n                const cs = this.continueScalar(nl + 1);\n                if (cs === -1)\n                    break;\n                nl = qb.indexOf('\\n', cs);\n            }\n            if (nl !== -1) {\n                // this is an error caused by an unexpected unindent\n                end = nl - (qb[nl - 1] === '\\r' ? 2 : 1);\n            }\n        }\n        if (end === -1) {\n            if (!this.atEnd)\n                return this.setNext('quoted-scalar');\n            end = this.buffer.length;\n        }\n        yield* this.pushToIndex(end + 1, false);\n        return this.flowLevel ? 
'flow' : 'doc';\n    }\n    *parseBlockScalarHeader() {\n        this.blockScalarIndent = -1;\n        this.blockScalarKeep = false;\n        let i = this.pos;\n        while (true) {\n            const ch = this.buffer[++i];\n            if (ch === '+')\n                this.blockScalarKeep = true;\n            else if (ch > '0' && ch <= '9')\n                this.blockScalarIndent = Number(ch) - 1;\n            else if (ch !== '-')\n                break;\n        }\n        return yield* this.pushUntil(ch => isEmpty(ch) || ch === '#');\n    }\n    *parseBlockScalar() {\n        let nl = this.pos - 1; // may be -1 if this.pos === 0\n        let indent = 0;\n        let ch;\n        loop: for (let i = this.pos; (ch = this.buffer[i]); ++i) {\n            switch (ch) {\n                case ' ':\n                    indent += 1;\n                    break;\n                case '\\n':\n                    nl = i;\n                    indent = 0;\n                    break;\n                case '\\r': {\n                    const next = this.buffer[i + 1];\n                    if (!next && !this.atEnd)\n                        return this.setNext('block-scalar');\n                    if (next === '\\n')\n                        break;\n                } // fallthrough\n                default:\n                    break loop;\n            }\n        }\n        if (!ch && !this.atEnd)\n            return this.setNext('block-scalar');\n        if (indent >= this.indentNext) {\n            if (this.blockScalarIndent === -1)\n                this.indentNext = indent;\n            else {\n                this.indentNext =\n                    this.blockScalarIndent + (this.indentNext === 0 ? 1 : this.indentNext);\n            }\n            do {\n                const cs = this.continueScalar(nl + 1);\n                if (cs === -1)\n                    break;\n                nl = this.buffer.indexOf('\\n', cs);\n            } while (nl !== -1);\n            if (nl === -1) {\n                if (!this.atEnd)\n                    return this.setNext('block-scalar');\n                nl = this.buffer.length;\n            }\n        }\n        // Trailing insufficiently indented tabs are invalid.\n        // To catch that during parsing, we include them in the block scalar value.\n        let i = nl + 1;\n        ch = this.buffer[i];\n        while (ch === ' ')\n            ch = this.buffer[++i];\n        if (ch === '\\t') {\n            while (ch === '\\t' || ch === ' ' || ch === '\\r' || ch === '\\n')\n                ch = this.buffer[++i];\n            nl = i - 1;\n        }\n        else if (!this.blockScalarKeep) {\n            do {\n                let i = nl - 1;\n                let ch = this.buffer[i];\n                if (ch === '\\r')\n                    ch = this.buffer[--i];\n                const lastChar = i; // Drop the line if last char not more indented\n                while (ch === ' ')\n                    ch = this.buffer[--i];\n                if (ch === '\\n' && i >= this.pos && i + 1 + indent > lastChar)\n                    nl = i;\n                else\n                    break;\n            } while (true);\n        }\n        yield cst.SCALAR;\n        yield* this.pushToIndex(nl + 1, true);\n        return yield* this.parseLineStart();\n    }\n    *parsePlainScalar() {\n        const inFlow = this.flowLevel > 0;\n        let end = this.pos - 1;\n        let i = this.pos - 1;\n        let ch;\n        while ((ch = this.buffer[++i])) {\n            if (ch === ':') {\n   
             const next = this.buffer[i + 1];\n                if (isEmpty(next) || (inFlow && flowIndicatorChars.has(next)))\n                    break;\n                end = i;\n            }\n            else if (isEmpty(ch)) {\n                let next = this.buffer[i + 1];\n                if (ch === '\\r') {\n                    if (next === '\\n') {\n                        i += 1;\n                        ch = '\\n';\n                        next = this.buffer[i + 1];\n                    }\n                    else\n                        end = i;\n                }\n                if (next === '#' || (inFlow && flowIndicatorChars.has(next)))\n                    break;\n                if (ch === '\\n') {\n                    const cs = this.continueScalar(i + 1);\n                    if (cs === -1)\n                        break;\n                    i = Math.max(i, cs - 2); // to advance, but still account for ' #'\n                }\n            }\n            else {\n                if (inFlow && flowIndicatorChars.has(ch))\n                    break;\n                end = i;\n            }\n        }\n        if (!ch && !this.atEnd)\n            return this.setNext('plain-scalar');\n        yield cst.SCALAR;\n        yield* this.pushToIndex(end + 1, true);\n        return inFlow ? 'flow' : 'doc';\n    }\n    *pushCount(n) {\n        if (n > 0) {\n            yield this.buffer.substr(this.pos, n);\n            this.pos += n;\n            return n;\n        }\n        return 0;\n    }\n    *pushToIndex(i, allowEmpty) {\n        const s = this.buffer.slice(this.pos, i);\n        if (s) {\n            yield s;\n            this.pos += s.length;\n            return s.length;\n        }\n        else if (allowEmpty)\n            yield '';\n        return 0;\n    }\n    *pushIndicators() {\n        switch (this.charAt(0)) {\n            case '!':\n                return ((yield* this.pushTag()) +\n                    (yield* this.pushSpaces(true)) +\n                    (yield* this.pushIndicators()));\n            case '&':\n                return ((yield* this.pushUntil(isNotAnchorChar)) +\n                    (yield* this.pushSpaces(true)) +\n                    (yield* this.pushIndicators()));\n            case '-': // this is an error\n            case '?': // this is an error outside flow collections\n            case ':': {\n                const inFlow = this.flowLevel > 0;\n                const ch1 = this.charAt(1);\n                if (isEmpty(ch1) || (inFlow && flowIndicatorChars.has(ch1))) {\n                    if (!inFlow)\n                        this.indentNext = this.indentValue + 1;\n                    else if (this.flowKey)\n                        this.flowKey = false;\n                    return ((yield* this.pushCount(1)) +\n                        (yield* this.pushSpaces(true)) +\n                        (yield* this.pushIndicators()));\n                }\n            }\n        }\n        return 0;\n    }\n    *pushTag() {\n        if (this.charAt(1) === '<') {\n            let i = this.pos + 2;\n            let ch = this.buffer[i];\n            while (!isEmpty(ch) && ch !== '>')\n                ch = this.buffer[++i];\n            return yield* this.pushToIndex(ch === '>' ? 
i + 1 : i, false);\n        }\n        else {\n            let i = this.pos + 1;\n            let ch = this.buffer[i];\n            while (ch) {\n                if (tagChars.has(ch))\n                    ch = this.buffer[++i];\n                else if (ch === '%' &&\n                    hexDigits.has(this.buffer[i + 1]) &&\n                    hexDigits.has(this.buffer[i + 2])) {\n                    ch = this.buffer[(i += 3)];\n                }\n                else\n                    break;\n            }\n            return yield* this.pushToIndex(i, false);\n        }\n    }\n    *pushNewline() {\n        const ch = this.buffer[this.pos];\n        if (ch === '\\n')\n            return yield* this.pushCount(1);\n        else if (ch === '\\r' && this.charAt(1) === '\\n')\n            return yield* this.pushCount(2);\n        else\n            return 0;\n    }\n    *pushSpaces(allowTabs) {\n        let i = this.pos - 1;\n        let ch;\n        do {\n            ch = this.buffer[++i];\n        } while (ch === ' ' || (allowTabs && ch === '\\t'));\n        const n = i - this.pos;\n        if (n > 0) {\n            yield this.buffer.substr(this.pos, n);\n            this.pos = i;\n        }\n        return n;\n    }\n    *pushUntil(test) {\n        let i = this.pos;\n        let ch = this.buffer[i];\n        while (!test(ch))\n            ch = this.buffer[++i];\n        return yield* this.pushToIndex(i, false);\n    }\n}\n\nexports.Lexer = Lexer;\n","'use strict';\n\n/**\n * Tracks newlines during parsing in order to provide an efficient API for\n * determining the one-indexed `{ line, col }` position for any offset\n * within the input.\n */\nclass LineCounter {\n    constructor() {\n        this.lineStarts = [];\n        /**\n         * Should be called in ascending order. Otherwise, call\n         * `lineCounter.lineStarts.sort()` before calling `linePos()`.\n         */\n        this.addNewLine = (offset) => this.lineStarts.push(offset);\n        /**\n         * Performs a binary search and returns the 1-indexed { line, col }\n         * position of `offset`. 
If `line === 0`, `addNewLine` has never been\n         * called or `offset` is before the first known newline.\n         */\n        this.linePos = (offset) => {\n            let low = 0;\n            let high = this.lineStarts.length;\n            while (low < high) {\n                const mid = (low + high) >> 1; // Math.floor((low + high) / 2)\n                if (this.lineStarts[mid] < offset)\n                    low = mid + 1;\n                else\n                    high = mid;\n            }\n            if (this.lineStarts[low] === offset)\n                return { line: low + 1, col: 1 };\n            if (low === 0)\n                return { line: 0, col: offset };\n            const start = this.lineStarts[low - 1];\n            return { line: low, col: offset - start + 1 };\n        };\n    }\n}\n\nexports.LineCounter = LineCounter;\n","'use strict';\n\nvar cst = require('./cst.js');\nvar lexer = require('./lexer.js');\n\nfunction includesToken(list, type) {\n    for (let i = 0; i < list.length; ++i)\n        if (list[i].type === type)\n            return true;\n    return false;\n}\nfunction findNonEmptyIndex(list) {\n    for (let i = 0; i < list.length; ++i) {\n        switch (list[i].type) {\n            case 'space':\n            case 'comment':\n            case 'newline':\n                break;\n            default:\n                return i;\n        }\n    }\n    return -1;\n}\nfunction isFlowToken(token) {\n    switch (token?.type) {\n        case 'alias':\n        case 'scalar':\n        case 'single-quoted-scalar':\n        case 'double-quoted-scalar':\n        case 'flow-collection':\n            return true;\n        default:\n            return false;\n    }\n}\nfunction getPrevProps(parent) {\n    switch (parent.type) {\n        case 'document':\n            return parent.start;\n        case 'block-map': {\n            const it = parent.items[parent.items.length - 1];\n            return it.sep ?? 
it.start;\n        }\n        case 'block-seq':\n            return parent.items[parent.items.length - 1].start;\n        /* istanbul ignore next should not happen */\n        default:\n            return [];\n    }\n}\n/** Note: May modify input array */\nfunction getFirstKeyStartProps(prev) {\n    if (prev.length === 0)\n        return [];\n    let i = prev.length;\n    loop: while (--i >= 0) {\n        switch (prev[i].type) {\n            case 'doc-start':\n            case 'explicit-key-ind':\n            case 'map-value-ind':\n            case 'seq-item-ind':\n            case 'newline':\n                break loop;\n        }\n    }\n    while (prev[++i]?.type === 'space') {\n        /* loop */\n    }\n    return prev.splice(i, prev.length);\n}\nfunction fixFlowSeqItems(fc) {\n    if (fc.start.type === 'flow-seq-start') {\n        for (const it of fc.items) {\n            if (it.sep &&\n                !it.value &&\n                !includesToken(it.start, 'explicit-key-ind') &&\n                !includesToken(it.sep, 'map-value-ind')) {\n                if (it.key)\n                    it.value = it.key;\n                delete it.key;\n                if (isFlowToken(it.value)) {\n                    if (it.value.end)\n                        Array.prototype.push.apply(it.value.end, it.sep);\n                    else\n                        it.value.end = it.sep;\n                }\n                else\n                    Array.prototype.push.apply(it.start, it.sep);\n                delete it.sep;\n            }\n        }\n    }\n}\n/**\n * A YAML concrete syntax tree (CST) parser\n *\n * ```ts\n * const src: string = ...\n * for (const token of new Parser().parse(src)) {\n *   // token: Token\n * }\n * ```\n *\n * To use the parser with a user-provided lexer:\n *\n * ```ts\n * function* parse(source: string, lexer: Lexer) {\n *   const parser = new Parser()\n *   for (const lexeme of lexer.lex(source))\n *     yield* parser.next(lexeme)\n *   yield* parser.end()\n * }\n *\n * const src: string = ...\n * const lexer = new Lexer()\n * for (const token of parse(src, lexer)) {\n *   // token: Token\n * }\n * ```\n */\nclass Parser {\n    /**\n     * @param onNewLine - If defined, called separately with the start position of\n     *   each new line (in `parse()`, including the start of input).\n     */\n    constructor(onNewLine) {\n        /** If true, space and sequence indicators count as indentation */\n        this.atNewLine = true;\n        /** If true, next token is a scalar value */\n        this.atScalar = false;\n        /** Current indentation level */\n        this.indent = 0;\n        /** Current offset since the start of parsing */\n        this.offset = 0;\n        /** On the same line with a block map key */\n        this.onKeyLine = false;\n        /** Top indicates the node that's currently being built */\n        this.stack = [];\n        /** The source of the current token, set in parse() */\n        this.source = '';\n        /** The type of the current token, set in parse() */\n        this.type = '';\n        // Must be defined after `next()`\n        this.lexer = new lexer.Lexer();\n        this.onNewLine = onNewLine;\n    }\n    /**\n     * Parse `source` as a YAML stream.\n     * If `incomplete`, a part of the last line may be left as a buffer for the next call.\n     *\n     * Errors are not thrown, but yielded as `{ type: 'error', message }` tokens.\n     *\n     * @returns A generator of tokens representing each directive, document, and other 
structure.\n     */\n    *parse(source, incomplete = false) {\n        if (this.onNewLine && this.offset === 0)\n            this.onNewLine(0);\n        for (const lexeme of this.lexer.lex(source, incomplete))\n            yield* this.next(lexeme);\n        if (!incomplete)\n            yield* this.end();\n    }\n    /**\n     * Advance the parser by the `source` of one lexical token.\n     */\n    *next(source) {\n        this.source = source;\n        if (process.env.LOG_TOKENS)\n            console.log('|', cst.prettyToken(source));\n        if (this.atScalar) {\n            this.atScalar = false;\n            yield* this.step();\n            this.offset += source.length;\n            return;\n        }\n        const type = cst.tokenType(source);\n        if (!type) {\n            const message = `Not a YAML token: ${source}`;\n            yield* this.pop({ type: 'error', offset: this.offset, message, source });\n            this.offset += source.length;\n        }\n        else if (type === 'scalar') {\n            this.atNewLine = false;\n            this.atScalar = true;\n            this.type = 'scalar';\n        }\n        else {\n            this.type = type;\n            yield* this.step();\n            switch (type) {\n                case 'newline':\n                    this.atNewLine = true;\n                    this.indent = 0;\n                    if (this.onNewLine)\n                        this.onNewLine(this.offset + source.length);\n                    break;\n                case 'space':\n                    if (this.atNewLine && source[0] === ' ')\n                        this.indent += source.length;\n                    break;\n                case 'explicit-key-ind':\n                case 'map-value-ind':\n                case 'seq-item-ind':\n                    if (this.atNewLine)\n                        this.indent += source.length;\n                    break;\n                case 'doc-mode':\n                case 'flow-error-end':\n                    return;\n                default:\n                    this.atNewLine = false;\n            }\n            this.offset += source.length;\n        }\n    }\n    /** Call at end of input to push out any remaining constructions */\n    *end() {\n        while (this.stack.length > 0)\n            yield* this.pop();\n    }\n    get sourceToken() {\n        const st = {\n            type: this.type,\n            offset: this.offset,\n            indent: this.indent,\n            source: this.source\n        };\n        return st;\n    }\n    *step() {\n        const top = this.peek(1);\n        if (this.type === 'doc-end' && (!top || top.type !== 'doc-end')) {\n            while (this.stack.length > 0)\n                yield* this.pop();\n            this.stack.push({\n                type: 'doc-end',\n                offset: this.offset,\n                source: this.source\n            });\n            return;\n        }\n        if (!top)\n            return yield* this.stream();\n        switch (top.type) {\n            case 'document':\n                return yield* this.document(top);\n            case 'alias':\n            case 'scalar':\n            case 'single-quoted-scalar':\n            case 'double-quoted-scalar':\n                return yield* this.scalar(top);\n            case 'block-scalar':\n                return yield* this.blockScalar(top);\n            case 'block-map':\n                return yield* this.blockMap(top);\n            case 'block-seq':\n                return yield* 
this.blockSequence(top);\n            case 'flow-collection':\n                return yield* this.flowCollection(top);\n            case 'doc-end':\n                return yield* this.documentEnd(top);\n        }\n        /* istanbul ignore next should not happen */\n        yield* this.pop();\n    }\n    peek(n) {\n        return this.stack[this.stack.length - n];\n    }\n    *pop(error) {\n        const token = error ?? this.stack.pop();\n        /* istanbul ignore if should not happen */\n        if (!token) {\n            const message = 'Tried to pop an empty stack';\n            yield { type: 'error', offset: this.offset, source: '', message };\n        }\n        else if (this.stack.length === 0) {\n            yield token;\n        }\n        else {\n            const top = this.peek(1);\n            if (token.type === 'block-scalar') {\n                // Block scalars use their parent rather than header indent\n                token.indent = 'indent' in top ? top.indent : 0;\n            }\n            else if (token.type === 'flow-collection' && top.type === 'document') {\n                // Ignore all indent for top-level flow collections\n                token.indent = 0;\n            }\n            if (token.type === 'flow-collection')\n                fixFlowSeqItems(token);\n            switch (top.type) {\n                case 'document':\n                    top.value = token;\n                    break;\n                case 'block-scalar':\n                    top.props.push(token); // error\n                    break;\n                case 'block-map': {\n                    const it = top.items[top.items.length - 1];\n                    if (it.value) {\n                        top.items.push({ start: [], key: token, sep: [] });\n                        this.onKeyLine = true;\n                        return;\n                    }\n                    else if (it.sep) {\n                        it.value = token;\n                    }\n                    else {\n                        Object.assign(it, { key: token, sep: [] });\n                        this.onKeyLine = !it.explicitKey;\n                        return;\n                    }\n                    break;\n                }\n                case 'block-seq': {\n                    const it = top.items[top.items.length - 1];\n                    if (it.value)\n                        top.items.push({ start: [], value: token });\n                    else\n                        it.value = token;\n                    break;\n                }\n                case 'flow-collection': {\n                    const it = top.items[top.items.length - 1];\n                    if (!it || it.value)\n                        top.items.push({ start: [], key: token, sep: [] });\n                    else if (it.sep)\n                        it.value = token;\n                    else\n                        Object.assign(it, { key: token, sep: [] });\n                    return;\n                }\n                /* istanbul ignore next should not happen */\n                default:\n                    yield* this.pop();\n                    yield* this.pop(token);\n            }\n            if ((top.type === 'document' ||\n                top.type === 'block-map' ||\n                top.type === 'block-seq') &&\n                (token.type === 'block-map' || token.type === 'block-seq')) {\n                const last = token.items[token.items.length - 1];\n                if (last &&\n                    !last.sep 
&&\n                    !last.value &&\n                    last.start.length > 0 &&\n                    findNonEmptyIndex(last.start) === -1 &&\n                    (token.indent === 0 ||\n                        last.start.every(st => st.type !== 'comment' || st.indent < token.indent))) {\n                    if (top.type === 'document')\n                        top.end = last.start;\n                    else\n                        top.items.push({ start: last.start });\n                    token.items.splice(-1, 1);\n                }\n            }\n        }\n    }\n    *stream() {\n        switch (this.type) {\n            case 'directive-line':\n                yield { type: 'directive', offset: this.offset, source: this.source };\n                return;\n            case 'byte-order-mark':\n            case 'space':\n            case 'comment':\n            case 'newline':\n                yield this.sourceToken;\n                return;\n            case 'doc-mode':\n            case 'doc-start': {\n                const doc = {\n                    type: 'document',\n                    offset: this.offset,\n                    start: []\n                };\n                if (this.type === 'doc-start')\n                    doc.start.push(this.sourceToken);\n                this.stack.push(doc);\n                return;\n            }\n        }\n        yield {\n            type: 'error',\n            offset: this.offset,\n            message: `Unexpected ${this.type} token in YAML stream`,\n            source: this.source\n        };\n    }\n    *document(doc) {\n        if (doc.value)\n            return yield* this.lineEnd(doc);\n        switch (this.type) {\n            case 'doc-start': {\n                if (findNonEmptyIndex(doc.start) !== -1) {\n                    yield* this.pop();\n                    yield* this.step();\n                }\n                else\n                    doc.start.push(this.sourceToken);\n                return;\n            }\n            case 'anchor':\n            case 'tag':\n            case 'space':\n            case 'comment':\n            case 'newline':\n                doc.start.push(this.sourceToken);\n                return;\n        }\n        const bv = this.startBlockValue(doc);\n        if (bv)\n            this.stack.push(bv);\n        else {\n            yield {\n                type: 'error',\n                offset: this.offset,\n                message: `Unexpected ${this.type} token in YAML document`,\n                source: this.source\n            };\n        }\n    }\n    *scalar(scalar) {\n        if (this.type === 'map-value-ind') {\n            const prev = getPrevProps(this.peek(2));\n            const start = getFirstKeyStartProps(prev);\n            let sep;\n            if (scalar.end) {\n                sep = scalar.end;\n                sep.push(this.sourceToken);\n                delete scalar.end;\n            }\n            else\n                sep = [this.sourceToken];\n            const map = {\n                type: 'block-map',\n                offset: scalar.offset,\n                indent: scalar.indent,\n                items: [{ start, key: scalar, sep }]\n            };\n            this.onKeyLine = true;\n            this.stack[this.stack.length - 1] = map;\n        }\n        else\n            yield* this.lineEnd(scalar);\n    }\n    *blockScalar(scalar) {\n        switch (this.type) {\n            case 'space':\n            case 'comment':\n            case 'newline':\n                
scalar.props.push(this.sourceToken);\n                return;\n            case 'scalar':\n                scalar.source = this.source;\n                // block-scalar source includes trailing newline\n                this.atNewLine = true;\n                this.indent = 0;\n                if (this.onNewLine) {\n                    let nl = this.source.indexOf('\\n') + 1;\n                    while (nl !== 0) {\n                        this.onNewLine(this.offset + nl);\n                        nl = this.source.indexOf('\\n', nl) + 1;\n                    }\n                }\n                yield* this.pop();\n                break;\n            /* istanbul ignore next should not happen */\n            default:\n                yield* this.pop();\n                yield* this.step();\n        }\n    }\n    *blockMap(map) {\n        const it = map.items[map.items.length - 1];\n        // it.sep is true-ish if pair already has key or : separator\n        switch (this.type) {\n            case 'newline':\n                this.onKeyLine = false;\n                if (it.value) {\n                    const end = 'end' in it.value ? it.value.end : undefined;\n                    const last = Array.isArray(end) ? end[end.length - 1] : undefined;\n                    if (last?.type === 'comment')\n                        end?.push(this.sourceToken);\n                    else\n                        map.items.push({ start: [this.sourceToken] });\n                }\n                else if (it.sep) {\n                    it.sep.push(this.sourceToken);\n                }\n                else {\n                    it.start.push(this.sourceToken);\n                }\n                return;\n            case 'space':\n            case 'comment':\n                if (it.value) {\n                    map.items.push({ start: [this.sourceToken] });\n                }\n                else if (it.sep) {\n                    it.sep.push(this.sourceToken);\n                }\n                else {\n                    if (this.atIndentedComment(it.start, map.indent)) {\n                        const prev = map.items[map.items.length - 2];\n                        const end = prev?.value?.end;\n                        if (Array.isArray(end)) {\n                            Array.prototype.push.apply(end, it.start);\n                            end.push(this.sourceToken);\n                            map.items.pop();\n                            return;\n                        }\n                    }\n                    it.start.push(this.sourceToken);\n                }\n                return;\n        }\n        if (this.indent >= map.indent) {\n            const atMapIndent = !this.onKeyLine && this.indent === map.indent;\n            const atNextItem = atMapIndent &&\n                (it.sep || it.explicitKey) &&\n                this.type !== 'seq-item-ind';\n            // For empty nodes, assign newline-separated not indented empty tokens to following node\n            let start = [];\n            if (atNextItem && it.sep && !it.value) {\n                const nl = [];\n                for (let i = 0; i < it.sep.length; ++i) {\n                    const st = it.sep[i];\n                    switch (st.type) {\n                        case 'newline':\n                            nl.push(i);\n                            break;\n                        case 'space':\n                            break;\n                        case 'comment':\n                            if (st.indent > map.indent)\n  
                              nl.length = 0;\n                            break;\n                        default:\n                            nl.length = 0;\n                    }\n                }\n                if (nl.length >= 2)\n                    start = it.sep.splice(nl[1]);\n            }\n            switch (this.type) {\n                case 'anchor':\n                case 'tag':\n                    if (atNextItem || it.value) {\n                        start.push(this.sourceToken);\n                        map.items.push({ start });\n                        this.onKeyLine = true;\n                    }\n                    else if (it.sep) {\n                        it.sep.push(this.sourceToken);\n                    }\n                    else {\n                        it.start.push(this.sourceToken);\n                    }\n                    return;\n                case 'explicit-key-ind':\n                    if (!it.sep && !it.explicitKey) {\n                        it.start.push(this.sourceToken);\n                        it.explicitKey = true;\n                    }\n                    else if (atNextItem || it.value) {\n                        start.push(this.sourceToken);\n                        map.items.push({ start, explicitKey: true });\n                    }\n                    else {\n                        this.stack.push({\n                            type: 'block-map',\n                            offset: this.offset,\n                            indent: this.indent,\n                            items: [{ start: [this.sourceToken], explicitKey: true }]\n                        });\n                    }\n                    this.onKeyLine = true;\n                    return;\n                case 'map-value-ind':\n                    if (it.explicitKey) {\n                        if (!it.sep) {\n                            if (includesToken(it.start, 'newline')) {\n                                Object.assign(it, { key: null, sep: [this.sourceToken] });\n                            }\n                            else {\n                                const start = getFirstKeyStartProps(it.start);\n                                this.stack.push({\n                                    type: 'block-map',\n                                    offset: this.offset,\n                                    indent: this.indent,\n                                    items: [{ start, key: null, sep: [this.sourceToken] }]\n                                });\n                            }\n                        }\n                        else if (it.value) {\n                            map.items.push({ start: [], key: null, sep: [this.sourceToken] });\n                        }\n                        else if (includesToken(it.sep, 'map-value-ind')) {\n                            this.stack.push({\n                                type: 'block-map',\n                                offset: this.offset,\n                                indent: this.indent,\n                                items: [{ start, key: null, sep: [this.sourceToken] }]\n                            });\n                        }\n                        else if (isFlowToken(it.key) &&\n                            !includesToken(it.sep, 'newline')) {\n                            const start = getFirstKeyStartProps(it.start);\n                            const key = it.key;\n                            const sep = it.sep;\n                            sep.push(this.sourceToken);\n           
                 // @ts-expect-error type guard is wrong here\n                            delete it.key;\n                            // @ts-expect-error type guard is wrong here\n                            delete it.sep;\n                            this.stack.push({\n                                type: 'block-map',\n                                offset: this.offset,\n                                indent: this.indent,\n                                items: [{ start, key, sep }]\n                            });\n                        }\n                        else if (start.length > 0) {\n                            // Not actually at next item\n                            it.sep = it.sep.concat(start, this.sourceToken);\n                        }\n                        else {\n                            it.sep.push(this.sourceToken);\n                        }\n                    }\n                    else {\n                        if (!it.sep) {\n                            Object.assign(it, { key: null, sep: [this.sourceToken] });\n                        }\n                        else if (it.value || atNextItem) {\n                            map.items.push({ start, key: null, sep: [this.sourceToken] });\n                        }\n                        else if (includesToken(it.sep, 'map-value-ind')) {\n                            this.stack.push({\n                                type: 'block-map',\n                                offset: this.offset,\n                                indent: this.indent,\n                                items: [{ start: [], key: null, sep: [this.sourceToken] }]\n                            });\n                        }\n                        else {\n                            it.sep.push(this.sourceToken);\n                        }\n                    }\n                    this.onKeyLine = true;\n                    return;\n                case 'alias':\n                case 'scalar':\n                case 'single-quoted-scalar':\n                case 'double-quoted-scalar': {\n                    const fs = this.flowScalar(this.type);\n                    if (atNextItem || it.value) {\n                        map.items.push({ start, key: fs, sep: [] });\n                        this.onKeyLine = true;\n                    }\n                    else if (it.sep) {\n                        this.stack.push(fs);\n                    }\n                    else {\n                        Object.assign(it, { key: fs, sep: [] });\n                        this.onKeyLine = true;\n                    }\n                    return;\n                }\n                default: {\n                    const bv = this.startBlockValue(map);\n                    if (bv) {\n                        if (atMapIndent && bv.type !== 'block-seq') {\n                            map.items.push({ start });\n                        }\n                        this.stack.push(bv);\n                        return;\n                    }\n                }\n            }\n        }\n        yield* this.pop();\n        yield* this.step();\n    }\n    *blockSequence(seq) {\n        const it = seq.items[seq.items.length - 1];\n        switch (this.type) {\n            case 'newline':\n                if (it.value) {\n                    const end = 'end' in it.value ? it.value.end : undefined;\n                    const last = Array.isArray(end) ? 
end[end.length - 1] : undefined;\n                    if (last?.type === 'comment')\n                        end?.push(this.sourceToken);\n                    else\n                        seq.items.push({ start: [this.sourceToken] });\n                }\n                else\n                    it.start.push(this.sourceToken);\n                return;\n            case 'space':\n            case 'comment':\n                if (it.value)\n                    seq.items.push({ start: [this.sourceToken] });\n                else {\n                    if (this.atIndentedComment(it.start, seq.indent)) {\n                        const prev = seq.items[seq.items.length - 2];\n                        const end = prev?.value?.end;\n                        if (Array.isArray(end)) {\n                            Array.prototype.push.apply(end, it.start);\n                            end.push(this.sourceToken);\n                            seq.items.pop();\n                            return;\n                        }\n                    }\n                    it.start.push(this.sourceToken);\n                }\n                return;\n            case 'anchor':\n            case 'tag':\n                if (it.value || this.indent <= seq.indent)\n                    break;\n                it.start.push(this.sourceToken);\n                return;\n            case 'seq-item-ind':\n                if (this.indent !== seq.indent)\n                    break;\n                if (it.value || includesToken(it.start, 'seq-item-ind'))\n                    seq.items.push({ start: [this.sourceToken] });\n                else\n                    it.start.push(this.sourceToken);\n                return;\n        }\n        if (this.indent > seq.indent) {\n            const bv = this.startBlockValue(seq);\n            if (bv) {\n                this.stack.push(bv);\n                return;\n            }\n        }\n        yield* this.pop();\n        yield* this.step();\n    }\n    *flowCollection(fc) {\n        const it = fc.items[fc.items.length - 1];\n        if (this.type === 'flow-error-end') {\n            let top;\n            do {\n                yield* this.pop();\n                top = this.peek(1);\n            } while (top && top.type === 'flow-collection');\n        }\n        else if (fc.end.length === 0) {\n            switch (this.type) {\n                case 'comma':\n                case 'explicit-key-ind':\n                    if (!it || it.sep)\n                        fc.items.push({ start: [this.sourceToken] });\n                    else\n                        it.start.push(this.sourceToken);\n                    return;\n                case 'map-value-ind':\n                    if (!it || it.value)\n                        fc.items.push({ start: [], key: null, sep: [this.sourceToken] });\n                    else if (it.sep)\n                        it.sep.push(this.sourceToken);\n                    else\n                        Object.assign(it, { key: null, sep: [this.sourceToken] });\n                    return;\n                case 'space':\n                case 'comment':\n                case 'newline':\n                case 'anchor':\n                case 'tag':\n                    if (!it || it.value)\n                        fc.items.push({ start: [this.sourceToken] });\n                    else if (it.sep)\n                        it.sep.push(this.sourceToken);\n                    else\n                        it.start.push(this.sourceToken);\n                    
return;\n                case 'alias':\n                case 'scalar':\n                case 'single-quoted-scalar':\n                case 'double-quoted-scalar': {\n                    const fs = this.flowScalar(this.type);\n                    if (!it || it.value)\n                        fc.items.push({ start: [], key: fs, sep: [] });\n                    else if (it.sep)\n                        this.stack.push(fs);\n                    else\n                        Object.assign(it, { key: fs, sep: [] });\n                    return;\n                }\n                case 'flow-map-end':\n                case 'flow-seq-end':\n                    fc.end.push(this.sourceToken);\n                    return;\n            }\n            const bv = this.startBlockValue(fc);\n            /* istanbul ignore else should not happen */\n            if (bv)\n                this.stack.push(bv);\n            else {\n                yield* this.pop();\n                yield* this.step();\n            }\n        }\n        else {\n            const parent = this.peek(2);\n            if (parent.type === 'block-map' &&\n                ((this.type === 'map-value-ind' && parent.indent === fc.indent) ||\n                    (this.type === 'newline' &&\n                        !parent.items[parent.items.length - 1].sep))) {\n                yield* this.pop();\n                yield* this.step();\n            }\n            else if (this.type === 'map-value-ind' &&\n                parent.type !== 'flow-collection') {\n                const prev = getPrevProps(parent);\n                const start = getFirstKeyStartProps(prev);\n                fixFlowSeqItems(fc);\n                const sep = fc.end.splice(1, fc.end.length);\n                sep.push(this.sourceToken);\n                const map = {\n                    type: 'block-map',\n                    offset: fc.offset,\n                    indent: fc.indent,\n                    items: [{ start, key: fc, sep }]\n                };\n                this.onKeyLine = true;\n                this.stack[this.stack.length - 1] = map;\n            }\n            else {\n                yield* this.lineEnd(fc);\n            }\n        }\n    }\n    flowScalar(type) {\n        if (this.onNewLine) {\n            let nl = this.source.indexOf('\\n') + 1;\n            while (nl !== 0) {\n                this.onNewLine(this.offset + nl);\n                nl = this.source.indexOf('\\n', nl) + 1;\n            }\n        }\n        return {\n            type,\n            offset: this.offset,\n            indent: this.indent,\n            source: this.source\n        };\n    }\n    startBlockValue(parent) {\n        switch (this.type) {\n            case 'alias':\n            case 'scalar':\n            case 'single-quoted-scalar':\n            case 'double-quoted-scalar':\n                return this.flowScalar(this.type);\n            case 'block-scalar-header':\n                return {\n                    type: 'block-scalar',\n                    offset: this.offset,\n                    indent: this.indent,\n                    props: [this.sourceToken],\n                    source: ''\n                };\n            case 'flow-map-start':\n            case 'flow-seq-start':\n                return {\n                    type: 'flow-collection',\n                    offset: this.offset,\n                    indent: this.indent,\n                    start: this.sourceToken,\n                    items: [],\n                    end: []\n                
};\n            case 'seq-item-ind':\n                return {\n                    type: 'block-seq',\n                    offset: this.offset,\n                    indent: this.indent,\n                    items: [{ start: [this.sourceToken] }]\n                };\n            case 'explicit-key-ind': {\n                this.onKeyLine = true;\n                const prev = getPrevProps(parent);\n                const start = getFirstKeyStartProps(prev);\n                start.push(this.sourceToken);\n                return {\n                    type: 'block-map',\n                    offset: this.offset,\n                    indent: this.indent,\n                    items: [{ start, explicitKey: true }]\n                };\n            }\n            case 'map-value-ind': {\n                this.onKeyLine = true;\n                const prev = getPrevProps(parent);\n                const start = getFirstKeyStartProps(prev);\n                return {\n                    type: 'block-map',\n                    offset: this.offset,\n                    indent: this.indent,\n                    items: [{ start, key: null, sep: [this.sourceToken] }]\n                };\n            }\n        }\n        return null;\n    }\n    atIndentedComment(start, indent) {\n        if (this.type !== 'comment')\n            return false;\n        if (this.indent <= indent)\n            return false;\n        return start.every(st => st.type === 'newline' || st.type === 'space');\n    }\n    *documentEnd(docEnd) {\n        if (this.type !== 'doc-mode') {\n            if (docEnd.end)\n                docEnd.end.push(this.sourceToken);\n            else\n                docEnd.end = [this.sourceToken];\n            if (this.type === 'newline')\n                yield* this.pop();\n        }\n    }\n    *lineEnd(token) {\n        switch (this.type) {\n            case 'comma':\n            case 'doc-start':\n            case 'doc-end':\n            case 'flow-seq-end':\n            case 'flow-map-end':\n            case 'map-value-ind':\n                yield* this.pop();\n                yield* this.step();\n                break;\n            case 'newline':\n                this.onKeyLine = false;\n            // fallthrough\n            case 'space':\n            case 'comment':\n            default:\n                // all other values are errors\n                if (token.end)\n                    token.end.push(this.sourceToken);\n                else\n                    token.end = [this.sourceToken];\n                if (this.type === 'newline')\n                    yield* this.pop();\n        }\n    }\n}\n\nexports.Parser = Parser;\n","'use strict';\n\nvar composer = require('./compose/composer.js');\nvar Document = require('./doc/Document.js');\nvar errors = require('./errors.js');\nvar log = require('./log.js');\nvar identity = require('./nodes/identity.js');\nvar lineCounter = require('./parse/line-counter.js');\nvar parser = require('./parse/parser.js');\n\nfunction parseOptions(options) {\n    const prettyErrors = options.prettyErrors !== false;\n    const lineCounter$1 = options.lineCounter || (prettyErrors && new lineCounter.LineCounter()) || null;\n    return { lineCounter: lineCounter$1, prettyErrors };\n}\n/**\n * Parse the input as a stream of YAML documents.\n *\n * Documents should be separated from each other by `...` or `---` marker lines.\n *\n * @returns If an empty `docs` array is returned, it will be of type\n *   EmptyStream and contain additional stream information. 
In\n *   TypeScript, you should use `'empty' in docs` as a type guard for it.\n */\nfunction parseAllDocuments(source, options = {}) {\n    const { lineCounter, prettyErrors } = parseOptions(options);\n    const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n    const composer$1 = new composer.Composer(options);\n    const docs = Array.from(composer$1.compose(parser$1.parse(source)));\n    if (prettyErrors && lineCounter)\n        for (const doc of docs) {\n            doc.errors.forEach(errors.prettifyError(source, lineCounter));\n            doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n        }\n    if (docs.length > 0)\n        return docs;\n    return Object.assign([], { empty: true }, composer$1.streamInfo());\n}\n/** Parse an input string into a single YAML.Document */\nfunction parseDocument(source, options = {}) {\n    const { lineCounter, prettyErrors } = parseOptions(options);\n    const parser$1 = new parser.Parser(lineCounter?.addNewLine);\n    const composer$1 = new composer.Composer(options);\n    // `doc` is always set by compose.end(true) at the very latest\n    let doc = null;\n    for (const _doc of composer$1.compose(parser$1.parse(source), true, source.length)) {\n        if (!doc)\n            doc = _doc;\n        else if (doc.options.logLevel !== 'silent') {\n            doc.errors.push(new errors.YAMLParseError(_doc.range.slice(0, 2), 'MULTIPLE_DOCS', 'Source contains multiple documents; please use YAML.parseAllDocuments()'));\n            break;\n        }\n    }\n    if (prettyErrors && lineCounter) {\n        doc.errors.forEach(errors.prettifyError(source, lineCounter));\n        doc.warnings.forEach(errors.prettifyError(source, lineCounter));\n    }\n    return doc;\n}\nfunction parse(src, reviver, options) {\n    let _reviver = undefined;\n    if (typeof reviver === 'function') {\n        _reviver = reviver;\n    }\n    else if (options === undefined && reviver && typeof reviver === 'object') {\n        options = reviver;\n    }\n    const doc = parseDocument(src, options);\n    if (!doc)\n        return null;\n    doc.warnings.forEach(warning => log.warn(doc.options.logLevel, warning));\n    if (doc.errors.length > 0) {\n        if (doc.options.logLevel !== 'silent')\n            throw doc.errors[0];\n        else\n            doc.errors = [];\n    }\n    return doc.toJS(Object.assign({ reviver: _reviver }, options));\n}\nfunction stringify(value, replacer, options) {\n    let _replacer = null;\n    if (typeof replacer === 'function' || Array.isArray(replacer)) {\n        _replacer = replacer;\n    }\n    else if (options === undefined && replacer) {\n        options = replacer;\n    }\n    if (typeof options === 'string')\n        options = options.length;\n    if (typeof options === 'number') {\n        const indent = Math.round(options);\n        options = indent < 1 ? undefined : indent > 8 ? { indent: 8 } : { indent };\n    }\n    if (value === undefined) {\n        const { keepUndefined } = options ?? replacer ?? 
{};\n        if (!keepUndefined)\n            return undefined;\n    }\n    if (identity.isDocument(value) && !_replacer)\n        return value.toString(options);\n    return new Document.Document(value, _replacer, options).toString(options);\n}\n\nexports.parse = parse;\nexports.parseAllDocuments = parseAllDocuments;\nexports.parseDocument = parseDocument;\nexports.stringify = stringify;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar map = require('./common/map.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar tags = require('./tags.js');\n\nconst sortMapEntriesByKey = (a, b) => a.key < b.key ? -1 : a.key > b.key ? 1 : 0;\nclass Schema {\n    constructor({ compat, customTags, merge, resolveKnownTags, schema, sortMapEntries, toStringDefaults }) {\n        this.compat = Array.isArray(compat)\n            ? tags.getTags(compat, 'compat')\n            : compat\n                ? tags.getTags(null, compat)\n                : null;\n        this.name = (typeof schema === 'string' && schema) || 'core';\n        this.knownTags = resolveKnownTags ? tags.coreKnownTags : {};\n        this.tags = tags.getTags(customTags, this.name, merge);\n        this.toStringOptions = toStringDefaults ?? null;\n        Object.defineProperty(this, identity.MAP, { value: map.map });\n        Object.defineProperty(this, identity.SCALAR, { value: string.string });\n        Object.defineProperty(this, identity.SEQ, { value: seq.seq });\n        // Used by createMap()\n        this.sortMapEntries =\n            typeof sortMapEntries === 'function'\n                ? sortMapEntries\n                : sortMapEntries === true\n                    ? sortMapEntriesByKey\n                    : null;\n    }\n    clone() {\n        const copy = Object.create(Schema.prototype, Object.getOwnPropertyDescriptors(this));\n        copy.tags = this.tags.slice();\n        return copy;\n    }\n}\n\nexports.Schema = Schema;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nconst map = {\n    collection: 'map',\n    default: true,\n    nodeClass: YAMLMap.YAMLMap,\n    tag: 'tag:yaml.org,2002:map',\n    resolve(map, onError) {\n        if (!identity.isMap(map))\n            onError('Expected a mapping for this tag');\n        return map;\n    },\n    createNode: (schema, obj, ctx) => YAMLMap.YAMLMap.from(schema, obj, ctx)\n};\n\nexports.map = map;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst nullTag = {\n    identify: value => value == null,\n    createNode: () => new Scalar.Scalar(null),\n    default: true,\n    tag: 'tag:yaml.org,2002:null',\n    test: /^(?:~|[Nn]ull|NULL)?$/,\n    resolve: () => new Scalar.Scalar(null),\n    stringify: ({ source }, ctx) => typeof source === 'string' && nullTag.test.test(source)\n        ? 
source\n        : ctx.options.nullStr\n};\n\nexports.nullTag = nullTag;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nconst seq = {\n    collection: 'seq',\n    default: true,\n    nodeClass: YAMLSeq.YAMLSeq,\n    tag: 'tag:yaml.org,2002:seq',\n    resolve(seq, onError) {\n        if (!identity.isSeq(seq))\n            onError('Expected a sequence for this tag');\n        return seq;\n    },\n    createNode: (schema, obj, ctx) => YAMLSeq.YAMLSeq.from(schema, obj, ctx)\n};\n\nexports.seq = seq;\n","'use strict';\n\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst string = {\n    identify: value => typeof value === 'string',\n    default: true,\n    tag: 'tag:yaml.org,2002:str',\n    resolve: str => str,\n    stringify(item, ctx, onComment, onChompKeep) {\n        ctx = Object.assign({ actualString: true }, ctx);\n        return stringifyString.stringifyString(item, ctx, onComment, onChompKeep);\n    }\n};\n\nexports.string = string;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nconst boolTag = {\n    identify: value => typeof value === 'boolean',\n    default: true,\n    tag: 'tag:yaml.org,2002:bool',\n    test: /^(?:[Tt]rue|TRUE|[Ff]alse|FALSE)$/,\n    resolve: str => new Scalar.Scalar(str[0] === 't' || str[0] === 'T'),\n    stringify({ source, value }, ctx) {\n        if (source && boolTag.test.test(source)) {\n            const sv = source[0] === 't' || source[0] === 'T';\n            if (value === sv)\n                return source;\n        }\n        return value ? ctx.options.trueStr : ctx.options.falseStr;\n    }\n};\n\nexports.boolTag = boolTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n    identify: value => typeof value === 'number',\n    default: true,\n    tag: 'tag:yaml.org,2002:float',\n    test: /^(?:[-+]?\\.(?:inf|Inf|INF)|\\.nan|\\.NaN|\\.NAN)$/,\n    resolve: str => str.slice(-3).toLowerCase() === 'nan'\n        ? NaN\n        : str[0] === '-'\n            ? Number.NEGATIVE_INFINITY\n            : Number.POSITIVE_INFINITY,\n    stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n    identify: value => typeof value === 'number',\n    default: true,\n    tag: 'tag:yaml.org,2002:float',\n    format: 'EXP',\n    test: /^[-+]?(?:\\.[0-9]+|[0-9]+(?:\\.[0-9]*)?)[eE][-+]?[0-9]+$/,\n    resolve: str => parseFloat(str),\n    stringify(node) {\n        const num = Number(node.value);\n        return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n    }\n};\nconst float = {\n    identify: value => typeof value === 'number',\n    default: true,\n    tag: 'tag:yaml.org,2002:float',\n    test: /^[-+]?(?:\\.[0-9]+|[0-9]+\\.[0-9]*)$/,\n    resolve(str) {\n        const node = new Scalar.Scalar(parseFloat(str));\n        const dot = str.indexOf('.');\n        if (dot !== -1 && str[str.length - 1] === '0')\n            node.minFractionDigits = str.length - dot - 1;\n        return node;\n    },\n    stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nconst intResolve = (str, offset, radix, { intAsBigInt }) => (intAsBigInt ? 
BigInt(str) : parseInt(str.substring(offset), radix));\nfunction intStringify(node, radix, prefix) {\n    const { value } = node;\n    if (intIdentify(value) && value >= 0)\n        return prefix + value.toString(radix);\n    return stringifyNumber.stringifyNumber(node);\n}\nconst intOct = {\n    identify: value => intIdentify(value) && value >= 0,\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    format: 'OCT',\n    test: /^0o[0-7]+$/,\n    resolve: (str, _onError, opt) => intResolve(str, 2, 8, opt),\n    stringify: node => intStringify(node, 8, '0o')\n};\nconst int = {\n    identify: intIdentify,\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    test: /^[-+]?[0-9]+$/,\n    resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n    stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n    identify: value => intIdentify(value) && value >= 0,\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    format: 'HEX',\n    test: /^0x[0-9a-fA-F]+$/,\n    resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n    stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\n\nconst schema = [\n    map.map,\n    seq.seq,\n    string.string,\n    _null.nullTag,\n    bool.boolTag,\n    int.intOct,\n    int.int,\n    int.intHex,\n    float.floatNaN,\n    float.floatExp,\n    float.float\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar map = require('../common/map.js');\nvar seq = require('../common/seq.js');\n\nfunction intIdentify(value) {\n    return typeof value === 'bigint' || Number.isInteger(value);\n}\nconst stringifyJSON = ({ value }) => JSON.stringify(value);\nconst jsonScalars = [\n    {\n        identify: value => typeof value === 'string',\n        default: true,\n        tag: 'tag:yaml.org,2002:str',\n        resolve: str => str,\n        stringify: stringifyJSON\n    },\n    {\n        identify: value => value == null,\n        createNode: () => new Scalar.Scalar(null),\n        default: true,\n        tag: 'tag:yaml.org,2002:null',\n        test: /^null$/,\n        resolve: () => null,\n        stringify: stringifyJSON\n    },\n    {\n        identify: value => typeof value === 'boolean',\n        default: true,\n        tag: 'tag:yaml.org,2002:bool',\n        test: /^true$|^false$/,\n        resolve: str => str === 'true',\n        stringify: stringifyJSON\n    },\n    {\n        identify: intIdentify,\n        default: true,\n        tag: 'tag:yaml.org,2002:int',\n        test: /^-?(?:0|[1-9][0-9]*)$/,\n        resolve: (str, _onError, { intAsBigInt }) => intAsBigInt ? BigInt(str) : parseInt(str, 10),\n        stringify: ({ value }) => intIdentify(value) ? 
value.toString() : JSON.stringify(value)\n    },\n    {\n        identify: value => typeof value === 'number',\n        default: true,\n        tag: 'tag:yaml.org,2002:float',\n        test: /^-?(?:0|[1-9][0-9]*)(?:\\.[0-9]*)?(?:[eE][-+]?[0-9]+)?$/,\n        resolve: str => parseFloat(str),\n        stringify: stringifyJSON\n    }\n];\nconst jsonError = {\n    default: true,\n    tag: '',\n    test: /^/,\n    resolve(str, onError) {\n        onError(`Unresolved plain scalar ${JSON.stringify(str)}`);\n        return str;\n    }\n};\nconst schema = [map.map, seq.seq].concat(jsonScalars, jsonError);\n\nexports.schema = schema;\n","'use strict';\n\nvar map = require('./common/map.js');\nvar _null = require('./common/null.js');\nvar seq = require('./common/seq.js');\nvar string = require('./common/string.js');\nvar bool = require('./core/bool.js');\nvar float = require('./core/float.js');\nvar int = require('./core/int.js');\nvar schema = require('./core/schema.js');\nvar schema$1 = require('./json/schema.js');\nvar binary = require('./yaml-1.1/binary.js');\nvar merge = require('./yaml-1.1/merge.js');\nvar omap = require('./yaml-1.1/omap.js');\nvar pairs = require('./yaml-1.1/pairs.js');\nvar schema$2 = require('./yaml-1.1/schema.js');\nvar set = require('./yaml-1.1/set.js');\nvar timestamp = require('./yaml-1.1/timestamp.js');\n\nconst schemas = new Map([\n    ['core', schema.schema],\n    ['failsafe', [map.map, seq.seq, string.string]],\n    ['json', schema$1.schema],\n    ['yaml11', schema$2.schema],\n    ['yaml-1.1', schema$2.schema]\n]);\nconst tagsByName = {\n    binary: binary.binary,\n    bool: bool.boolTag,\n    float: float.float,\n    floatExp: float.floatExp,\n    floatNaN: float.floatNaN,\n    floatTime: timestamp.floatTime,\n    int: int.int,\n    intHex: int.intHex,\n    intOct: int.intOct,\n    intTime: timestamp.intTime,\n    map: map.map,\n    merge: merge.merge,\n    null: _null.nullTag,\n    omap: omap.omap,\n    pairs: pairs.pairs,\n    seq: seq.seq,\n    set: set.set,\n    timestamp: timestamp.timestamp\n};\nconst coreKnownTags = {\n    'tag:yaml.org,2002:binary': binary.binary,\n    'tag:yaml.org,2002:merge': merge.merge,\n    'tag:yaml.org,2002:omap': omap.omap,\n    'tag:yaml.org,2002:pairs': pairs.pairs,\n    'tag:yaml.org,2002:set': set.set,\n    'tag:yaml.org,2002:timestamp': timestamp.timestamp\n};\nfunction getTags(customTags, schemaName, addMergeTag) {\n    const schemaTags = schemas.get(schemaName);\n    if (schemaTags && !customTags) {\n        return addMergeTag && !schemaTags.includes(merge.merge)\n            ? schemaTags.concat(merge.merge)\n            : schemaTags.slice();\n    }\n    let tags = schemaTags;\n    if (!tags) {\n        if (Array.isArray(customTags))\n            tags = [];\n        else {\n            const keys = Array.from(schemas.keys())\n                .filter(key => key !== 'yaml11')\n                .map(key => JSON.stringify(key))\n                .join(', ');\n            throw new Error(`Unknown schema \"${schemaName}\"; use one of ${keys} or define customTags array`);\n        }\n    }\n    if (Array.isArray(customTags)) {\n        for (const tag of customTags)\n            tags = tags.concat(tag);\n    }\n    else if (typeof customTags === 'function') {\n        tags = customTags(tags.slice());\n    }\n    if (addMergeTag)\n        tags = tags.concat(merge.merge);\n    return tags.reduce((tags, tag) => {\n        const tagObj = typeof tag === 'string' ? 
tagsByName[tag] : tag;\n        if (!tagObj) {\n            const tagName = JSON.stringify(tag);\n            const keys = Object.keys(tagsByName)\n                .map(key => JSON.stringify(key))\n                .join(', ');\n            throw new Error(`Unknown custom tag ${tagName}; use one of ${keys}`);\n        }\n        if (!tags.includes(tagObj))\n            tags.push(tagObj);\n        return tags;\n    }, []);\n}\n\nexports.coreKnownTags = coreKnownTags;\nexports.getTags = getTags;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyString = require('../../stringify/stringifyString.js');\n\nconst binary = {\n    identify: value => value instanceof Uint8Array, // Buffer inherits from Uint8Array\n    default: false,\n    tag: 'tag:yaml.org,2002:binary',\n    /**\n     * Returns a Buffer in node and an Uint8Array in browsers\n     *\n     * To use the resulting buffer as an image, you'll want to do something like:\n     *\n     *   const blob = new Blob([buffer], { type: 'image/jpeg' })\n     *   document.querySelector('#photo').src = URL.createObjectURL(blob)\n     */\n    resolve(src, onError) {\n        if (typeof Buffer === 'function') {\n            return Buffer.from(src, 'base64');\n        }\n        else if (typeof atob === 'function') {\n            // On IE 11, atob() can't handle newlines\n            const str = atob(src.replace(/[\\n\\r]/g, ''));\n            const buffer = new Uint8Array(str.length);\n            for (let i = 0; i < str.length; ++i)\n                buffer[i] = str.charCodeAt(i);\n            return buffer;\n        }\n        else {\n            onError('This environment does not support reading binary tags; either Buffer or atob is required');\n            return src;\n        }\n    },\n    stringify({ comment, type, value }, ctx, onComment, onChompKeep) {\n        const buf = value; // checked earlier by binary.identify()\n        let str;\n        if (typeof Buffer === 'function') {\n            str =\n                buf instanceof Buffer\n                    ? buf.toString('base64')\n                    : Buffer.from(buf.buffer).toString('base64');\n        }\n        else if (typeof btoa === 'function') {\n            let s = '';\n            for (let i = 0; i < buf.length; ++i)\n                s += String.fromCharCode(buf[i]);\n            str = btoa(s);\n        }\n        else {\n            throw new Error('This environment does not support writing binary tags; either Buffer or btoa is required');\n        }\n        if (!type)\n            type = Scalar.Scalar.BLOCK_LITERAL;\n        if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n            const lineWidth = Math.max(ctx.options.lineWidth - ctx.indent.length, ctx.options.minContentWidth);\n            const n = Math.ceil(str.length / lineWidth);\n            const lines = new Array(n);\n            for (let i = 0, o = 0; i < n; ++i, o += lineWidth) {\n                lines[i] = str.substr(o, lineWidth);\n            }\n            str = lines.join(type === Scalar.Scalar.BLOCK_LITERAL ? '\\n' : ' ');\n        }\n        return stringifyString.stringifyString({ comment, type, value: str }, ctx, onComment, onChompKeep);\n    }\n};\n\nexports.binary = binary;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\n\nfunction boolStringify({ value, source }, ctx) {\n    const boolObj = value ? trueTag : falseTag;\n    if (source && boolObj.test.test(source))\n        return source;\n    return value ? 
ctx.options.trueStr : ctx.options.falseStr;\n}\nconst trueTag = {\n    identify: value => value === true,\n    default: true,\n    tag: 'tag:yaml.org,2002:bool',\n    test: /^(?:Y|y|[Yy]es|YES|[Tt]rue|TRUE|[Oo]n|ON)$/,\n    resolve: () => new Scalar.Scalar(true),\n    stringify: boolStringify\n};\nconst falseTag = {\n    identify: value => value === false,\n    default: true,\n    tag: 'tag:yaml.org,2002:bool',\n    test: /^(?:N|n|[Nn]o|NO|[Ff]alse|FALSE|[Oo]ff|OFF)$/,\n    resolve: () => new Scalar.Scalar(false),\n    stringify: boolStringify\n};\n\nexports.falseTag = falseTag;\nexports.trueTag = trueTag;\n","'use strict';\n\nvar Scalar = require('../../nodes/Scalar.js');\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst floatNaN = {\n    identify: value => typeof value === 'number',\n    default: true,\n    tag: 'tag:yaml.org,2002:float',\n    test: /^(?:[-+]?\\.(?:inf|Inf|INF)|\\.nan|\\.NaN|\\.NAN)$/,\n    resolve: (str) => str.slice(-3).toLowerCase() === 'nan'\n        ? NaN\n        : str[0] === '-'\n            ? Number.NEGATIVE_INFINITY\n            : Number.POSITIVE_INFINITY,\n    stringify: stringifyNumber.stringifyNumber\n};\nconst floatExp = {\n    identify: value => typeof value === 'number',\n    default: true,\n    tag: 'tag:yaml.org,2002:float',\n    format: 'EXP',\n    test: /^[-+]?(?:[0-9][0-9_]*)?(?:\\.[0-9_]*)?[eE][-+]?[0-9]+$/,\n    resolve: (str) => parseFloat(str.replace(/_/g, '')),\n    stringify(node) {\n        const num = Number(node.value);\n        return isFinite(num) ? num.toExponential() : stringifyNumber.stringifyNumber(node);\n    }\n};\nconst float = {\n    identify: value => typeof value === 'number',\n    default: true,\n    tag: 'tag:yaml.org,2002:float',\n    test: /^[-+]?(?:[0-9][0-9_]*)?\\.[0-9_]*$/,\n    resolve(str) {\n        const node = new Scalar.Scalar(parseFloat(str.replace(/_/g, '')));\n        const dot = str.indexOf('.');\n        if (dot !== -1) {\n            const f = str.substring(dot + 1).replace(/_/g, '');\n            if (f[f.length - 1] === '0')\n                node.minFractionDigits = f.length;\n        }\n        return node;\n    },\n    stringify: stringifyNumber.stringifyNumber\n};\n\nexports.float = float;\nexports.floatExp = floatExp;\nexports.floatNaN = floatNaN;\n","'use strict';\n\nvar stringifyNumber = require('../../stringify/stringifyNumber.js');\n\nconst intIdentify = (value) => typeof value === 'bigint' || Number.isInteger(value);\nfunction intResolve(str, offset, radix, { intAsBigInt }) {\n    const sign = str[0];\n    if (sign === '-' || sign === '+')\n        offset += 1;\n    str = str.substring(offset).replace(/_/g, '');\n    if (intAsBigInt) {\n        switch (radix) {\n            case 2:\n                str = `0b${str}`;\n                break;\n            case 8:\n                str = `0o${str}`;\n                break;\n            case 16:\n                str = `0x${str}`;\n                break;\n        }\n        const n = BigInt(str);\n        return sign === '-' ? BigInt(-1) * n : n;\n    }\n    const n = parseInt(str, radix);\n    return sign === '-' ? -1 * n : n;\n}\nfunction intStringify(node, radix, prefix) {\n    const { value } = node;\n    if (intIdentify(value)) {\n        const str = value.toString(radix);\n        return value < 0 ? 
'-' + prefix + str.substr(1) : prefix + str;\n    }\n    return stringifyNumber.stringifyNumber(node);\n}\nconst intBin = {\n    identify: intIdentify,\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    format: 'BIN',\n    test: /^[-+]?0b[0-1_]+$/,\n    resolve: (str, _onError, opt) => intResolve(str, 2, 2, opt),\n    stringify: node => intStringify(node, 2, '0b')\n};\nconst intOct = {\n    identify: intIdentify,\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    format: 'OCT',\n    test: /^[-+]?0[0-7_]+$/,\n    resolve: (str, _onError, opt) => intResolve(str, 1, 8, opt),\n    stringify: node => intStringify(node, 8, '0')\n};\nconst int = {\n    identify: intIdentify,\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    test: /^[-+]?[0-9][0-9_]*$/,\n    resolve: (str, _onError, opt) => intResolve(str, 0, 10, opt),\n    stringify: stringifyNumber.stringifyNumber\n};\nconst intHex = {\n    identify: intIdentify,\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    format: 'HEX',\n    test: /^[-+]?0x[0-9a-fA-F_]+$/,\n    resolve: (str, _onError, opt) => intResolve(str, 2, 16, opt),\n    stringify: node => intStringify(node, 16, '0x')\n};\n\nexports.int = int;\nexports.intBin = intBin;\nexports.intHex = intHex;\nexports.intOct = intOct;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar Scalar = require('../../nodes/Scalar.js');\n\n// If the value associated with a merge key is a single mapping node, each of\n// its key/value pairs is inserted into the current mapping, unless the key\n// already exists in it. If the value associated with the merge key is a\n// sequence, then this sequence is expected to contain mapping nodes and each\n// of these nodes is merged in turn according to its order in the sequence.\n// Keys in mapping nodes earlier in the sequence override keys specified in\n// later mapping nodes. -- http://yaml.org/type/merge.html\nconst MERGE_KEY = '<<';\nconst merge = {\n    identify: value => value === MERGE_KEY ||\n        (typeof value === 'symbol' && value.description === MERGE_KEY),\n    default: 'key',\n    tag: 'tag:yaml.org,2002:merge',\n    test: /^<<$/,\n    resolve: () => Object.assign(new Scalar.Scalar(Symbol(MERGE_KEY)), {\n        addToJSMap: addMergeToJSMap\n    }),\n    stringify: () => MERGE_KEY\n};\nconst isMergeKey = (ctx, key) => (merge.identify(key) ||\n    (identity.isScalar(key) &&\n        (!key.type || key.type === Scalar.Scalar.PLAIN) &&\n        merge.identify(key.value))) &&\n    ctx?.doc.schema.tags.some(tag => tag.tag === merge.tag && tag.default);\nfunction addMergeToJSMap(ctx, map, value) {\n    value = ctx && identity.isAlias(value) ? value.resolve(ctx.doc) : value;\n    if (identity.isSeq(value))\n        for (const it of value.items)\n            mergeValue(ctx, map, it);\n    else if (Array.isArray(value))\n        for (const it of value)\n            mergeValue(ctx, map, it);\n    else\n        mergeValue(ctx, map, value);\n}\nfunction mergeValue(ctx, map, value) {\n    const source = ctx && identity.isAlias(value) ? 
value.resolve(ctx.doc) : value;\n    if (!identity.isMap(source))\n        throw new Error('Merge sources must be maps or map aliases');\n    const srcMap = source.toJSON(null, ctx, Map);\n    for (const [key, value] of srcMap) {\n        if (map instanceof Map) {\n            if (!map.has(key))\n                map.set(key, value);\n        }\n        else if (map instanceof Set) {\n            map.add(key);\n        }\n        else if (!Object.prototype.hasOwnProperty.call(map, key)) {\n            Object.defineProperty(map, key, {\n                value,\n                writable: true,\n                enumerable: true,\n                configurable: true\n            });\n        }\n    }\n    return map;\n}\n\nexports.addMergeToJSMap = addMergeToJSMap;\nexports.isMergeKey = isMergeKey;\nexports.merge = merge;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar toJS = require('../../nodes/toJS.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\nvar pairs = require('./pairs.js');\n\nclass YAMLOMap extends YAMLSeq.YAMLSeq {\n    constructor() {\n        super();\n        this.add = YAMLMap.YAMLMap.prototype.add.bind(this);\n        this.delete = YAMLMap.YAMLMap.prototype.delete.bind(this);\n        this.get = YAMLMap.YAMLMap.prototype.get.bind(this);\n        this.has = YAMLMap.YAMLMap.prototype.has.bind(this);\n        this.set = YAMLMap.YAMLMap.prototype.set.bind(this);\n        this.tag = YAMLOMap.tag;\n    }\n    /**\n     * If `ctx` is given, the return type is actually `Map`,\n     * but TypeScript won't allow widening the signature of a child method.\n     */\n    toJSON(_, ctx) {\n        if (!ctx)\n            return super.toJSON(_);\n        const map = new Map();\n        if (ctx?.onCreate)\n            ctx.onCreate(map);\n        for (const pair of this.items) {\n            let key, value;\n            if (identity.isPair(pair)) {\n                key = toJS.toJS(pair.key, '', ctx);\n                value = toJS.toJS(pair.value, key, ctx);\n            }\n            else {\n                key = toJS.toJS(pair, '', ctx);\n            }\n            if (map.has(key))\n                throw new Error('Ordered maps must not include duplicate keys');\n            map.set(key, value);\n        }\n        return map;\n    }\n    static from(schema, iterable, ctx) {\n        const pairs$1 = pairs.createPairs(schema, iterable, ctx);\n        const omap = new this();\n        omap.items = pairs$1.items;\n        return omap;\n    }\n}\nYAMLOMap.tag = 'tag:yaml.org,2002:omap';\nconst omap = {\n    collection: 'seq',\n    identify: value => value instanceof Map,\n    nodeClass: YAMLOMap,\n    default: false,\n    tag: 'tag:yaml.org,2002:omap',\n    resolve(seq, onError) {\n        const pairs$1 = pairs.resolvePairs(seq, onError);\n        const seenKeys = [];\n        for (const { key } of pairs$1.items) {\n            if (identity.isScalar(key)) {\n                if (seenKeys.includes(key.value)) {\n                    onError(`Ordered maps must not include duplicate keys: ${key.value}`);\n                }\n                else {\n                    seenKeys.push(key.value);\n                }\n            }\n        }\n        return Object.assign(new YAMLOMap(), pairs$1);\n    },\n    createNode: (schema, iterable, ctx) => YAMLOMap.from(schema, iterable, ctx)\n};\n\nexports.YAMLOMap = YAMLOMap;\nexports.omap = omap;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar Pair = 
require('../../nodes/Pair.js');\nvar Scalar = require('../../nodes/Scalar.js');\nvar YAMLSeq = require('../../nodes/YAMLSeq.js');\n\nfunction resolvePairs(seq, onError) {\n    if (identity.isSeq(seq)) {\n        for (let i = 0; i < seq.items.length; ++i) {\n            let item = seq.items[i];\n            if (identity.isPair(item))\n                continue;\n            else if (identity.isMap(item)) {\n                if (item.items.length > 1)\n                    onError('Each pair must have its own sequence indicator');\n                const pair = item.items[0] || new Pair.Pair(new Scalar.Scalar(null));\n                if (item.commentBefore)\n                    pair.key.commentBefore = pair.key.commentBefore\n                        ? `${item.commentBefore}\\n${pair.key.commentBefore}`\n                        : item.commentBefore;\n                if (item.comment) {\n                    const cn = pair.value ?? pair.key;\n                    cn.comment = cn.comment\n                        ? `${item.comment}\\n${cn.comment}`\n                        : item.comment;\n                }\n                item = pair;\n            }\n            seq.items[i] = identity.isPair(item) ? item : new Pair.Pair(item);\n        }\n    }\n    else\n        onError('Expected a sequence for this tag');\n    return seq;\n}\nfunction createPairs(schema, iterable, ctx) {\n    const { replacer } = ctx;\n    const pairs = new YAMLSeq.YAMLSeq(schema);\n    pairs.tag = 'tag:yaml.org,2002:pairs';\n    let i = 0;\n    if (iterable && Symbol.iterator in Object(iterable))\n        for (let it of iterable) {\n            if (typeof replacer === 'function')\n                it = replacer.call(iterable, String(i++), it);\n            let key, value;\n            if (Array.isArray(it)) {\n                if (it.length === 2) {\n                    key = it[0];\n                    value = it[1];\n                }\n                else\n                    throw new TypeError(`Expected [key, value] tuple: ${it}`);\n            }\n            else if (it && it instanceof Object) {\n                const keys = Object.keys(it);\n                if (keys.length === 1) {\n                    key = keys[0];\n                    value = it[key];\n                }\n                else {\n                    throw new TypeError(`Expected tuple with one key, not ${keys.length} keys`);\n                }\n            }\n            else {\n                key = it;\n            }\n            pairs.items.push(Pair.createPair(key, value, ctx));\n        }\n    return pairs;\n}\nconst pairs = {\n    collection: 'seq',\n    default: false,\n    tag: 'tag:yaml.org,2002:pairs',\n    resolve: resolvePairs,\n    createNode: createPairs\n};\n\nexports.createPairs = createPairs;\nexports.pairs = pairs;\nexports.resolvePairs = resolvePairs;\n","'use strict';\n\nvar map = require('../common/map.js');\nvar _null = require('../common/null.js');\nvar seq = require('../common/seq.js');\nvar string = require('../common/string.js');\nvar binary = require('./binary.js');\nvar bool = require('./bool.js');\nvar float = require('./float.js');\nvar int = require('./int.js');\nvar merge = require('./merge.js');\nvar omap = require('./omap.js');\nvar pairs = require('./pairs.js');\nvar set = require('./set.js');\nvar timestamp = require('./timestamp.js');\n\nconst schema = [\n    map.map,\n    seq.seq,\n    string.string,\n    _null.nullTag,\n    bool.trueTag,\n    bool.falseTag,\n    int.intBin,\n    int.intOct,\n    int.int,\n    
int.intHex,\n    float.floatNaN,\n    float.floatExp,\n    float.float,\n    binary.binary,\n    merge.merge,\n    omap.omap,\n    pairs.pairs,\n    set.set,\n    timestamp.intTime,\n    timestamp.floatTime,\n    timestamp.timestamp\n];\n\nexports.schema = schema;\n","'use strict';\n\nvar identity = require('../../nodes/identity.js');\nvar Pair = require('../../nodes/Pair.js');\nvar YAMLMap = require('../../nodes/YAMLMap.js');\n\nclass YAMLSet extends YAMLMap.YAMLMap {\n    constructor(schema) {\n        super(schema);\n        this.tag = YAMLSet.tag;\n    }\n    add(key) {\n        let pair;\n        if (identity.isPair(key))\n            pair = key;\n        else if (key &&\n            typeof key === 'object' &&\n            'key' in key &&\n            'value' in key &&\n            key.value === null)\n            pair = new Pair.Pair(key.key, null);\n        else\n            pair = new Pair.Pair(key, null);\n        const prev = YAMLMap.findPair(this.items, pair.key);\n        if (!prev)\n            this.items.push(pair);\n    }\n    /**\n     * If `keepPair` is `true`, returns the Pair matching `key`.\n     * Otherwise, returns the value of that Pair's key.\n     */\n    get(key, keepPair) {\n        const pair = YAMLMap.findPair(this.items, key);\n        return !keepPair && identity.isPair(pair)\n            ? identity.isScalar(pair.key)\n                ? pair.key.value\n                : pair.key\n            : pair;\n    }\n    set(key, value) {\n        if (typeof value !== 'boolean')\n            throw new Error(`Expected boolean value for set(key, value) in a YAML set, not ${typeof value}`);\n        const prev = YAMLMap.findPair(this.items, key);\n        if (prev && !value) {\n            this.items.splice(this.items.indexOf(prev), 1);\n        }\n        else if (!prev && value) {\n            this.items.push(new Pair.Pair(key));\n        }\n    }\n    toJSON(_, ctx) {\n        return super.toJSON(_, ctx, Set);\n    }\n    toString(ctx, onComment, onChompKeep) {\n        if (!ctx)\n            return JSON.stringify(this);\n        if (this.hasAllNullValues(true))\n            return super.toString(Object.assign({}, ctx, { allNullValues: true }), onComment, onChompKeep);\n        else\n            throw new Error('Set items must all have null values');\n    }\n    static from(schema, iterable, ctx) {\n        const { replacer } = ctx;\n        const set = new this(schema);\n        if (iterable && Symbol.iterator in Object(iterable))\n            for (let value of iterable) {\n                if (typeof replacer === 'function')\n                    value = replacer.call(iterable, value, value);\n                set.items.push(Pair.createPair(value, null, ctx));\n            }\n        return set;\n    }\n}\nYAMLSet.tag = 'tag:yaml.org,2002:set';\nconst set = {\n    collection: 'map',\n    identify: value => value instanceof Set,\n    nodeClass: YAMLSet,\n    default: false,\n    tag: 'tag:yaml.org,2002:set',\n    createNode: (schema, iterable, ctx) => YAMLSet.from(schema, iterable, ctx),\n    resolve(map, onError) {\n        if (identity.isMap(map)) {\n            if (map.hasAllNullValues(true))\n                return Object.assign(new YAMLSet(), map);\n            else\n                onError('Set items must all have null values');\n        }\n        else\n            onError('Expected a mapping for this tag');\n        return map;\n    }\n};\n\nexports.YAMLSet = YAMLSet;\nexports.set = set;\n","'use strict';\n\nvar stringifyNumber = 
require('../../stringify/stringifyNumber.js');\n\n/** Internal types handle bigint as number, because TS can't figure it out. */\nfunction parseSexagesimal(str, asBigInt) {\n    const sign = str[0];\n    const parts = sign === '-' || sign === '+' ? str.substring(1) : str;\n    const num = (n) => asBigInt ? BigInt(n) : Number(n);\n    const res = parts\n        .replace(/_/g, '')\n        .split(':')\n        .reduce((res, p) => res * num(60) + num(p), num(0));\n    return (sign === '-' ? num(-1) * res : res);\n}\n/**\n * hhhh:mm:ss.sss\n *\n * Internal types handle bigint as number, because TS can't figure it out.\n */\nfunction stringifySexagesimal(node) {\n    let { value } = node;\n    let num = (n) => n;\n    if (typeof value === 'bigint')\n        num = n => BigInt(n);\n    else if (isNaN(value) || !isFinite(value))\n        return stringifyNumber.stringifyNumber(node);\n    let sign = '';\n    if (value < 0) {\n        sign = '-';\n        value *= num(-1);\n    }\n    const _60 = num(60);\n    const parts = [value % _60]; // seconds, including ms\n    if (value < 60) {\n        parts.unshift(0); // at least one : is required\n    }\n    else {\n        value = (value - parts[0]) / _60;\n        parts.unshift(value % _60); // minutes\n        if (value >= 60) {\n            value = (value - parts[0]) / _60;\n            parts.unshift(value); // hours\n        }\n    }\n    return (sign +\n        parts\n            .map(n => String(n).padStart(2, '0'))\n            .join(':')\n            .replace(/000000\\d*$/, '') // % 60 may introduce error\n    );\n}\nconst intTime = {\n    identify: value => typeof value === 'bigint' || Number.isInteger(value),\n    default: true,\n    tag: 'tag:yaml.org,2002:int',\n    format: 'TIME',\n    test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+$/,\n    resolve: (str, _onError, { intAsBigInt }) => parseSexagesimal(str, intAsBigInt),\n    stringify: stringifySexagesimal\n};\nconst floatTime = {\n    identify: value => typeof value === 'number',\n    default: true,\n    tag: 'tag:yaml.org,2002:float',\n    format: 'TIME',\n    test: /^[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*$/,\n    resolve: str => parseSexagesimal(str, false),\n    stringify: stringifySexagesimal\n};\nconst timestamp = {\n    identify: value => value instanceof Date,\n    default: true,\n    tag: 'tag:yaml.org,2002:timestamp',\n    // If the time zone is omitted, the timestamp is assumed to be specified in UTC. The time part\n    // may be omitted altogether, resulting in a date format. In such a case, the time part is\n    // assumed to be 00:00:00Z (start of day, UTC).\n    test: RegExp('^([0-9]{4})-([0-9]{1,2})-([0-9]{1,2})' + // YYYY-Mm-Dd\n        '(?:' + // time is optional\n        '(?:t|T|[ \\\\t]+)' + // t | T | whitespace\n        '([0-9]{1,2}):([0-9]{1,2}):([0-9]{1,2}(\\\\.[0-9]+)?)' + // Hh:Mm:Ss(.ss)?\n        '(?:[ \\\\t]*(Z|[-+][012]?[0-9](?::[0-9]{2})?))?' + // Z | +5 | -03:30\n        ')?$'),\n    resolve(str) {\n        const match = str.match(timestamp.test);\n        if (!match)\n            throw new Error('!!timestamp expects a date, starting with yyyy-mm-dd');\n        const [, year, month, day, hour, minute, second] = match.map(Number);\n        const millisec = match[7] ? 
Number((match[7] + '00').substr(1, 3)) : 0;\n        let date = Date.UTC(year, month - 1, day, hour || 0, minute || 0, second || 0, millisec);\n        const tz = match[8];\n        if (tz && tz !== 'Z') {\n            let d = parseSexagesimal(tz, false);\n            if (Math.abs(d) < 30)\n                d *= 60;\n            date -= 60000 * d;\n        }\n        return new Date(date);\n    },\n    stringify: ({ value }) => value.toISOString().replace(/(T00:00:00)?\\.000Z$/, '')\n};\n\nexports.floatTime = floatTime;\nexports.intTime = intTime;\nexports.timestamp = timestamp;\n","'use strict';\n\nconst FOLD_FLOW = 'flow';\nconst FOLD_BLOCK = 'block';\nconst FOLD_QUOTED = 'quoted';\n/**\n * Tries to keep input at up to `lineWidth` characters, splitting only on spaces\n * not followed by newlines or spaces unless `mode` is `'quoted'`. Lines are\n * terminated with `\\n` and started with `indent`.\n */\nfunction foldFlowLines(text, indent, mode = 'flow', { indentAtStart, lineWidth = 80, minContentWidth = 20, onFold, onOverflow } = {}) {\n    if (!lineWidth || lineWidth < 0)\n        return text;\n    if (lineWidth < minContentWidth)\n        minContentWidth = 0;\n    const endStep = Math.max(1 + minContentWidth, 1 + lineWidth - indent.length);\n    if (text.length <= endStep)\n        return text;\n    const folds = [];\n    const escapedFolds = {};\n    let end = lineWidth - indent.length;\n    if (typeof indentAtStart === 'number') {\n        if (indentAtStart > lineWidth - Math.max(2, minContentWidth))\n            folds.push(0);\n        else\n            end = lineWidth - indentAtStart;\n    }\n    let split = undefined;\n    let prev = undefined;\n    let overflow = false;\n    let i = -1;\n    let escStart = -1;\n    let escEnd = -1;\n    if (mode === FOLD_BLOCK) {\n        i = consumeMoreIndentedLines(text, i, indent.length);\n        if (i !== -1)\n            end = i + endStep;\n    }\n    for (let ch; (ch = text[(i += 1)]);) {\n        if (mode === FOLD_QUOTED && ch === '\\\\') {\n            escStart = i;\n            switch (text[i + 1]) {\n                case 'x':\n                    i += 3;\n                    break;\n                case 'u':\n                    i += 5;\n                    break;\n                case 'U':\n                    i += 9;\n                    break;\n                default:\n                    i += 1;\n            }\n            escEnd = i;\n        }\n        if (ch === '\\n') {\n            if (mode === FOLD_BLOCK)\n                i = consumeMoreIndentedLines(text, i, indent.length);\n            end = i + indent.length + endStep;\n            split = undefined;\n        }\n        else {\n            if (ch === ' ' &&\n                prev &&\n                prev !== ' ' &&\n                prev !== '\\n' &&\n                prev !== '\\t') {\n                // space surrounded by non-space can be replaced with newline + indent\n                const next = text[i + 1];\n                if (next && next !== ' ' && next !== '\\n' && next !== '\\t')\n                    split = i;\n            }\n            if (i >= end) {\n                if (split) {\n                    folds.push(split);\n                    end = split + endStep;\n                    split = undefined;\n                }\n                else if (mode === FOLD_QUOTED) {\n                    // white-space collected at end may stretch past lineWidth\n                    while (prev === ' ' || prev === '\\t') {\n                        prev = ch;\n                 
       ch = text[(i += 1)];\n                        overflow = true;\n                    }\n                    // Account for newline escape, but don't break preceding escape\n                    const j = i > escEnd + 1 ? i - 2 : escStart - 1;\n                    // Bail out if lineWidth & minContentWidth are shorter than an escape string\n                    if (escapedFolds[j])\n                        return text;\n                    folds.push(j);\n                    escapedFolds[j] = true;\n                    end = j + endStep;\n                    split = undefined;\n                }\n                else {\n                    overflow = true;\n                }\n            }\n        }\n        prev = ch;\n    }\n    if (overflow && onOverflow)\n        onOverflow();\n    if (folds.length === 0)\n        return text;\n    if (onFold)\n        onFold();\n    let res = text.slice(0, folds[0]);\n    for (let i = 0; i < folds.length; ++i) {\n        const fold = folds[i];\n        const end = folds[i + 1] || text.length;\n        if (fold === 0)\n            res = `\\n${indent}${text.slice(0, end)}`;\n        else {\n            if (mode === FOLD_QUOTED && escapedFolds[fold])\n                res += `${text[fold]}\\\\`;\n            res += `\\n${indent}${text.slice(fold + 1, end)}`;\n        }\n    }\n    return res;\n}\n/**\n * Presumes `i + 1` is at the start of a line\n * @returns index of last newline in more-indented block\n */\nfunction consumeMoreIndentedLines(text, i, indent) {\n    let end = i;\n    let start = i + 1;\n    let ch = text[start];\n    while (ch === ' ' || ch === '\\t') {\n        if (i < start + indent) {\n            ch = text[++i];\n        }\n        else {\n            do {\n                ch = text[++i];\n            } while (ch && ch !== '\\n');\n            end = i;\n            start = i + 1;\n            ch = text[start];\n        }\n    }\n    return end;\n}\n\nexports.FOLD_BLOCK = FOLD_BLOCK;\nexports.FOLD_FLOW = FOLD_FLOW;\nexports.FOLD_QUOTED = FOLD_QUOTED;\nexports.foldFlowLines = foldFlowLines;\n","'use strict';\n\nvar anchors = require('../doc/anchors.js');\nvar identity = require('../nodes/identity.js');\nvar stringifyComment = require('./stringifyComment.js');\nvar stringifyString = require('./stringifyString.js');\n\nfunction createStringifyContext(doc, options) {\n    const opt = Object.assign({\n        blockQuote: true,\n        commentString: stringifyComment.stringifyComment,\n        defaultKeyType: null,\n        defaultStringType: 'PLAIN',\n        directives: null,\n        doubleQuotedAsJSON: false,\n        doubleQuotedMinMultiLineLength: 40,\n        falseStr: 'false',\n        flowCollectionPadding: true,\n        indentSeq: true,\n        lineWidth: 80,\n        minContentWidth: 20,\n        nullStr: 'null',\n        simpleKeys: false,\n        singleQuote: null,\n        trueStr: 'true',\n        verifyAliasOrder: true\n    }, doc.schema.toStringOptions, options);\n    let inFlow;\n    switch (opt.collectionStyle) {\n        case 'block':\n            inFlow = false;\n            break;\n        case 'flow':\n            inFlow = true;\n            break;\n        default:\n            inFlow = null;\n    }\n    return {\n        anchors: new Set(),\n        doc,\n        flowCollectionPadding: opt.flowCollectionPadding ? ' ' : '',\n        indent: '',\n        indentStep: typeof opt.indent === 'number' ? 
' '.repeat(opt.indent) : '  ',\n        inFlow,\n        options: opt\n    };\n}\nfunction getTagObject(tags, item) {\n    if (item.tag) {\n        const match = tags.filter(t => t.tag === item.tag);\n        if (match.length > 0)\n            return match.find(t => t.format === item.format) ?? match[0];\n    }\n    let tagObj = undefined;\n    let obj;\n    if (identity.isScalar(item)) {\n        obj = item.value;\n        let match = tags.filter(t => t.identify?.(obj));\n        if (match.length > 1) {\n            const testMatch = match.filter(t => t.test);\n            if (testMatch.length > 0)\n                match = testMatch;\n        }\n        tagObj =\n            match.find(t => t.format === item.format) ?? match.find(t => !t.format);\n    }\n    else {\n        obj = item;\n        tagObj = tags.find(t => t.nodeClass && obj instanceof t.nodeClass);\n    }\n    if (!tagObj) {\n        const name = obj?.constructor?.name ?? typeof obj;\n        throw new Error(`Tag not resolved for ${name} value`);\n    }\n    return tagObj;\n}\n// needs to be called before value stringifier to allow for circular anchor refs\nfunction stringifyProps(node, tagObj, { anchors: anchors$1, doc }) {\n    if (!doc.directives)\n        return '';\n    const props = [];\n    const anchor = (identity.isScalar(node) || identity.isCollection(node)) && node.anchor;\n    if (anchor && anchors.anchorIsValid(anchor)) {\n        anchors$1.add(anchor);\n        props.push(`&${anchor}`);\n    }\n    const tag = node.tag ? node.tag : tagObj.default ? null : tagObj.tag;\n    if (tag)\n        props.push(doc.directives.tagString(tag));\n    return props.join(' ');\n}\nfunction stringify(item, ctx, onComment, onChompKeep) {\n    if (identity.isPair(item))\n        return item.toString(ctx, onComment, onChompKeep);\n    if (identity.isAlias(item)) {\n        if (ctx.doc.directives)\n            return item.toString(ctx);\n        if (ctx.resolvedAliases?.has(item)) {\n            throw new TypeError(`Cannot stringify circular structure without alias nodes`);\n        }\n        else {\n            if (ctx.resolvedAliases)\n                ctx.resolvedAliases.add(item);\n            else\n                ctx.resolvedAliases = new Set([item]);\n            item = item.resolve(ctx.doc);\n        }\n    }\n    let tagObj = undefined;\n    const node = identity.isNode(item)\n        ? item\n        : ctx.doc.createNode(item, { onTagObj: o => (tagObj = o) });\n    if (!tagObj)\n        tagObj = getTagObject(ctx.doc.schema.tags, node);\n    const props = stringifyProps(node, tagObj, ctx);\n    if (props.length > 0)\n        ctx.indentAtStart = (ctx.indentAtStart ?? 0) + props.length + 1;\n    const str = typeof tagObj.stringify === 'function'\n        ? tagObj.stringify(node, ctx, onComment, onChompKeep)\n        : identity.isScalar(node)\n            ? stringifyString.stringifyString(node, ctx, onComment, onChompKeep)\n            : node.toString(ctx, onComment, onChompKeep);\n    if (!props)\n        return str;\n    return identity.isScalar(node) || str[0] === '{' || str[0] === '['\n        ? `${props} ${str}`\n        : `${props}\\n${ctx.indent}${str}`;\n}\n\nexports.createStringifyContext = createStringifyContext;\nexports.stringify = stringify;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyCollection(collection, ctx, options) {\n    const flow = ctx.inFlow ?? 
collection.flow;\n    const stringify = flow ? stringifyFlowCollection : stringifyBlockCollection;\n    return stringify(collection, ctx, options);\n}\nfunction stringifyBlockCollection({ comment, items }, ctx, { blockItemPrefix, flowChars, itemIndent, onChompKeep, onComment }) {\n    const { indent, options: { commentString } } = ctx;\n    const itemCtx = Object.assign({}, ctx, { indent: itemIndent, type: null });\n    let chompKeep = false; // flag for the preceding node's status\n    const lines = [];\n    for (let i = 0; i < items.length; ++i) {\n        const item = items[i];\n        let comment = null;\n        if (identity.isNode(item)) {\n            if (!chompKeep && item.spaceBefore)\n                lines.push('');\n            addCommentBefore(ctx, lines, item.commentBefore, chompKeep);\n            if (item.comment)\n                comment = item.comment;\n        }\n        else if (identity.isPair(item)) {\n            const ik = identity.isNode(item.key) ? item.key : null;\n            if (ik) {\n                if (!chompKeep && ik.spaceBefore)\n                    lines.push('');\n                addCommentBefore(ctx, lines, ik.commentBefore, chompKeep);\n            }\n        }\n        chompKeep = false;\n        let str = stringify.stringify(item, itemCtx, () => (comment = null), () => (chompKeep = true));\n        if (comment)\n            str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n        if (chompKeep && comment)\n            chompKeep = false;\n        lines.push(blockItemPrefix + str);\n    }\n    let str;\n    if (lines.length === 0) {\n        str = flowChars.start + flowChars.end;\n    }\n    else {\n        str = lines[0];\n        for (let i = 1; i < lines.length; ++i) {\n            const line = lines[i];\n            str += line ? `\\n${indent}${line}` : '\\n';\n        }\n    }\n    if (comment) {\n        str += '\\n' + stringifyComment.indentComment(commentString(comment), indent);\n        if (onComment)\n            onComment();\n    }\n    else if (chompKeep && onChompKeep)\n        onChompKeep();\n    return str;\n}\nfunction stringifyFlowCollection({ items }, ctx, { flowChars, itemIndent }) {\n    const { indent, indentStep, flowCollectionPadding: fcPadding, options: { commentString } } = ctx;\n    itemIndent += indentStep;\n    const itemCtx = Object.assign({}, ctx, {\n        indent: itemIndent,\n        inFlow: true,\n        type: null\n    });\n    let reqNewline = false;\n    let linesAtValue = 0;\n    const lines = [];\n    for (let i = 0; i < items.length; ++i) {\n        const item = items[i];\n        let comment = null;\n        if (identity.isNode(item)) {\n            if (item.spaceBefore)\n                lines.push('');\n            addCommentBefore(ctx, lines, item.commentBefore, false);\n            if (item.comment)\n                comment = item.comment;\n        }\n        else if (identity.isPair(item)) {\n            const ik = identity.isNode(item.key) ? item.key : null;\n            if (ik) {\n                if (ik.spaceBefore)\n                    lines.push('');\n                addCommentBefore(ctx, lines, ik.commentBefore, false);\n                if (ik.comment)\n                    reqNewline = true;\n            }\n            const iv = identity.isNode(item.value) ? 
item.value : null;\n            if (iv) {\n                if (iv.comment)\n                    comment = iv.comment;\n                if (iv.commentBefore)\n                    reqNewline = true;\n            }\n            else if (item.value == null && ik?.comment) {\n                comment = ik.comment;\n            }\n        }\n        if (comment)\n            reqNewline = true;\n        let str = stringify.stringify(item, itemCtx, () => (comment = null));\n        if (i < items.length - 1)\n            str += ',';\n        if (comment)\n            str += stringifyComment.lineComment(str, itemIndent, commentString(comment));\n        if (!reqNewline && (lines.length > linesAtValue || str.includes('\\n')))\n            reqNewline = true;\n        lines.push(str);\n        linesAtValue = lines.length;\n    }\n    const { start, end } = flowChars;\n    if (lines.length === 0) {\n        return start + end;\n    }\n    else {\n        if (!reqNewline) {\n            const len = lines.reduce((sum, line) => sum + line.length + 2, 2);\n            reqNewline = ctx.options.lineWidth > 0 && len > ctx.options.lineWidth;\n        }\n        if (reqNewline) {\n            let str = start;\n            for (const line of lines)\n                str += line ? `\\n${indentStep}${indent}${line}` : '\\n';\n            return `${str}\\n${indent}${end}`;\n        }\n        else {\n            return `${start}${fcPadding}${lines.join(' ')}${fcPadding}${end}`;\n        }\n    }\n}\nfunction addCommentBefore({ indent, options: { commentString } }, lines, comment, chompKeep) {\n    if (comment && chompKeep)\n        comment = comment.replace(/^\\n+/, '');\n    if (comment) {\n        const ic = stringifyComment.indentComment(commentString(comment), indent);\n        lines.push(ic.trimStart()); // Avoid double indent on first line\n    }\n}\n\nexports.stringifyCollection = stringifyCollection;\n","'use strict';\n\n/**\n * Stringifies a comment.\n *\n * Empty comment lines are left empty,\n * lines consisting of a single space are replaced by `#`,\n * and all other lines are prefixed with a `#`.\n */\nconst stringifyComment = (str) => str.replace(/^(?!$)(?: $)?/gm, '#');\nfunction indentComment(comment, indent) {\n    if (/^\\n+$/.test(comment))\n        return comment.substring(1);\n    return indent ? comment.replace(/^(?! *$)/gm, indent) : comment;\n}\nconst lineComment = (str, indent, comment) => str.endsWith('\\n')\n    ? indentComment(comment, indent)\n    : comment.includes('\\n')\n        ? '\\n' + indentComment(comment, indent)\n        : (str.endsWith(' ') ? 
'' : ' ') + comment;\n\nexports.indentComment = indentComment;\nexports.lineComment = lineComment;\nexports.stringifyComment = stringifyComment;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyDocument(doc, options) {\n    const lines = [];\n    let hasDirectives = options.directives === true;\n    if (options.directives !== false && doc.directives) {\n        const dir = doc.directives.toString(doc);\n        if (dir) {\n            lines.push(dir);\n            hasDirectives = true;\n        }\n        else if (doc.directives.docStart)\n            hasDirectives = true;\n    }\n    if (hasDirectives)\n        lines.push('---');\n    const ctx = stringify.createStringifyContext(doc, options);\n    const { commentString } = ctx.options;\n    if (doc.commentBefore) {\n        if (lines.length !== 1)\n            lines.unshift('');\n        const cs = commentString(doc.commentBefore);\n        lines.unshift(stringifyComment.indentComment(cs, ''));\n    }\n    let chompKeep = false;\n    let contentComment = null;\n    if (doc.contents) {\n        if (identity.isNode(doc.contents)) {\n            if (doc.contents.spaceBefore && hasDirectives)\n                lines.push('');\n            if (doc.contents.commentBefore) {\n                const cs = commentString(doc.contents.commentBefore);\n                lines.push(stringifyComment.indentComment(cs, ''));\n            }\n            // top-level block scalars need to be indented if followed by a comment\n            ctx.forceBlockIndent = !!doc.comment;\n            contentComment = doc.contents.comment;\n        }\n        const onChompKeep = contentComment ? undefined : () => (chompKeep = true);\n        let body = stringify.stringify(doc.contents, ctx, () => (contentComment = null), onChompKeep);\n        if (contentComment)\n            body += stringifyComment.lineComment(body, '', commentString(contentComment));\n        if ((body[0] === '|' || body[0] === '>') &&\n            lines[lines.length - 1] === '---') {\n            // Top-level block scalars with a preceding doc marker ought to use the\n            // same line for their header.\n            lines[lines.length - 1] = `--- ${body}`;\n        }\n        else\n            lines.push(body);\n    }\n    else {\n        lines.push(stringify.stringify(doc.contents, ctx));\n    }\n    if (doc.directives?.docEnd) {\n        if (doc.comment) {\n            const cs = commentString(doc.comment);\n            if (cs.includes('\\n')) {\n                lines.push('...');\n                lines.push(stringifyComment.indentComment(cs, ''));\n            }\n            else {\n                lines.push(`... ${cs}`);\n            }\n        }\n        else {\n            lines.push('...');\n        }\n    }\n    else {\n        let dc = doc.comment;\n        if (dc && chompKeep)\n            dc = dc.replace(/^\\n+/, '');\n        if (dc) {\n            if ((!chompKeep || contentComment) && lines[lines.length - 1] !== '')\n                lines.push('');\n            lines.push(stringifyComment.indentComment(commentString(dc), ''));\n        }\n    }\n    return lines.join('\\n') + '\\n';\n}\n\nexports.stringifyDocument = stringifyDocument;\n","'use strict';\n\nfunction stringifyNumber({ format, minFractionDigits, tag, value }) {\n    if (typeof value === 'bigint')\n        return String(value);\n    const num = typeof value === 'number' ? 
value : Number(value);\n    if (!isFinite(num))\n        return isNaN(num) ? '.nan' : num < 0 ? '-.inf' : '.inf';\n    let n = JSON.stringify(value);\n    if (!format &&\n        minFractionDigits &&\n        (!tag || tag === 'tag:yaml.org,2002:float') &&\n        /^\\d/.test(n)) {\n        let i = n.indexOf('.');\n        if (i < 0) {\n            i = n.length;\n            n += '.';\n        }\n        let d = minFractionDigits - (n.length - i - 1);\n        while (d-- > 0)\n            n += '0';\n    }\n    return n;\n}\n\nexports.stringifyNumber = stringifyNumber;\n","'use strict';\n\nvar identity = require('../nodes/identity.js');\nvar Scalar = require('../nodes/Scalar.js');\nvar stringify = require('./stringify.js');\nvar stringifyComment = require('./stringifyComment.js');\n\nfunction stringifyPair({ key, value }, ctx, onComment, onChompKeep) {\n    const { allNullValues, doc, indent, indentStep, options: { commentString, indentSeq, simpleKeys } } = ctx;\n    let keyComment = (identity.isNode(key) && key.comment) || null;\n    if (simpleKeys) {\n        if (keyComment) {\n            throw new Error('With simple keys, key nodes cannot have comments');\n        }\n        if (identity.isCollection(key) || (!identity.isNode(key) && typeof key === 'object')) {\n            const msg = 'With simple keys, collection cannot be used as a key value';\n            throw new Error(msg);\n        }\n    }\n    let explicitKey = !simpleKeys &&\n        (!key ||\n            (keyComment && value == null && !ctx.inFlow) ||\n            identity.isCollection(key) ||\n            (identity.isScalar(key)\n                ? key.type === Scalar.Scalar.BLOCK_FOLDED || key.type === Scalar.Scalar.BLOCK_LITERAL\n                : typeof key === 'object'));\n    ctx = Object.assign({}, ctx, {\n        allNullValues: false,\n        implicitKey: !explicitKey && (simpleKeys || !allNullValues),\n        indent: indent + indentStep\n    });\n    let keyCommentDone = false;\n    let chompKeep = false;\n    let str = stringify.stringify(key, ctx, () => (keyCommentDone = true), () => (chompKeep = true));\n    if (!explicitKey && !ctx.inFlow && str.length > 1024) {\n        if (simpleKeys)\n            throw new Error('With simple keys, single line scalar must not span more than 1024 characters');\n        explicitKey = true;\n    }\n    if (ctx.inFlow) {\n        if (allNullValues || value == null) {\n            if (keyCommentDone && onComment)\n                onComment();\n            return str === '' ? '?' : explicitKey ? `? ${str}` : str;\n        }\n    }\n    else if ((allNullValues && !simpleKeys) || (value == null && explicitKey)) {\n        str = `? ${str}`;\n        if (keyComment && !keyCommentDone) {\n            str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n        }\n        else if (chompKeep && onChompKeep)\n            onChompKeep();\n        return str;\n    }\n    if (keyCommentDone)\n        keyComment = null;\n    if (explicitKey) {\n        if (keyComment)\n            str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n        str = `? 
${str}\\n${indent}:`;\n    }\n    else {\n        str = `${str}:`;\n        if (keyComment)\n            str += stringifyComment.lineComment(str, ctx.indent, commentString(keyComment));\n    }\n    let vsb, vcb, valueComment;\n    if (identity.isNode(value)) {\n        vsb = !!value.spaceBefore;\n        vcb = value.commentBefore;\n        valueComment = value.comment;\n    }\n    else {\n        vsb = false;\n        vcb = null;\n        valueComment = null;\n        if (value && typeof value === 'object')\n            value = doc.createNode(value);\n    }\n    ctx.implicitKey = false;\n    if (!explicitKey && !keyComment && identity.isScalar(value))\n        ctx.indentAtStart = str.length + 1;\n    chompKeep = false;\n    if (!indentSeq &&\n        indentStep.length >= 2 &&\n        !ctx.inFlow &&\n        !explicitKey &&\n        identity.isSeq(value) &&\n        !value.flow &&\n        !value.tag &&\n        !value.anchor) {\n        // If indentSeq === false, consider '- ' as part of indentation where possible\n        ctx.indent = ctx.indent.substring(2);\n    }\n    let valueCommentDone = false;\n    const valueStr = stringify.stringify(value, ctx, () => (valueCommentDone = true), () => (chompKeep = true));\n    let ws = ' ';\n    if (keyComment || vsb || vcb) {\n        ws = vsb ? '\\n' : '';\n        if (vcb) {\n            const cs = commentString(vcb);\n            ws += `\\n${stringifyComment.indentComment(cs, ctx.indent)}`;\n        }\n        if (valueStr === '' && !ctx.inFlow) {\n            if (ws === '\\n')\n                ws = '\\n\\n';\n        }\n        else {\n            ws += `\\n${ctx.indent}`;\n        }\n    }\n    else if (!explicitKey && identity.isCollection(value)) {\n        const vs0 = valueStr[0];\n        const nl0 = valueStr.indexOf('\\n');\n        const hasNewline = nl0 !== -1;\n        const flow = ctx.inFlow ?? value.flow ?? value.items.length === 0;\n        if (hasNewline || !flow) {\n            let hasPropsLine = false;\n            if (hasNewline && (vs0 === '&' || vs0 === '!')) {\n                let sp0 = valueStr.indexOf(' ');\n                if (vs0 === '&' &&\n                    sp0 !== -1 &&\n                    sp0 < nl0 &&\n                    valueStr[sp0 + 1] === '!') {\n                    sp0 = valueStr.indexOf(' ', sp0 + 1);\n                }\n                if (sp0 === -1 || nl0 < sp0)\n                    hasPropsLine = true;\n            }\n            if (!hasPropsLine)\n                ws = `\\n${ctx.indent}`;\n        }\n    }\n    else if (valueStr === '' || valueStr[0] === '\\n') {\n        ws = '';\n    }\n    str += ws + valueStr;\n    if (ctx.inFlow) {\n        if (valueCommentDone && onComment)\n            onComment();\n    }\n    else if (valueComment && !valueCommentDone) {\n        str += stringifyComment.lineComment(str, ctx.indent, commentString(valueComment));\n    }\n    else if (chompKeep && onChompKeep) {\n        onChompKeep();\n    }\n    return str;\n}\n\nexports.stringifyPair = stringifyPair;\n","'use strict';\n\nvar Scalar = require('../nodes/Scalar.js');\nvar foldFlowLines = require('./foldFlowLines.js');\n\nconst getFoldOptions = (ctx, isBlock) => ({\n    indentAtStart: isBlock ? 
ctx.indent.length : ctx.indentAtStart,\n    lineWidth: ctx.options.lineWidth,\n    minContentWidth: ctx.options.minContentWidth\n});\n// Also checks for lines starting with %, as parsing the output as YAML 1.1 will\n// presume that's starting a new document.\nconst containsDocumentMarker = (str) => /^(%|---|\\.\\.\\.)/m.test(str);\nfunction lineLengthOverLimit(str, lineWidth, indentLength) {\n    if (!lineWidth || lineWidth < 0)\n        return false;\n    const limit = lineWidth - indentLength;\n    const strLen = str.length;\n    if (strLen <= limit)\n        return false;\n    for (let i = 0, start = 0; i < strLen; ++i) {\n        if (str[i] === '\\n') {\n            if (i - start > limit)\n                return true;\n            start = i + 1;\n            if (strLen - start <= limit)\n                return false;\n        }\n    }\n    return true;\n}\nfunction doubleQuotedString(value, ctx) {\n    const json = JSON.stringify(value);\n    if (ctx.options.doubleQuotedAsJSON)\n        return json;\n    const { implicitKey } = ctx;\n    const minMultiLineLength = ctx.options.doubleQuotedMinMultiLineLength;\n    const indent = ctx.indent || (containsDocumentMarker(value) ? '  ' : '');\n    let str = '';\n    let start = 0;\n    for (let i = 0, ch = json[i]; ch; ch = json[++i]) {\n        if (ch === ' ' && json[i + 1] === '\\\\' && json[i + 2] === 'n') {\n            // space before newline needs to be escaped to not be folded\n            str += json.slice(start, i) + '\\\\ ';\n            i += 1;\n            start = i;\n            ch = '\\\\';\n        }\n        if (ch === '\\\\')\n            switch (json[i + 1]) {\n                case 'u':\n                    {\n                        str += json.slice(start, i);\n                        const code = json.substr(i + 2, 4);\n                        switch (code) {\n                            case '0000':\n                                str += '\\\\0';\n                                break;\n                            case '0007':\n                                str += '\\\\a';\n                                break;\n                            case '000b':\n                                str += '\\\\v';\n                                break;\n                            case '001b':\n                                str += '\\\\e';\n                                break;\n                            case '0085':\n                                str += '\\\\N';\n                                break;\n                            case '00a0':\n                                str += '\\\\_';\n                                break;\n                            case '2028':\n                                str += '\\\\L';\n                                break;\n                            case '2029':\n                                str += '\\\\P';\n                                break;\n                            default:\n                                if (code.substr(0, 2) === '00')\n                                    str += '\\\\x' + code.substr(2);\n                                else\n                                    str += json.substr(i, 6);\n                        }\n                        i += 5;\n                        start = i + 1;\n                    }\n                    break;\n                case 'n':\n                    if (implicitKey ||\n                        json[i + 2] === '\"' ||\n                        json.length < minMultiLineLength) {\n                        i += 1;\n             
       }\n                    else {\n                        // folding will eat first newline\n                        str += json.slice(start, i) + '\\n\\n';\n                        while (json[i + 2] === '\\\\' &&\n                            json[i + 3] === 'n' &&\n                            json[i + 4] !== '\"') {\n                            str += '\\n';\n                            i += 2;\n                        }\n                        str += indent;\n                        // space after newline needs to be escaped to not be folded\n                        if (json[i + 2] === ' ')\n                            str += '\\\\';\n                        i += 1;\n                        start = i + 1;\n                    }\n                    break;\n                default:\n                    i += 1;\n            }\n    }\n    str = start ? str + json.slice(start) : json;\n    return implicitKey\n        ? str\n        : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_QUOTED, getFoldOptions(ctx, false));\n}\nfunction singleQuotedString(value, ctx) {\n    if (ctx.options.singleQuote === false ||\n        (ctx.implicitKey && value.includes('\\n')) ||\n        /[ \\t]\\n|\\n[ \\t]/.test(value) // single quoted string can't have leading or trailing whitespace around newline\n    )\n        return doubleQuotedString(value, ctx);\n    const indent = ctx.indent || (containsDocumentMarker(value) ? '  ' : '');\n    const res = \"'\" + value.replace(/'/g, \"''\").replace(/\\n+/g, `$&\\n${indent}`) + \"'\";\n    return ctx.implicitKey\n        ? res\n        : foldFlowLines.foldFlowLines(res, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));\n}\nfunction quotedString(value, ctx) {\n    const { singleQuote } = ctx.options;\n    let qs;\n    if (singleQuote === false)\n        qs = doubleQuotedString;\n    else {\n        const hasDouble = value.includes('\"');\n        const hasSingle = value.includes(\"'\");\n        if (hasDouble && !hasSingle)\n            qs = singleQuotedString;\n        else if (hasSingle && !hasDouble)\n            qs = doubleQuotedString;\n        else\n            qs = singleQuote ? 
singleQuotedString : doubleQuotedString;\n    }\n    return qs(value, ctx);\n}\n// The negative lookbehind avoids a polynomial search,\n// but isn't supported yet on Safari: https://caniuse.com/js-regexp-lookbehind\nlet blockEndNewlines;\ntry {\n    blockEndNewlines = new RegExp('(^|(?\\n';\n    // determine chomping from whitespace at value end\n    let chomp;\n    let endStart;\n    for (endStart = value.length; endStart > 0; --endStart) {\n        const ch = value[endStart - 1];\n        if (ch !== '\\n' && ch !== '\\t' && ch !== ' ')\n            break;\n    }\n    let end = value.substring(endStart);\n    const endNlPos = end.indexOf('\\n');\n    if (endNlPos === -1) {\n        chomp = '-'; // strip\n    }\n    else if (value === end || endNlPos !== end.length - 1) {\n        chomp = '+'; // keep\n        if (onChompKeep)\n            onChompKeep();\n    }\n    else {\n        chomp = ''; // clip\n    }\n    if (end) {\n        value = value.slice(0, -end.length);\n        if (end[end.length - 1] === '\\n')\n            end = end.slice(0, -1);\n        end = end.replace(blockEndNewlines, `$&${indent}`);\n    }\n    // determine indent indicator from whitespace at value start\n    let startWithSpace = false;\n    let startEnd;\n    let startNlPos = -1;\n    for (startEnd = 0; startEnd < value.length; ++startEnd) {\n        const ch = value[startEnd];\n        if (ch === ' ')\n            startWithSpace = true;\n        else if (ch === '\\n')\n            startNlPos = startEnd;\n        else\n            break;\n    }\n    let start = value.substring(0, startNlPos < startEnd ? startNlPos + 1 : startEnd);\n    if (start) {\n        value = value.substring(start.length);\n        start = start.replace(/\\n+/g, `$&${indent}`);\n    }\n    const indentSize = indent ? '2' : '1'; // root is at -1\n    // Leading | or > is added later\n    let header = (startWithSpace ? indentSize : '') + chomp;\n    if (comment) {\n        header += ' ' + commentString(comment.replace(/ ?[\\r\\n]+/g, ' '));\n        if (onComment)\n            onComment();\n    }\n    if (!literal) {\n        const foldedValue = value\n            .replace(/\\n+/g, '\\n$&')\n            .replace(/(?:^|\\n)([\\t ].*)(?:([\\n\\t ]*)\\n(?![\\n\\t ]))?/g, '$1$2') // more-indented lines aren't folded\n            //                ^ more-ind. 
^ empty     ^ capture next empty lines only at end of indent\n            .replace(/\\n+/g, `$&${indent}`);\n        let literalFallback = false;\n        const foldOptions = getFoldOptions(ctx, true);\n        if (blockQuote !== 'folded' && type !== Scalar.Scalar.BLOCK_FOLDED) {\n            foldOptions.onOverflow = () => {\n                literalFallback = true;\n            };\n        }\n        const body = foldFlowLines.foldFlowLines(`${start}${foldedValue}${end}`, indent, foldFlowLines.FOLD_BLOCK, foldOptions);\n        if (!literalFallback)\n            return `>${header}\\n${indent}${body}`;\n    }\n    value = value.replace(/\\n+/g, `$&${indent}`);\n    return `|${header}\\n${indent}${start}${value}${end}`;\n}\nfunction plainString(item, ctx, onComment, onChompKeep) {\n    const { type, value } = item;\n    const { actualString, implicitKey, indent, indentStep, inFlow } = ctx;\n    if ((implicitKey && value.includes('\\n')) ||\n        (inFlow && /[[\\]{},]/.test(value))) {\n        return quotedString(value, ctx);\n    }\n    if (!value ||\n        /^[\\n\\t ,[\\]{}#&*!|>'\"%@`]|^[?-]$|^[?-][ \\t]|[\\n:][ \\t]|[ \\t]\\n|[\\n\\t ]#|[\\n\\t :]$/.test(value)) {\n        // not allowed:\n        // - empty string, '-' or '?'\n        // - start with an indicator character (except [?:-]) or /[?-] /\n        // - '\\n ', ': ' or ' \\n' anywhere\n        // - '#' not preceded by a non-space char\n        // - end with ' ' or ':'\n        return implicitKey || inFlow || !value.includes('\\n')\n            ? quotedString(value, ctx)\n            : blockString(item, ctx, onComment, onChompKeep);\n    }\n    if (!implicitKey &&\n        !inFlow &&\n        type !== Scalar.Scalar.PLAIN &&\n        value.includes('\\n')) {\n        // Where allowed & type not set explicitly, prefer block style for multiline strings\n        return blockString(item, ctx, onComment, onChompKeep);\n    }\n    if (containsDocumentMarker(value)) {\n        if (indent === '') {\n            ctx.forceBlockIndent = true;\n            return blockString(item, ctx, onComment, onChompKeep);\n        }\n        else if (implicitKey && indent === indentStep) {\n            return quotedString(value, ctx);\n        }\n    }\n    const str = value.replace(/\\n+/g, `$&\\n${indent}`);\n    // Verify that output will be parsed as a string, as e.g. plain numbers and\n    // booleans get parsed with those types in v1.2 (e.g. '42', 'true' & '0.9e-3'),\n    // and others in v1.1.\n    if (actualString) {\n        const test = (tag) => tag.default && tag.tag !== 'tag:yaml.org,2002:str' && tag.test?.test(str);\n        const { compat, tags } = ctx.doc.schema;\n        if (tags.some(test) || compat?.some(test))\n            return quotedString(value, ctx);\n    }\n    return implicitKey\n        ? str\n        : foldFlowLines.foldFlowLines(str, indent, foldFlowLines.FOLD_FLOW, getFoldOptions(ctx, false));\n}\nfunction stringifyString(item, ctx, onComment, onChompKeep) {\n    const { implicitKey, inFlow } = ctx;\n    const ss = typeof item.value === 'string'\n        ? 
item\n        : Object.assign({}, item, { value: String(item.value) });\n    let { type } = item;\n    if (type !== Scalar.Scalar.QUOTE_DOUBLE) {\n        // force double quotes on control characters & unpaired surrogates\n        if (/[\\x00-\\x08\\x0b-\\x1f\\x7f-\\x9f\\u{D800}-\\u{DFFF}]/u.test(ss.value))\n            type = Scalar.Scalar.QUOTE_DOUBLE;\n    }\n    const _stringify = (_type) => {\n        switch (_type) {\n            case Scalar.Scalar.BLOCK_FOLDED:\n            case Scalar.Scalar.BLOCK_LITERAL:\n                return implicitKey || inFlow\n                    ? quotedString(ss.value, ctx) // blocks are not valid inside flow containers\n                    : blockString(ss, ctx, onComment, onChompKeep);\n            case Scalar.Scalar.QUOTE_DOUBLE:\n                return doubleQuotedString(ss.value, ctx);\n            case Scalar.Scalar.QUOTE_SINGLE:\n                return singleQuotedString(ss.value, ctx);\n            case Scalar.Scalar.PLAIN:\n                return plainString(ss, ctx, onComment, onChompKeep);\n            default:\n                return null;\n        }\n    };\n    let res = _stringify(type);\n    if (res === null) {\n        const { defaultKeyType, defaultStringType } = ctx.options;\n        const t = (implicitKey && defaultKeyType) || defaultStringType;\n        res = _stringify(t);\n        if (res === null)\n            throw new Error(`Unsupported default string type ${t}`);\n    }\n    return res;\n}\n\nexports.stringifyString = stringifyString;\n","'use strict';\n\nvar identity = require('./nodes/identity.js');\n\nconst BREAK = Symbol('break visit');\nconst SKIP = Symbol('skip children');\nconst REMOVE = Symbol('remove node');\n/**\n * Apply a visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n *   - `key`: For sequence values and map `Pair`, the node's index in the\n *     collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n *     `null` for the root node.\n *   - `node`: The current node.\n *   - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n *   - `undefined` (default): Do nothing and continue\n *   - `visit.SKIP`: Do not visit the children of this node, continue with next\n *     sibling\n *   - `visit.BREAK`: Terminate traversal completely\n *   - `visit.REMOVE`: Remove the current node, then continue with the next one\n *   - `Node`: Replace the current node, then continue by visiting it\n *   - `number`: While iterating the items of a sequence or map, set the index\n *     of the next step. This is useful especially if the index of the current\n *     node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. 
Of all these, only the most\n * specific defined one will be used for each node.\n */\nfunction visit(node, visitor) {\n    const visitor_ = initVisitor(visitor);\n    if (identity.isDocument(node)) {\n        const cd = visit_(null, node.contents, visitor_, Object.freeze([node]));\n        if (cd === REMOVE)\n            node.contents = null;\n    }\n    else\n        visit_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisit.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisit.SKIP = SKIP;\n/** Remove the current node */\nvisit.REMOVE = REMOVE;\nfunction visit_(key, node, visitor, path) {\n    const ctrl = callVisitor(key, node, visitor, path);\n    if (identity.isNode(ctrl) || identity.isPair(ctrl)) {\n        replaceNode(key, path, ctrl);\n        return visit_(key, ctrl, visitor, path);\n    }\n    if (typeof ctrl !== 'symbol') {\n        if (identity.isCollection(node)) {\n            path = Object.freeze(path.concat(node));\n            for (let i = 0; i < node.items.length; ++i) {\n                const ci = visit_(i, node.items[i], visitor, path);\n                if (typeof ci === 'number')\n                    i = ci - 1;\n                else if (ci === BREAK)\n                    return BREAK;\n                else if (ci === REMOVE) {\n                    node.items.splice(i, 1);\n                    i -= 1;\n                }\n            }\n        }\n        else if (identity.isPair(node)) {\n            path = Object.freeze(path.concat(node));\n            const ck = visit_('key', node.key, visitor, path);\n            if (ck === BREAK)\n                return BREAK;\n            else if (ck === REMOVE)\n                node.key = null;\n            const cv = visit_('value', node.value, visitor, path);\n            if (cv === BREAK)\n                return BREAK;\n            else if (cv === REMOVE)\n                node.value = null;\n        }\n    }\n    return ctrl;\n}\n/**\n * Apply an async visitor to an AST node or document.\n *\n * Walks through the tree (depth-first) starting from `node`, calling a\n * `visitor` function with three arguments:\n *   - `key`: For sequence values and map `Pair`, the node's index in the\n *     collection. Within a `Pair`, `'key'` or `'value'`, correspondingly.\n *     `null` for the root node.\n *   - `node`: The current node.\n *   - `path`: The ancestry of the current node.\n *\n * The return value of the visitor may be used to control the traversal:\n *   - `Promise`: Must resolve to one of the following values\n *   - `undefined` (default): Do nothing and continue\n *   - `visit.SKIP`: Do not visit the children of this node, continue with next\n *     sibling\n *   - `visit.BREAK`: Terminate traversal completely\n *   - `visit.REMOVE`: Remove the current node, then continue with the next one\n *   - `Node`: Replace the current node, then continue by visiting it\n *   - `number`: While iterating the items of a sequence or map, set the index\n *     of the next step. This is useful especially if the index of the current\n *     node has changed.\n *\n * If `visitor` is a single function, it will be called with all values\n * encountered in the tree, including e.g. `null` values. 
Alternatively,\n * separate visitor functions may be defined for each `Map`, `Pair`, `Seq`,\n * `Alias` and `Scalar` node. To define the same visitor function for more than\n * one node type, use the `Collection` (map and seq), `Value` (map, seq & scalar)\n * and `Node` (alias, map, seq & scalar) targets. Of all these, only the most\n * specific defined one will be used for each node.\n */\nasync function visitAsync(node, visitor) {\n    const visitor_ = initVisitor(visitor);\n    if (identity.isDocument(node)) {\n        const cd = await visitAsync_(null, node.contents, visitor_, Object.freeze([node]));\n        if (cd === REMOVE)\n            node.contents = null;\n    }\n    else\n        await visitAsync_(null, node, visitor_, Object.freeze([]));\n}\n// Without the `as symbol` casts, TS declares these in the `visit`\n// namespace using `var`, but then complains about that because\n// `unique symbol` must be `const`.\n/** Terminate visit traversal completely */\nvisitAsync.BREAK = BREAK;\n/** Do not visit the children of the current node */\nvisitAsync.SKIP = SKIP;\n/** Remove the current node */\nvisitAsync.REMOVE = REMOVE;\nasync function visitAsync_(key, node, visitor, path) {\n    const ctrl = await callVisitor(key, node, visitor, path);\n    if (identity.isNode(ctrl) || identity.isPair(ctrl)) {\n        replaceNode(key, path, ctrl);\n        return visitAsync_(key, ctrl, visitor, path);\n    }\n    if (typeof ctrl !== 'symbol') {\n        if (identity.isCollection(node)) {\n            path = Object.freeze(path.concat(node));\n            for (let i = 0; i < node.items.length; ++i) {\n                const ci = await visitAsync_(i, node.items[i], visitor, path);\n                if (typeof ci === 'number')\n                    i = ci - 1;\n                else if (ci === BREAK)\n                    return BREAK;\n                else if (ci === REMOVE) {\n                    node.items.splice(i, 1);\n                    i -= 1;\n                }\n            }\n        }\n        else if (identity.isPair(node)) {\n            path = Object.freeze(path.concat(node));\n            const ck = await visitAsync_('key', node.key, visitor, path);\n            if (ck === BREAK)\n                return BREAK;\n            else if (ck === REMOVE)\n                node.key = null;\n            const cv = await visitAsync_('value', node.value, visitor, path);\n            if (cv === BREAK)\n                return BREAK;\n            else if (cv === REMOVE)\n                node.value = null;\n        }\n    }\n    return ctrl;\n}\nfunction initVisitor(visitor) {\n    if (typeof visitor === 'object' &&\n        (visitor.Collection || visitor.Node || visitor.Value)) {\n        return Object.assign({\n            Alias: visitor.Node,\n            Map: visitor.Node,\n            Scalar: visitor.Node,\n            Seq: visitor.Node\n        }, visitor.Value && {\n            Map: visitor.Value,\n            Scalar: visitor.Value,\n            Seq: visitor.Value\n        }, visitor.Collection && {\n            Map: visitor.Collection,\n            Seq: visitor.Collection\n        }, visitor);\n    }\n    return visitor;\n}\nfunction callVisitor(key, node, visitor, path) {\n    if (typeof visitor === 'function')\n        return visitor(key, node, path);\n    if (identity.isMap(node))\n        return visitor.Map?.(key, node, path);\n    if (identity.isSeq(node))\n        return visitor.Seq?.(key, node, path);\n    if (identity.isPair(node))\n        return visitor.Pair?.(key, node, path);\n    if 
(identity.isScalar(node))\n        return visitor.Scalar?.(key, node, path);\n    if (identity.isAlias(node))\n        return visitor.Alias?.(key, node, path);\n    return undefined;\n}\nfunction replaceNode(key, path, node) {\n    const parent = path[path.length - 1];\n    if (identity.isCollection(parent)) {\n        parent.items[key] = node;\n    }\n    else if (identity.isPair(parent)) {\n        if (key === 'key')\n            parent.key = node;\n        else\n            parent.value = node;\n    }\n    else if (identity.isDocument(parent)) {\n        parent.contents = node;\n    }\n    else {\n        const pt = identity.isAlias(parent) ? 'alias' : 'scalar';\n        throw new Error(`Cannot replace node with ${pt} parent`);\n    }\n}\n\nexports.visit = visit;\nexports.visitAsync = visitAsync;\n","// The module cache\nvar __webpack_module_cache__ = {};\n\n// The require function\nfunction __webpack_require__(moduleId) {\n\t// Check if module is in cache\n\tvar cachedModule = __webpack_module_cache__[moduleId];\n\tif (cachedModule !== undefined) {\n\t\treturn cachedModule.exports;\n\t}\n\t// Create a new module (and put it into the cache)\n\tvar module = __webpack_module_cache__[moduleId] = {\n\t\t// no module.id needed\n\t\t// no module.loaded needed\n\t\texports: {}\n\t};\n\n\t// Execute the module function\n\tvar threw = true;\n\ttry {\n\t\t__webpack_modules__[moduleId].call(module.exports, module, module.exports, __webpack_require__);\n\t\tthrew = false;\n\t} finally {\n\t\tif(threw) delete __webpack_module_cache__[moduleId];\n\t}\n\n\t// Return the exports of the module\n\treturn module.exports;\n}\n\n","\nif (typeof __webpack_require__ !== 'undefined') __webpack_require__.ab = __dirname + \"/\";","","// startup\n// Load entry module and return exports\n// This entry module is referenced by other modules so it can't be inlined\nvar __webpack_exports__ = __webpack_require__(3109);\n",""],"names":[],"sourceRoot":""}
\ No newline at end of file
diff --git a/package.json b/package.json
index 5ec57c8..8ffaff2 100644
--- a/package.json
+++ b/package.json
@@ -9,7 +9,8 @@
     "build": "tsc",
     "package": "ncc build --source-map --license licenses.txt",
     "format": "prettier --write .",
-    "lint": "prettier --check ."
+    "lint": "prettier --check .",
+    "test": "vitest"
   },
   "dependencies": {
     "@actions/core": "^1.10.0",
@@ -17,12 +18,15 @@
     "minimatch": "^7.4.2",
     "openai": "^4.20.1",
     "parse-diff": "^0.11.1",
-    "ts-node": "^10.9.1"
+    "ts-node": "^10.9.1",
+    "yaml": "^2.6.1"
   },
   "devDependencies": {
+    "@types/jest": "^29.0.0",
     "@types/node": "^18.15.5",
     "@vercel/ncc": "^0.36.1",
     "prettier": "^2.8.6",
-    "typescript": "^5.0.2"
+    "typescript": "^5.0.2",
+    "vitest": "^2.1.5"
   }
 }
diff --git a/rules.yaml b/rules.yaml
new file mode 100644
index 0000000..e4bb8c7
--- /dev/null
+++ b/rules.yaml
@@ -0,0 +1,23 @@
+# Global rules that apply to all files/directories unless overridden
+global: []  
+
+# Rules for specific file extensions
+extensions:
+  [".ts", ".js"]: ["Always use export default"]
+  "tsx": ["Always use arrow functions"]
+
+# Rules for specific directories
+directories: 
+  "src/**": []
+
+# Paths to ignore completely
+ignore:
+  - ".git"
+  - "node_modules"
+  - "dist"
+  - "build" 
+  - "_build" 
+  - "csv" 
+  - "json" 
+
+  
\ No newline at end of file
diff --git a/src/main.test.ts b/src/main.test.ts
new file mode 100644
index 0000000..9b66360
--- /dev/null
+++ b/src/main.test.ts
@@ -0,0 +1,139 @@
+import { describe, expect, test } from 'vitest';
+import { getApplicableRules, readRules } from './main';
+
+describe('readRules', () => {
+  test('should read and return rules from the file', () => {
+    const result = readRules('rules.yaml');
+    expect(result).toEqual({
+      directories: {
+        "src/**": [],
+      },
+      extensions: {
+        '.ts': ["Always use export default"],
+        '.js': ["Always use export default"],
+        'tsx': [ 'Always use arrow functions' ],
+      },
+      global: [],
+      ignore: [
+        ".git",
+        "node_modules",
+        "dist",
+        "build",
+        "_build",
+        "csv",
+        "json",
+      ]
+    });
+  });
+});
+
+describe('getApplicableRules', () => {
+  test('Given a list of rules for .ts extension and a file with that extension, it should return the rules that apply to the file', () => {
+
+    const rules = {
+      directories: {
+        src: [],
+      },
+      extensions: {
+        '.ts': ["Always use export default"],
+        '.js': ["Always use export default"],
+        'tsx': [ 'Always use arrow functions' ],
+      },
+      global: ["Always be concise"],
+      ignore: [
+        ".git",
+        "node_modules",
+        "dist",
+        "build",
+        "_build",
+        "csv",
+        "json",
+      ]
+    };
+    
+    const result = getApplicableRules(rules, {
+      to: "src/main.ts",
+      chunks: [],
+      deletions: 0,
+      additions: 0
+    });
+
+    expect(result).toEqual([
+      "Always be concise",
+      "Always use export default",
+    ]);
+
+  });
+  test('Given a file that matches no extension or directory rules, it should return only the global rules', () => {
+
+    const rules = {
+      directories: {
+        src: [],
+      },
+      extensions: {
+        '.ts': ["Always use export default"],
+        '.js': ["Always use export default"],
+        'tsx': [ 'Always use arrow functions' ],
+      },
+      global: ["Always be concise"],
+      ignore: [
+        ".git",
+        "node_modules",
+        "dist",
+        "build",
+        "_build",
+        "csv",
+        "json",
+      ]
+    };
+    
+    const result = getApplicableRules(rules, {
+      to: "src/.git",
+      chunks: [],
+      deletions: 0,
+      additions: 0
+    });
+
+    expect(result).toEqual([
+      "Always be concise",
+    ]);
+
+  });
+  test('Given a file with rules that apply to its extension and directory, it should return all the rules', () => {
+
+    const rules = {
+      directories: {
+        "**/resolvers/**": ["Always write tests"],
+      },
+      extensions: {
+        '.ts': ["Always use export default"],
+        '.js': ["Always use export default"],
+        'tsx': [ 'Always use arrow functions' ],
+      },
+      global: ["Always be concise"],
+      ignore: [
+        ".git",
+        "node_modules",
+        "dist",
+        "build",
+        "_build",
+        "csv",
+        "json",
+      ]
+    };
+    
+    const result = getApplicableRules(rules, {
+      to: "lib/web_app/resolvers/bots.ts",
+      chunks: [],
+      deletions: 0,
+      additions: 0
+    });
+
+    expect(result).toEqual([
+      "Always be concise",
+      "Always use export default",
+      "Always write tests",
+    ]);
+
+  });
+});
\ No newline at end of file
diff --git a/src/main.ts b/src/main.ts
index 13677ae..bcf26db 100644
--- a/src/main.ts
+++ b/src/main.ts
@@ -4,6 +4,7 @@ import OpenAI from "openai";
 import { Octokit } from "@octokit/rest";
 import parseDiff, { Chunk, File } from "parse-diff";
 import minimatch from "minimatch";
+import { parse } from "yaml";
 
 const GITHUB_TOKEN: string = core.getInput("GITHUB_TOKEN");
 const OPENAI_API_KEY: string = core.getInput("OPENAI_API_KEY");
@@ -58,14 +59,15 @@ async function getDiff(
 
 async function analyzeCode(
   parsedDiff: File[],
-  prDetails: PRDetails
+  prDetails: PRDetails,
+  rules: RulesFile
 ): Promise<Array<{ body: string; path: string; line: number }>> {
   const comments: Array<{ body: string; path: string; line: number }> = [];
 
   for (const file of parsedDiff) {
     if (file.to === "/dev/null") continue; // Ignore deleted files
     for (const chunk of file.chunks) {
-      const prompt = createPrompt(file, chunk, prDetails);
+      const prompt = createPrompt(file, chunk, prDetails, rules);
       const aiResponse = await getAIResponse(prompt);
       if (aiResponse) {
         const newComments = createComment(file, chunk, aiResponse);
@@ -78,7 +80,31 @@ async function analyzeCode(
   return comments;
 }
 
-function createPrompt(file: File, chunk: Chunk, prDetails: PRDetails): string {
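+// Collect every rule that applies to a file: global rules plus any extension and directory rules whose patterns match.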
+function getApplicableRules(rules: RulesFile, file: File): string[] {
+  const { extensions, directories, global } = rules;
+  
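+  // Extension rules apply when the file path ends with the key (e.g. ".ts" or "tsx").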
+  const extensionsKeys = Object.keys(extensions);
+  const applicableExtensionKeys = extensionsKeys.filter((key: string) => file.to?.endsWith(key));
+  const extensionRules = applicableExtensionKeys.flatMap((key: string) => extensions[key]);
+
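+  // Directory rules apply when the glob pattern key matches the full file path.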
+  const directoriesKeys = Object.keys(directories);
+  const applicableDirectoriesKeys = directoriesKeys.filter((key: string) => 
+    file.to ? minimatch(file.to, key) : false
+  );
+  const directoryRules = applicableDirectoriesKeys.flatMap((key: string) => directories[key]);
+
+  return [...global, ...extensionRules, ...directoryRules];
+}
+
+interface AIReviewLine {
+  lineNumber: string;
+  reviewComment: string;
+  path: string;
+}
+
+function createPrompt(file: File, chunk: Chunk, prDetails: PRDetails, rules: RulesFile): string {
+  const applicableRules = getApplicableRules(rules, file);
+
   return `Your task is to review pull requests. Instructions:
 - Provide the response in following JSON format:  {"reviews": [{"lineNumber":  <line_number>, "reviewComment": "<review comment>"}]}
 - Do not give positive comments or compliments.
@@ -87,9 +113,13 @@ function createPrompt(file: File, chunk: Chunk, prDetails: PRDetails): string {
 - Use the given description only for the overall context and only comment the code.
 - IMPORTANT: NEVER suggest adding comments to the code.
 
-Review the following code diff in the file "${
-    file.to
-  }" and take the pull request title and description into account when writing the response.
+Review the following code diff in the file "${file.to}" and take the pull request title and description into account when writing the response.
+
+${applicableRules.length ?
+`Always check the following rules to write the review:
+  ${applicableRules.join("\n")}
+` : ""}
   
 Pull request title: ${prDetails.title}
 Pull request description:
@@ -103,17 +133,14 @@ Git diff to review:
 \`\`\`diff
 ${chunk.content}
 ${chunk.changes
-  // @ts-expect-error - ln and ln2 exists where needed
-  .map((c) => `${c.ln ? c.ln : c.ln2} ${c.content}`)
-  .join("\n")}
+      // @ts-expect-error - ln and ln2 exists where needed
+      .map((c) => `${c.ln ? c.ln : c.ln2} ${c.content}`)
+      .join("\n")}
 \`\`\`
 `;
 }
 
-async function getAIResponse(prompt: string): Promise<Array<{ lineNumber: string; reviewComment: string }> | null> {
+async function getAIResponse(prompt: string): Promise<Array<AIReviewLine> | null> {
   const queryConfig = {
     model: OPENAI_API_MODEL,
     temperature: 0.2,
@@ -126,10 +153,7 @@ async function getAIResponse(prompt: string): Promise
+  aiResponses: Array<AIReviewLine>
 ): Array<{ body: string; path: string; line: number }> {
-  return aiResponses.flatMap((aiResponse) => {
+  return aiResponses.flatMap((aiResponse: AIReviewLine) => {
     if (!file.to) {
       return [];
     }
@@ -221,18 +242,20 @@ async function main() {
 
   const parsedDiff = parseDiff(diff);
 
-  const excludePatterns = core
-    .getInput("exclude")
-    .split(",")
-    .map((s) => s.trim());
+  const rulesFileName = core
+    .getInput("rules_file_name")
+    .trim();
+
+  const rules = readRules(rulesFileName);
+  const { ignore: allIgnorePatterns = [] } = rules;
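+  // Skip any changed files that match an ignore glob before analysis.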
 
   const filteredDiff = parsedDiff.filter((file) => {
-    return !excludePatterns.some((pattern) =>
+    return !allIgnorePatterns.some((pattern) =>
       minimatch(file.to ?? "", pattern)
     );
   });
 
-  const comments = await analyzeCode(filteredDiff, prDetails);
+  const comments = await analyzeCode(filteredDiff, prDetails, rules);
   if (comments.length > 0) {
     await createReviewComment(
       prDetails.owner,
@@ -243,7 +266,50 @@ async function main() {
   }
 }
 
-main().catch((error) => {
-  console.error("Error:", error);
-  process.exit(1);
-});
+interface RulesFile {
+  directories: Record<string, Array<string>>;
+  extensions: Record<string, Array<string>>;
+  global: Array<string>;
+  ignore: Array<string>;
+}
+
+function readRules(fileName: string): RulesFile {
+    const fileContents = readFileSync(fileName, "utf8");
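+    // mapAsMap keeps sequence keys such as [".ts", ".js"] as real Map keys instead of stringified object keys.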
+    const parsed = parse(fileContents, { mapAsMap: true });
+    
+    // Transform the parsed Map into a plain object
+    const rules: RulesFile = {
+        global: parsed.get('global') || [],
+        extensions: {},
+        directories: {},
+        ignore: parsed.get('ignore') || []
+    };
+
+    // Handle extensions map
+    const extensionsMap = parsed.get('extensions');
+    if (extensionsMap instanceof Map) {
+        for (const [key, value] of extensionsMap.entries()) {
+            // If key is an array, create an entry for each extension
+            if (Array.isArray(key)) {
+                key.forEach(ext => {
+                    rules.extensions[ext] = value;
+                });
+            } else {
+                rules.extensions[key] = value;
+            }
+        }
+    }
+
+    // Handle directories map
+    const directoriesMap = parsed.get('directories');
+    if (directoriesMap instanceof Map) {
+        for (const [key, value] of directoriesMap.entries()) {
+            rules.directories[key] = value;
+        }
+    }
+
+    return rules;
+}
+
+export { readRules, getApplicableRules };
+export default main;
diff --git a/yarn.lock b/yarn.lock
index 0998992..729b170 100644
--- a/yarn.lock
+++ b/yarn.lock
@@ -17,6 +17,20 @@
   dependencies:
     tunnel "^0.0.6"
 
+"@babel/code-frame@^7.12.13":
+  version "7.26.2"
+  resolved "https://registry.yarnpkg.com/@babel/code-frame/-/code-frame-7.26.2.tgz#4b5fab97d33338eff916235055f0ebc21e573a85"
+  integrity sha512-RJlIHRueQgwWitWgF8OdFYGZX328Ax5BCemNGlqHfplnRT9ESi8JkFlvaVYbS+UubVY6dpv87Fs2u5M29iNFVQ==
+  dependencies:
+    "@babel/helper-validator-identifier" "^7.25.9"
+    js-tokens "^4.0.0"
+    picocolors "^1.0.0"
+
+"@babel/helper-validator-identifier@^7.25.9":
+  version "7.25.9"
+  resolved "https://registry.yarnpkg.com/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.9.tgz#24b64e2c3ec7cd3b3c547729b8d16871f22cbdc7"
+  integrity sha512-Ed61U6XJc3CVRfkERJWDz4dJwKe7iLmmJsbOGu9wSloNSFttHV0I8g6UAgb7qnK5ly5bGLPd4oXZlxCdANBOWQ==
+
 "@cspotcode/source-map-support@^0.8.0":
   version "0.8.1"
   resolved "https://registry.yarnpkg.com/@cspotcode/source-map-support/-/source-map-support-0.8.1.tgz#00629c35a688e05a88b1cda684fb9d5e73f000a1"
@@ -24,6 +38,147 @@
   dependencies:
     "@jridgewell/trace-mapping" "0.3.9"
 
+"@esbuild/aix-ppc64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/aix-ppc64/-/aix-ppc64-0.21.5.tgz#c7184a326533fcdf1b8ee0733e21c713b975575f"
+  integrity sha512-1SDgH6ZSPTlggy1yI6+Dbkiz8xzpHJEVAlF/AM1tHPLsf5STom9rwtjE4hKAF20FfXXNTFqEYXyJNWh1GiZedQ==
+
+"@esbuild/android-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/android-arm64/-/android-arm64-0.21.5.tgz#09d9b4357780da9ea3a7dfb833a1f1ff439b4052"
+  integrity sha512-c0uX9VAUBQ7dTDCjq+wdyGLowMdtR/GoC2U5IYk/7D1H1JYC0qseD7+11iMP2mRLN9RcCMRcjC4YMclCzGwS/A==
+
+"@esbuild/android-arm@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/android-arm/-/android-arm-0.21.5.tgz#9b04384fb771926dfa6d7ad04324ecb2ab9b2e28"
+  integrity sha512-vCPvzSjpPHEi1siZdlvAlsPxXl7WbOVUBBAowWug4rJHb68Ox8KualB+1ocNvT5fjv6wpkX6o/iEpbDrf68zcg==
+
+"@esbuild/android-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/android-x64/-/android-x64-0.21.5.tgz#29918ec2db754cedcb6c1b04de8cd6547af6461e"
+  integrity sha512-D7aPRUUNHRBwHxzxRvp856rjUHRFW1SdQATKXH2hqA0kAZb1hKmi02OpYRacl0TxIGz/ZmXWlbZgjwWYaCakTA==
+
+"@esbuild/darwin-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/darwin-arm64/-/darwin-arm64-0.21.5.tgz#e495b539660e51690f3928af50a76fb0a6ccff2a"
+  integrity sha512-DwqXqZyuk5AiWWf3UfLiRDJ5EDd49zg6O9wclZ7kUMv2WRFr4HKjXp/5t8JZ11QbQfUS6/cRCKGwYhtNAY88kQ==
+
+"@esbuild/darwin-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/darwin-x64/-/darwin-x64-0.21.5.tgz#c13838fa57372839abdddc91d71542ceea2e1e22"
+  integrity sha512-se/JjF8NlmKVG4kNIuyWMV/22ZaerB+qaSi5MdrXtd6R08kvs2qCN4C09miupktDitvh8jRFflwGFBQcxZRjbw==
+
+"@esbuild/freebsd-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/freebsd-arm64/-/freebsd-arm64-0.21.5.tgz#646b989aa20bf89fd071dd5dbfad69a3542e550e"
+  integrity sha512-5JcRxxRDUJLX8JXp/wcBCy3pENnCgBR9bN6JsY4OmhfUtIHe3ZW0mawA7+RDAcMLrMIZaf03NlQiX9DGyB8h4g==
+
+"@esbuild/freebsd-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/freebsd-x64/-/freebsd-x64-0.21.5.tgz#aa615cfc80af954d3458906e38ca22c18cf5c261"
+  integrity sha512-J95kNBj1zkbMXtHVH29bBriQygMXqoVQOQYA+ISs0/2l3T9/kj42ow2mpqerRBxDJnmkUDCaQT/dfNXWX/ZZCQ==
+
+"@esbuild/linux-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-arm64/-/linux-arm64-0.21.5.tgz#70ac6fa14f5cb7e1f7f887bcffb680ad09922b5b"
+  integrity sha512-ibKvmyYzKsBeX8d8I7MH/TMfWDXBF3db4qM6sy+7re0YXya+K1cem3on9XgdT2EQGMu4hQyZhan7TeQ8XkGp4Q==
+
+"@esbuild/linux-arm@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-arm/-/linux-arm-0.21.5.tgz#fc6fd11a8aca56c1f6f3894f2bea0479f8f626b9"
+  integrity sha512-bPb5AHZtbeNGjCKVZ9UGqGwo8EUu4cLq68E95A53KlxAPRmUyYv2D6F0uUI65XisGOL1hBP5mTronbgo+0bFcA==
+
+"@esbuild/linux-ia32@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-ia32/-/linux-ia32-0.21.5.tgz#3271f53b3f93e3d093d518d1649d6d68d346ede2"
+  integrity sha512-YvjXDqLRqPDl2dvRODYmmhz4rPeVKYvppfGYKSNGdyZkA01046pLWyRKKI3ax8fbJoK5QbxblURkwK/MWY18Tg==
+
+"@esbuild/linux-loong64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-loong64/-/linux-loong64-0.21.5.tgz#ed62e04238c57026aea831c5a130b73c0f9f26df"
+  integrity sha512-uHf1BmMG8qEvzdrzAqg2SIG/02+4/DHB6a9Kbya0XDvwDEKCoC8ZRWI5JJvNdUjtciBGFQ5PuBlpEOXQj+JQSg==
+
+"@esbuild/linux-mips64el@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-mips64el/-/linux-mips64el-0.21.5.tgz#e79b8eb48bf3b106fadec1ac8240fb97b4e64cbe"
+  integrity sha512-IajOmO+KJK23bj52dFSNCMsz1QP1DqM6cwLUv3W1QwyxkyIWecfafnI555fvSGqEKwjMXVLokcV5ygHW5b3Jbg==
+
+"@esbuild/linux-ppc64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-ppc64/-/linux-ppc64-0.21.5.tgz#5f2203860a143b9919d383ef7573521fb154c3e4"
+  integrity sha512-1hHV/Z4OEfMwpLO8rp7CvlhBDnjsC3CttJXIhBi+5Aj5r+MBvy4egg7wCbe//hSsT+RvDAG7s81tAvpL2XAE4w==
+
+"@esbuild/linux-riscv64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-riscv64/-/linux-riscv64-0.21.5.tgz#07bcafd99322d5af62f618cb9e6a9b7f4bb825dc"
+  integrity sha512-2HdXDMd9GMgTGrPWnJzP2ALSokE/0O5HhTUvWIbD3YdjME8JwvSCnNGBnTThKGEB91OZhzrJ4qIIxk/SBmyDDA==
+
+"@esbuild/linux-s390x@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-s390x/-/linux-s390x-0.21.5.tgz#b7ccf686751d6a3e44b8627ababc8be3ef62d8de"
+  integrity sha512-zus5sxzqBJD3eXxwvjN1yQkRepANgxE9lgOW2qLnmr8ikMTphkjgXu1HR01K4FJg8h1kEEDAqDcZQtbrRnB41A==
+
+"@esbuild/linux-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/linux-x64/-/linux-x64-0.21.5.tgz#6d8f0c768e070e64309af8004bb94e68ab2bb3b0"
+  integrity sha512-1rYdTpyv03iycF1+BhzrzQJCdOuAOtaqHTWJZCWvijKD2N5Xu0TtVC8/+1faWqcP9iBCWOmjmhoH94dH82BxPQ==
+
+"@esbuild/netbsd-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/netbsd-x64/-/netbsd-x64-0.21.5.tgz#bbe430f60d378ecb88decb219c602667387a6047"
+  integrity sha512-Woi2MXzXjMULccIwMnLciyZH4nCIMpWQAs049KEeMvOcNADVxo0UBIQPfSmxB3CWKedngg7sWZdLvLczpe0tLg==
+
+"@esbuild/openbsd-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/openbsd-x64/-/openbsd-x64-0.21.5.tgz#99d1cf2937279560d2104821f5ccce220cb2af70"
+  integrity sha512-HLNNw99xsvx12lFBUwoT8EVCsSvRNDVxNpjZ7bPn947b8gJPzeHWyNVhFsaerc0n3TsbOINvRP2byTZ5LKezow==
+
+"@esbuild/sunos-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/sunos-x64/-/sunos-x64-0.21.5.tgz#08741512c10d529566baba837b4fe052c8f3487b"
+  integrity sha512-6+gjmFpfy0BHU5Tpptkuh8+uw3mnrvgs+dSPQXQOv3ekbordwnzTVEb4qnIvQcYXq6gzkyTnoZ9dZG+D4garKg==
+
+"@esbuild/win32-arm64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/win32-arm64/-/win32-arm64-0.21.5.tgz#675b7385398411240735016144ab2e99a60fc75d"
+  integrity sha512-Z0gOTd75VvXqyq7nsl93zwahcTROgqvuAcYDUr+vOv8uHhNSKROyU961kgtCD1e95IqPKSQKH7tBTslnS3tA8A==
+
+"@esbuild/win32-ia32@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/win32-ia32/-/win32-ia32-0.21.5.tgz#1bfc3ce98aa6ca9a0969e4d2af72144c59c1193b"
+  integrity sha512-SWXFF1CL2RVNMaVs+BBClwtfZSvDgtL//G/smwAc5oVK/UPu2Gu9tIaRgFmYFFKrmg3SyAjSrElf0TiJ1v8fYA==
+
+"@esbuild/win32-x64@0.21.5":
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/@esbuild/win32-x64/-/win32-x64-0.21.5.tgz#acad351d582d157bb145535db2a6ff53dd514b5c"
+  integrity sha512-tQd/1efJuzPC6rCFwEvLtci/xNFcTZknmXs98FYDfGE4wP9ClFV98nyKrzJKVPMhdDnjzLhdUyMX4PsQAPjwIw==
+
+"@jest/expect-utils@^29.7.0":
+  version "29.7.0"
+  resolved "https://registry.yarnpkg.com/@jest/expect-utils/-/expect-utils-29.7.0.tgz#023efe5d26a8a70f21677d0a1afc0f0a44e3a1c6"
+  integrity sha512-GlsNBWiFQFCVi9QVSx7f5AgMeLxe9YCCs5PuP2O2LdjDAA8Jh9eX7lA1Jq/xdXw3Wb3hyvlFNfZIfcRetSzYcA==
+  dependencies:
+    jest-get-type "^29.6.3"
+
+"@jest/schemas@^29.6.3":
+  version "29.6.3"
+  resolved "https://registry.yarnpkg.com/@jest/schemas/-/schemas-29.6.3.tgz#430b5ce8a4e0044a7e3819663305a7b3091c8e03"
+  integrity sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==
+  dependencies:
+    "@sinclair/typebox" "^0.27.8"
+
+"@jest/types@^29.6.3":
+  version "29.6.3"
+  resolved "https://registry.yarnpkg.com/@jest/types/-/types-29.6.3.tgz#1131f8cf634e7e84c5e77bab12f052af585fba59"
+  integrity sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==
+  dependencies:
+    "@jest/schemas" "^29.6.3"
+    "@types/istanbul-lib-coverage" "^2.0.0"
+    "@types/istanbul-reports" "^3.0.0"
+    "@types/node" "*"
+    "@types/yargs" "^17.0.8"
+    chalk "^4.0.0"
+
 "@jridgewell/resolve-uri@^3.0.3":
   version "3.1.0"
   resolved "https://registry.yarnpkg.com/@jridgewell/resolve-uri/-/resolve-uri-3.1.0.tgz#2203b118c157721addfe69d47b70465463066d78"
@@ -34,6 +189,11 @@
   resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.14.tgz#add4c98d341472a289190b424efbdb096991bb24"
   integrity sha512-XPSJHWmi394fuUuzDnGz1wiKqWfo1yXecHQMRf2l6hztTO+nPru658AyDngaBe7isIxEkRsPR3FZh+s7iVa4Uw==
 
+"@jridgewell/sourcemap-codec@^1.5.0":
+  version "1.5.0"
+  resolved "https://registry.yarnpkg.com/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz#3188bcb273a414b0d215fd22a58540b989b9409a"
+  integrity sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==
+
 "@jridgewell/trace-mapping@0.3.9":
   version "0.3.9"
   resolved "https://registry.yarnpkg.com/@jridgewell/trace-mapping/-/trace-mapping-0.3.9.tgz#6534fd5933a53ba7cbf3a17615e273a0d1273ff9"
@@ -143,6 +303,101 @@
   dependencies:
     "@octokit/openapi-types" "^16.0.0"
 
+"@rollup/rollup-android-arm-eabi@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.27.4.tgz#e3c9cc13f144ba033df4d2c3130a214dc8e3473e"
+  integrity sha512-2Y3JT6f5MrQkICUyRVCw4oa0sutfAsgaSsb0Lmmy1Wi2y7X5vT9Euqw4gOsCyy0YfKURBg35nhUKZS4mDcfULw==
+
+"@rollup/rollup-android-arm64@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.27.4.tgz#0474250fcb5871aca952e249a0c3270fc4310b55"
+  integrity sha512-wzKRQXISyi9UdCVRqEd0H4cMpzvHYt1f/C3CoIjES6cG++RHKhrBj2+29nPF0IB5kpy9MS71vs07fvrNGAl/iA==
+
+"@rollup/rollup-darwin-arm64@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.27.4.tgz#77c29b4f9c430c1624f1a6835f2a7e82be3d16f2"
+  integrity sha512-PlNiRQapift4LNS8DPUHuDX/IdXiLjf8mc5vdEmUR0fF/pyy2qWwzdLjB+iZquGr8LuN4LnUoSEvKRwjSVYz3Q==
+
+"@rollup/rollup-darwin-x64@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.27.4.tgz#7d87711f641a458868758cbf110fb32eabd6a25a"
+  integrity sha512-o9bH2dbdgBDJaXWJCDTNDYa171ACUdzpxSZt+u/AAeQ20Nk5x+IhA+zsGmrQtpkLiumRJEYef68gcpn2ooXhSQ==
+
+"@rollup/rollup-freebsd-arm64@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-arm64/-/rollup-freebsd-arm64-4.27.4.tgz#662f808d2780e4e91021ac9ee7ed800862bb9a57"
+  integrity sha512-NBI2/i2hT9Q+HySSHTBh52da7isru4aAAo6qC3I7QFVsuhxi2gM8t/EI9EVcILiHLj1vfi+VGGPaLOUENn7pmw==
+
+"@rollup/rollup-freebsd-x64@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-freebsd-x64/-/rollup-freebsd-x64-4.27.4.tgz#71e5a7bcfcbe51d8b65d158675acec1307edea79"
+  integrity sha512-wYcC5ycW2zvqtDYrE7deary2P2UFmSh85PUpAx+dwTCO9uw3sgzD6Gv9n5X4vLaQKsrfTSZZ7Z7uynQozPVvWA==
+
+"@rollup/rollup-linux-arm-gnueabihf@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.27.4.tgz#08f67fcec61ee18f8b33b3f403a834ab8f3aa75d"
+  integrity sha512-9OwUnK/xKw6DyRlgx8UizeqRFOfi9mf5TYCw1uolDaJSbUmBxP85DE6T4ouCMoN6pXw8ZoTeZCSEfSaYo+/s1w==
+
+"@rollup/rollup-linux-arm-musleabihf@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.27.4.tgz#2e1ad4607f86475b1731556359c6070eb8f4b109"
+  integrity sha512-Vgdo4fpuphS9V24WOV+KwkCVJ72u7idTgQaBoLRD0UxBAWTF9GWurJO9YD9yh00BzbkhpeXtm6na+MvJU7Z73A==
+
+"@rollup/rollup-linux-arm64-gnu@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.27.4.tgz#c65d559dcb0d3dabea500cf7b8215959ae6cccf8"
+  integrity sha512-pleyNgyd1kkBkw2kOqlBx+0atfIIkkExOTiifoODo6qKDSpnc6WzUY5RhHdmTdIJXBdSnh6JknnYTtmQyobrVg==
+
+"@rollup/rollup-linux-arm64-musl@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.27.4.tgz#6739f7eb33e20466bb88748519c98ce8dee23922"
+  integrity sha512-caluiUXvUuVyCHr5DxL8ohaaFFzPGmgmMvwmqAITMpV/Q+tPoaHZ/PWa3t8B2WyoRcIIuu1hkaW5KkeTDNSnMA==
+
+"@rollup/rollup-linux-powerpc64le-gnu@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.27.4.tgz#8d9fe9471c256e55278cb1f7b1c977cd8fe6df20"
+  integrity sha512-FScrpHrO60hARyHh7s1zHE97u0KlT/RECzCKAdmI+LEoC1eDh/RDji9JgFqyO+wPDb86Oa/sXkily1+oi4FzJQ==
+
+"@rollup/rollup-linux-riscv64-gnu@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.27.4.tgz#9a467f7ad5b61c9d66b24e79a3c57cb755d02c35"
+  integrity sha512-qyyprhyGb7+RBfMPeww9FlHwKkCXdKHeGgSqmIXw9VSUtvyFZ6WZRtnxgbuz76FK7LyoN8t/eINRbPUcvXB5fw==
+
+"@rollup/rollup-linux-s390x-gnu@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.27.4.tgz#efaddf22df27b87a267a731fbeb9539e92cd4527"
+  integrity sha512-PFz+y2kb6tbh7m3A7nA9++eInGcDVZUACulf/KzDtovvdTizHpZaJty7Gp0lFwSQcrnebHOqxF1MaKZd7psVRg==
+
+"@rollup/rollup-linux-x64-gnu@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.27.4.tgz#a959eccb04b07fd1591d7ff745a6865faa7042cd"
+  integrity sha512-Ni8mMtfo+o/G7DVtweXXV/Ol2TFf63KYjTtoZ5f078AUgJTmaIJnj4JFU7TK/9SVWTaSJGxPi5zMDgK4w+Ez7Q==
+
+"@rollup/rollup-linux-x64-musl@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.27.4.tgz#927764f1da1f2dd50943716dec93796d10cb6e99"
+  integrity sha512-5AeeAF1PB9TUzD+3cROzFTnAJAcVUGLuR8ng0E0WXGkYhp6RD6L+6szYVX+64Rs0r72019KHZS1ka1q+zU/wUw==
+
+"@rollup/rollup-win32-arm64-msvc@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.27.4.tgz#030b6cc607d845da23dced624e47fb45de105840"
+  integrity sha512-yOpVsA4K5qVwu2CaS3hHxluWIK5HQTjNV4tWjQXluMiiiu4pJj4BN98CvxohNCpcjMeTXk/ZMJBRbgRg8HBB6A==
+
+"@rollup/rollup-win32-ia32-msvc@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.27.4.tgz#3457a3f44a84f51d8097c3606429e01f0d2d0ec2"
+  integrity sha512-KtwEJOaHAVJlxV92rNYiG9JQwQAdhBlrjNRp7P9L8Cb4Rer3in+0A+IPhJC9y68WAi9H0sX4AiG2NTsVlmqJeQ==
+
+"@rollup/rollup-win32-x64-msvc@4.27.4":
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.27.4.tgz#67d516613c9f2fe42e2d8b78e252d0003179d92c"
+  integrity sha512-3j4jx1TppORdTAoBJRd+/wJRGCPC0ETWkXOecJ6PPZLj6SptXkrXcNqdj0oclbKML6FkQltdz7bBA3rUSirZug==
+
+"@sinclair/typebox@^0.27.8":
+  version "0.27.8"
+  resolved "https://registry.yarnpkg.com/@sinclair/typebox/-/typebox-0.27.8.tgz#6667fac16c436b5434a387a34dedb013198f6e6e"
+  integrity sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==
+
 "@tsconfig/node10@^1.0.7":
   version "1.0.9"
   resolved "https://registry.yarnpkg.com/@tsconfig/node10/-/node10-1.0.9.tgz#df4907fc07a886922637b15e02d4cebc4c0021b2"
@@ -163,6 +418,38 @@
   resolved "https://registry.yarnpkg.com/@tsconfig/node16/-/node16-1.0.3.tgz#472eaab5f15c1ffdd7f8628bd4c4f753995ec79e"
   integrity sha512-yOlFc+7UtL/89t2ZhjPvvB/DeAr3r+Dq58IgzsFkOAvVC6NMJXmCGjbptdXdR9qsX7pKcTL+s87FtYREi2dEEQ==
 
+"@types/estree@1.0.6", "@types/estree@^1.0.0":
+  version "1.0.6"
+  resolved "https://registry.yarnpkg.com/@types/estree/-/estree-1.0.6.tgz#628effeeae2064a1b4e79f78e81d87b7e5fc7b50"
+  integrity sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==
+
+"@types/istanbul-lib-coverage@*", "@types/istanbul-lib-coverage@^2.0.0":
+  version "2.0.6"
+  resolved "https://registry.yarnpkg.com/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz#7739c232a1fee9b4d3ce8985f314c0c6d33549d7"
+  integrity sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==
+
+"@types/istanbul-lib-report@*":
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz#53047614ae72e19fc0401d872de3ae2b4ce350bf"
+  integrity sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==
+  dependencies:
+    "@types/istanbul-lib-coverage" "*"
+
+"@types/istanbul-reports@^3.0.0":
+  version "3.0.4"
+  resolved "https://registry.yarnpkg.com/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz#0f03e3d2f670fbdac586e34b433783070cc16f54"
+  integrity sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==
+  dependencies:
+    "@types/istanbul-lib-report" "*"
+
+"@types/jest@^29.0.0":
+  version "29.5.14"
+  resolved "https://registry.yarnpkg.com/@types/jest/-/jest-29.5.14.tgz#2b910912fa1d6856cadcd0c1f95af7df1d6049e5"
+  integrity sha512-ZN+4sdnLUbo8EVvVc2ao0GFW6oVrQRPn4K2lglySj7APvSrgzxHiNNK99us4WDMi57xxA2yggblIAMNhXOotLQ==
+  dependencies:
+    expect "^29.0.0"
+    pretty-format "^29.0.0"
+
 "@types/node-fetch@^2.6.4":
   version "2.6.9"
   resolved "https://registry.yarnpkg.com/@types/node-fetch/-/node-fetch-2.6.9.tgz#15f529d247f1ede1824f7e7acdaa192d5f28071e"
@@ -190,11 +477,87 @@
   resolved "https://registry.yarnpkg.com/@types/node/-/node-18.15.5.tgz#3af577099a99c61479149b716183e70b5239324a"
   integrity sha512-Ark2WDjjZO7GmvsyFFf81MXuGTA/d6oP38anyxWOL6EREyBKAxKoFHwBhaZxCfLRLpO8JgVXwqOwSwa7jRcjew==
 
+"@types/stack-utils@^2.0.0":
+  version "2.0.3"
+  resolved "https://registry.yarnpkg.com/@types/stack-utils/-/stack-utils-2.0.3.tgz#6209321eb2c1712a7e7466422b8cb1fc0d9dd5d8"
+  integrity sha512-9aEbYZ3TbYMznPdcdr3SmIrLXwC/AKZXQeCf9Pgao5CKb8CyHuEX5jzWPTkvregvhRJHcpRO6BFoGW9ycaOkYw==
+
+"@types/yargs-parser@*":
+  version "21.0.3"
+  resolved "https://registry.yarnpkg.com/@types/yargs-parser/-/yargs-parser-21.0.3.tgz#815e30b786d2e8f0dcd85fd5bcf5e1a04d008f15"
+  integrity sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==
+
+"@types/yargs@^17.0.8":
+  version "17.0.33"
+  resolved "https://registry.yarnpkg.com/@types/yargs/-/yargs-17.0.33.tgz#8c32303da83eec050a84b3c7ae7b9f922d13e32d"
+  integrity sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==
+  dependencies:
+    "@types/yargs-parser" "*"
+
 "@vercel/ncc@^0.36.1":
   version "0.36.1"
   resolved "https://registry.yarnpkg.com/@vercel/ncc/-/ncc-0.36.1.tgz#d4c01fdbbe909d128d1bf11c7f8b5431654c5b95"
   integrity sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==
 
+"@vitest/expect@2.1.5":
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/@vitest/expect/-/expect-2.1.5.tgz#5a6afa6314cae7a61847927bb5bc038212ca7381"
+  integrity sha512-nZSBTW1XIdpZvEJyoP/Sy8fUg0b8od7ZpGDkTUcfJ7wz/VoZAFzFfLyxVxGFhUjJzhYqSbIpfMtl/+k/dpWa3Q==
+  dependencies:
+    "@vitest/spy" "2.1.5"
+    "@vitest/utils" "2.1.5"
+    chai "^5.1.2"
+    tinyrainbow "^1.2.0"
+
+"@vitest/mocker@2.1.5":
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/@vitest/mocker/-/mocker-2.1.5.tgz#54ee50648bc0bb606dfc58e13edfacb8b9208324"
+  integrity sha512-XYW6l3UuBmitWqSUXTNXcVBUCRytDogBsWuNXQijc00dtnU/9OqpXWp4OJroVrad/gLIomAq9aW8yWDBtMthhQ==
+  dependencies:
+    "@vitest/spy" "2.1.5"
+    estree-walker "^3.0.3"
+    magic-string "^0.30.12"
+
+"@vitest/pretty-format@2.1.5", "@vitest/pretty-format@^2.1.5":
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/@vitest/pretty-format/-/pretty-format-2.1.5.tgz#bc79b8826d4a63dc04f2a75d2944694039fa50aa"
+  integrity sha512-4ZOwtk2bqG5Y6xRGHcveZVr+6txkH7M2e+nPFd6guSoN638v/1XQ0K06eOpi0ptVU/2tW/pIU4IoPotY/GZ9fw==
+  dependencies:
+    tinyrainbow "^1.2.0"
+
+"@vitest/runner@2.1.5":
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/@vitest/runner/-/runner-2.1.5.tgz#4d5e2ba2dfc0af74e4b0f9f3f8be020559b26ea9"
+  integrity sha512-pKHKy3uaUdh7X6p1pxOkgkVAFW7r2I818vHDthYLvUyjRfkKOU6P45PztOch4DZarWQne+VOaIMwA/erSSpB9g==
+  dependencies:
+    "@vitest/utils" "2.1.5"
+    pathe "^1.1.2"
+
+"@vitest/snapshot@2.1.5":
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/@vitest/snapshot/-/snapshot-2.1.5.tgz#a09a8712547452a84e08b3ec97b270d9cc156b4f"
+  integrity sha512-zmYw47mhfdfnYbuhkQvkkzYroXUumrwWDGlMjpdUr4jBd3HZiV2w7CQHj+z7AAS4VOtWxI4Zt4bWt4/sKcoIjg==
+  dependencies:
+    "@vitest/pretty-format" "2.1.5"
+    magic-string "^0.30.12"
+    pathe "^1.1.2"
+
+"@vitest/spy@2.1.5":
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/@vitest/spy/-/spy-2.1.5.tgz#f790d1394a5030644217ce73562e92465e83147e"
+  integrity sha512-aWZF3P0r3w6DiYTVskOYuhBc7EMc3jvn1TkBg8ttylFFRqNN2XGD7V5a4aQdk6QiUzZQ4klNBSpCLJgWNdIiNw==
+  dependencies:
+    tinyspy "^3.0.2"
+
+"@vitest/utils@2.1.5":
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/@vitest/utils/-/utils-2.1.5.tgz#0e19ce677c870830a1573d33ee86b0d6109e9546"
+  integrity sha512-yfj6Yrp0Vesw2cwJbP+cl04OC+IHFsuQsrsJBL9pyGeQXE56v1UAOQco+SR55Vf1nQzfV0QJg1Qum7AaWUwwYg==
+  dependencies:
+    "@vitest/pretty-format" "2.1.5"
+    loupe "^3.1.2"
+    tinyrainbow "^1.2.0"
+
 abort-controller@^3.0.0:
   version "3.0.0"
   resolved "https://registry.yarnpkg.com/abort-controller/-/abort-controller-3.0.0.tgz#eaf54d53b62bae4138e809ca225c8439a6efb392"
@@ -219,11 +582,28 @@ agentkeepalive@^4.2.1:
   dependencies:
     humanize-ms "^1.2.1"
 
+ansi-styles@^4.1.0:
+  version "4.3.0"
+  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-4.3.0.tgz#edd803628ae71c04c85ae7a0906edad34b648937"
+  integrity sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==
+  dependencies:
+    color-convert "^2.0.1"
+
+ansi-styles@^5.0.0:
+  version "5.2.0"
+  resolved "https://registry.yarnpkg.com/ansi-styles/-/ansi-styles-5.2.0.tgz#07449690ad45777d1924ac2abb2fc8895dba836b"
+  integrity sha512-Cxwpt2SfTzTtXcfOlzGEee8O+c+MmUgGrNiBcXnuWxuFJHe6a5Hz7qwhwe5OgaSYI0IJvkLqWX1ASG+cJOkEiA==
+
 arg@^4.1.0:
   version "4.1.3"
   resolved "https://registry.yarnpkg.com/arg/-/arg-4.1.3.tgz#269fc7ad5b8e42cb63c896d5666017261c144089"
   integrity sha512-58S9QDqG0Xx27YwPSt9fJxivjYl432YCwfDMfZ+71RAqUrZef7LrKQZ3LHLOwCS4FLNBplP533Zx895SeOCHvA==
 
+assertion-error@^2.0.1:
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/assertion-error/-/assertion-error-2.0.1.tgz#f641a196b335690b1070bf00b6e7593fec190bf7"
+  integrity sha512-Izi8RQcffqCeNVgFigKli1ssklIbpHnCYc6AknXGYoB6grJqyeby7jv12JUQgmTAnIDnbck1uxksT4dzN3PWBA==
+
 asynckit@^0.4.0:
   version "0.4.0"
   resolved "https://registry.yarnpkg.com/asynckit/-/asynckit-0.4.0.tgz#c79ed97f7f34cb8f2ba1bc9790bcc366474b4b79"
@@ -251,11 +631,64 @@ brace-expansion@^2.0.1:
   dependencies:
     balanced-match "^1.0.0"
 
+braces@^3.0.3:
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/braces/-/braces-3.0.3.tgz#490332f40919452272d55a8480adc0c441358789"
+  integrity sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==
+  dependencies:
+    fill-range "^7.1.1"
+
+cac@^6.7.14:
+  version "6.7.14"
+  resolved "https://registry.yarnpkg.com/cac/-/cac-6.7.14.tgz#804e1e6f506ee363cb0e3ccbb09cad5dd9870959"
+  integrity sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==
+
+chai@^5.1.2:
+  version "5.1.2"
+  resolved "https://registry.yarnpkg.com/chai/-/chai-5.1.2.tgz#3afbc340b994ae3610ca519a6c70ace77ad4378d"
+  integrity sha512-aGtmf24DW6MLHHG5gCx4zaI3uBq3KRtxeVs0DjFH6Z0rDNbsvTxFASFvdj79pxjxZ8/5u3PIiN3IwEIQkiiuPw==
+  dependencies:
+    assertion-error "^2.0.1"
+    check-error "^2.1.1"
+    deep-eql "^5.0.1"
+    loupe "^3.1.0"
+    pathval "^2.0.0"
+
+chalk@^4.0.0:
+  version "4.1.2"
+  resolved "https://registry.yarnpkg.com/chalk/-/chalk-4.1.2.tgz#aac4e2b7734a740867aeb16bf02aad556a1e7a01"
+  integrity sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==
+  dependencies:
+    ansi-styles "^4.1.0"
+    supports-color "^7.1.0"
+
 charenc@0.0.2:
   version "0.0.2"
   resolved "https://registry.yarnpkg.com/charenc/-/charenc-0.0.2.tgz#c0a1d2f3a7092e03774bfa83f14c0fc5790a8667"
   integrity sha512-yrLQ/yVUFXkzg7EDQsPieE/53+0RlaWTs+wBrvW36cyilJ2SaDWfl4Yj7MtLTXleV9uEKefbAGUPv2/iWSooRA==
 
+check-error@^2.1.1:
+  version "2.1.1"
+  resolved "https://registry.yarnpkg.com/check-error/-/check-error-2.1.1.tgz#87eb876ae71ee388fa0471fe423f494be1d96ccc"
+  integrity sha512-OAlb+T7V4Op9OwdkjmguYRqncdlx5JiofwOAUkmTF+jNdHwzTaTs4sRAGpzLF3oOz5xAyDGrPgeIDFQmDOTiJw==
+
+ci-info@^3.2.0:
+  version "3.9.0"
+  resolved "https://registry.yarnpkg.com/ci-info/-/ci-info-3.9.0.tgz#4279a62028a7b1f262f3473fc9605f5e218c59b4"
+  integrity sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==
+
+color-convert@^2.0.1:
+  version "2.0.1"
+  resolved "https://registry.yarnpkg.com/color-convert/-/color-convert-2.0.1.tgz#72d3a68d598c9bdb3af2ad1e84f21d896abd4de3"
+  integrity sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==
+  dependencies:
+    color-name "~1.1.4"
+
+color-name@~1.1.4:
+  version "1.1.4"
+  resolved "https://registry.yarnpkg.com/color-name/-/color-name-1.1.4.tgz#c2a09a87acbde69543de6f63fa3995c826c536a2"
+  integrity sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==
+
 combined-stream@^1.0.8:
   version "1.0.8"
   resolved "https://registry.yarnpkg.com/combined-stream/-/combined-stream-1.0.8.tgz#c3d45a8b34fd730631a110a8a2520682b31d5a7f"
@@ -273,6 +706,18 @@ crypt@0.0.2:
   resolved "https://registry.yarnpkg.com/crypt/-/crypt-0.0.2.tgz#88d7ff7ec0dfb86f713dc87bbb42d044d3e6c41b"
   integrity sha512-mCxBlsHFYh9C+HVpiEacem8FEBnMXgU9gy4zmNC+SXAZNB/1idgp/aulFJ4FgCi7GPEVbfyng092GqL2k2rmow==
 
+debug@^4.3.7:
+  version "4.3.7"
+  resolved "https://registry.yarnpkg.com/debug/-/debug-4.3.7.tgz#87945b4151a011d76d95a198d7111c865c360a52"
+  integrity sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==
+  dependencies:
+    ms "^2.1.3"
+
+deep-eql@^5.0.1:
+  version "5.0.2"
+  resolved "https://registry.yarnpkg.com/deep-eql/-/deep-eql-5.0.2.tgz#4b756d8d770a9257300825d52a2c2cff99c3a341"
+  integrity sha512-h5k/5U50IJJFpzfL6nO9jaaumfjO/f2NjK/oYB2Djzm4p9L+3T9qWpZqZ2hAbLPuuYq9wrU08WQyBTL5GbPk5Q==
+
 delayed-stream@~1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/delayed-stream/-/delayed-stream-1.0.0.tgz#df3ae199acadfb7d440aaae0b29e2272b24ec619"
@@ -283,6 +728,11 @@ deprecation@^2.0.0, deprecation@^2.3.1:
   resolved "https://registry.yarnpkg.com/deprecation/-/deprecation-2.3.1.tgz#6368cbdb40abf3373b525ac87e4a260c3a700919"
   integrity sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==
 
+diff-sequences@^29.6.3:
+  version "29.6.3"
+  resolved "https://registry.yarnpkg.com/diff-sequences/-/diff-sequences-29.6.3.tgz#4deaf894d11407c51efc8418012f9e70b84ea921"
+  integrity sha512-EjePK1srD3P08o2j4f0ExnylqRs5B9tJjcp9t1krH2qRi8CCdsYfwe9JgSLurFBWwq4uOlipzfk5fHNvwFKr8Q==
+
 diff@^4.0.1:
   version "4.0.2"
   resolved "https://registry.yarnpkg.com/diff/-/diff-4.0.2.tgz#60f3aecb89d5fae520c11aa19efc2bb982aade7d"
@@ -296,11 +746,80 @@ digest-fetch@^1.3.0:
     base-64 "^0.1.0"
     md5 "^2.3.0"
 
+es-module-lexer@^1.5.4:
+  version "1.5.4"
+  resolved "https://registry.yarnpkg.com/es-module-lexer/-/es-module-lexer-1.5.4.tgz#a8efec3a3da991e60efa6b633a7cad6ab8d26b78"
+  integrity sha512-MVNK56NiMrOwitFB7cqDwq0CQutbw+0BvLshJSse0MUNU+y1FC3bUS/AQg7oUng+/wKrrki7JfmwtVHkVfPLlw==
+
+esbuild@^0.21.3:
+  version "0.21.5"
+  resolved "https://registry.yarnpkg.com/esbuild/-/esbuild-0.21.5.tgz#9ca301b120922959b766360d8ac830da0d02997d"
+  integrity sha512-mg3OPMV4hXywwpoDxu3Qda5xCKQi+vCTZq8S9J/EpkhB2HzKXq4SNFZE3+NK93JYxc8VMSep+lOUSC/RVKaBqw==
+  optionalDependencies:
+    "@esbuild/aix-ppc64" "0.21.5"
+    "@esbuild/android-arm" "0.21.5"
+    "@esbuild/android-arm64" "0.21.5"
+    "@esbuild/android-x64" "0.21.5"
+    "@esbuild/darwin-arm64" "0.21.5"
+    "@esbuild/darwin-x64" "0.21.5"
+    "@esbuild/freebsd-arm64" "0.21.5"
+    "@esbuild/freebsd-x64" "0.21.5"
+    "@esbuild/linux-arm" "0.21.5"
+    "@esbuild/linux-arm64" "0.21.5"
+    "@esbuild/linux-ia32" "0.21.5"
+    "@esbuild/linux-loong64" "0.21.5"
+    "@esbuild/linux-mips64el" "0.21.5"
+    "@esbuild/linux-ppc64" "0.21.5"
+    "@esbuild/linux-riscv64" "0.21.5"
+    "@esbuild/linux-s390x" "0.21.5"
+    "@esbuild/linux-x64" "0.21.5"
+    "@esbuild/netbsd-x64" "0.21.5"
+    "@esbuild/openbsd-x64" "0.21.5"
+    "@esbuild/sunos-x64" "0.21.5"
+    "@esbuild/win32-arm64" "0.21.5"
+    "@esbuild/win32-ia32" "0.21.5"
+    "@esbuild/win32-x64" "0.21.5"
+
+escape-string-regexp@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/escape-string-regexp/-/escape-string-regexp-2.0.0.tgz#a30304e99daa32e23b2fd20f51babd07cffca344"
+  integrity sha512-UpzcLCXolUWcNu5HtVMHYdXJjArjsF9C0aNnquZYY4uW/Vu0miy5YoWvbV345HauVvcAUnpRuhMMcqTcGOY2+w==
+
+estree-walker@^3.0.3:
+  version "3.0.3"
+  resolved "https://registry.yarnpkg.com/estree-walker/-/estree-walker-3.0.3.tgz#67c3e549ec402a487b4fc193d1953a524752340d"
+  integrity sha512-7RUKfXgSMMkzt6ZuXmqapOurLGPPfgj6l9uRZ7lRGolvk0y2yocc35LdcxKC5PQZdn2DMqioAQ2NoWcrTKmm6g==
+  dependencies:
+    "@types/estree" "^1.0.0"
+
 event-target-shim@^5.0.0:
   version "5.0.1"
   resolved "https://registry.yarnpkg.com/event-target-shim/-/event-target-shim-5.0.1.tgz#5d4d3ebdf9583d63a5333ce2deb7480ab2b05789"
   integrity sha512-i/2XbnSz/uxRCU6+NdVJgKWDTM427+MqYbkQzD321DuCQJUqOuJKIA0IM2+W2xtYHdKOmZ4dR6fExsd4SXL+WQ==
 
+expect-type@^1.1.0:
+  version "1.1.0"
+  resolved "https://registry.yarnpkg.com/expect-type/-/expect-type-1.1.0.tgz#a146e414250d13dfc49eafcfd1344a4060fa4c75"
+  integrity sha512-bFi65yM+xZgk+u/KRIpekdSYkTB5W1pEf0Lt8Q8Msh7b+eQ7LXVtIB1Bkm4fvclDEL1b2CZkMhv2mOeF8tMdkA==
+
+expect@^29.0.0:
+  version "29.7.0"
+  resolved "https://registry.yarnpkg.com/expect/-/expect-29.7.0.tgz#578874590dcb3214514084c08115d8aee61e11bc"
+  integrity sha512-2Zks0hf1VLFYI1kbh0I5jP3KHHyCHpkfyHBzsSXRFgl/Bg9mWYfMW8oD+PdMPlEwy5HNsR9JutYy6pMeOh61nw==
+  dependencies:
+    "@jest/expect-utils" "^29.7.0"
+    jest-get-type "^29.6.3"
+    jest-matcher-utils "^29.7.0"
+    jest-message-util "^29.7.0"
+    jest-util "^29.7.0"
+
+fill-range@^7.1.1:
+  version "7.1.1"
+  resolved "https://registry.yarnpkg.com/fill-range/-/fill-range-7.1.1.tgz#44265d3cac07e3ea7dc247516380643754a05292"
+  integrity sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==
+  dependencies:
+    to-regex-range "^5.0.1"
+
 form-data-encoder@1.7.2:
   version "1.7.2"
   resolved "https://registry.yarnpkg.com/form-data-encoder/-/form-data-encoder-1.7.2.tgz#1f1ae3dccf58ed4690b86d87e4f57c654fbab040"
@@ -323,6 +842,21 @@ formdata-node@^4.3.2:
     node-domexception "1.0.0"
     web-streams-polyfill "4.0.0-beta.3"
 
+fsevents@~2.3.2, fsevents@~2.3.3:
+  version "2.3.3"
+  resolved "https://registry.yarnpkg.com/fsevents/-/fsevents-2.3.3.tgz#cac6407785d03675a2a5e1a5305c697b347d90d6"
+  integrity sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==
+
+graceful-fs@^4.2.9:
+  version "4.2.11"
+  resolved "https://registry.yarnpkg.com/graceful-fs/-/graceful-fs-4.2.11.tgz#4183e4e8bf08bb6e05bbb2f7d2e0c8f712ca40e3"
+  integrity sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==
+
+has-flag@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/has-flag/-/has-flag-4.0.0.tgz#944771fd9c81c81265c4d6941860da06bb59479b"
+  integrity sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==
+
 humanize-ms@^1.2.1:
   version "1.2.1"
   resolved "https://registry.yarnpkg.com/humanize-ms/-/humanize-ms-1.2.1.tgz#c46e3159a293f6b896da29316d8b6fe8bb79bbed"
@@ -335,11 +869,85 @@ is-buffer@~1.1.6:
   resolved "https://registry.yarnpkg.com/is-buffer/-/is-buffer-1.1.6.tgz#efaa2ea9daa0d7ab2ea13a97b2b8ad51fefbe8be"
   integrity sha512-NcdALwpXkTm5Zvvbk7owOUSvVvBKDgKP5/ewfXEznmQFfs4ZRmanOeKBTjRVjka3QFoN6XJ+9F3USqfHqTaU5w==
 
+is-number@^7.0.0:
+  version "7.0.0"
+  resolved "https://registry.yarnpkg.com/is-number/-/is-number-7.0.0.tgz#7535345b896734d5f80c4d06c50955527a14f12b"
+  integrity sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==
+
 is-plain-object@^5.0.0:
   version "5.0.0"
   resolved "https://registry.yarnpkg.com/is-plain-object/-/is-plain-object-5.0.0.tgz#4427f50ab3429e9025ea7d52e9043a9ef4159344"
   integrity sha512-VRSzKkbMm5jMDoKLbltAkFQ5Qr7VDiTFGXxYFXXowVj387GeGNOCsOH6Msy00SGZ3Fp84b1Naa1psqgcCIEP5Q==
 
+jest-diff@^29.7.0:
+  version "29.7.0"
+  resolved "https://registry.yarnpkg.com/jest-diff/-/jest-diff-29.7.0.tgz#017934a66ebb7ecf6f205e84699be10afd70458a"
+  integrity sha512-LMIgiIrhigmPrs03JHpxUh2yISK3vLFPkAodPeo0+BuF7wA2FoQbkEg1u8gBYBThncu7e1oEDUfIXVuTqLRUjw==
+  dependencies:
+    chalk "^4.0.0"
+    diff-sequences "^29.6.3"
+    jest-get-type "^29.6.3"
+    pretty-format "^29.7.0"
+
+jest-get-type@^29.6.3:
+  version "29.6.3"
+  resolved "https://registry.yarnpkg.com/jest-get-type/-/jest-get-type-29.6.3.tgz#36f499fdcea197c1045a127319c0481723908fd1"
+  integrity sha512-zrteXnqYxfQh7l5FHyL38jL39di8H8rHoecLH3JNxH3BwOrBsNeabdap5e0I23lD4HHI8W5VFBZqG4Eaq5LNcw==
+
+jest-matcher-utils@^29.7.0:
+  version "29.7.0"
+  resolved "https://registry.yarnpkg.com/jest-matcher-utils/-/jest-matcher-utils-29.7.0.tgz#ae8fec79ff249fd592ce80e3ee474e83a6c44f12"
+  integrity sha512-sBkD+Xi9DtcChsI3L3u0+N0opgPYnCRPtGcQYrgXmR+hmt/fYfWAL0xRXYU8eWOdfuLgBe0YCW3AFtnRLagq/g==
+  dependencies:
+    chalk "^4.0.0"
+    jest-diff "^29.7.0"
+    jest-get-type "^29.6.3"
+    pretty-format "^29.7.0"
+
+jest-message-util@^29.7.0:
+  version "29.7.0"
+  resolved "https://registry.yarnpkg.com/jest-message-util/-/jest-message-util-29.7.0.tgz#8bc392e204e95dfe7564abbe72a404e28e51f7f3"
+  integrity sha512-GBEV4GRADeP+qtB2+6u61stea8mGcOT4mCtrYISZwfu9/ISHFJ/5zOMXYbpBE9RsS5+Gb63DW4FgmnKJ79Kf6w==
+  dependencies:
+    "@babel/code-frame" "^7.12.13"
+    "@jest/types" "^29.6.3"
+    "@types/stack-utils" "^2.0.0"
+    chalk "^4.0.0"
+    graceful-fs "^4.2.9"
+    micromatch "^4.0.4"
+    pretty-format "^29.7.0"
+    slash "^3.0.0"
+    stack-utils "^2.0.3"
+
+jest-util@^29.7.0:
+  version "29.7.0"
+  resolved "https://registry.yarnpkg.com/jest-util/-/jest-util-29.7.0.tgz#23c2b62bfb22be82b44de98055802ff3710fc0bc"
+  integrity sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==
+  dependencies:
+    "@jest/types" "^29.6.3"
+    "@types/node" "*"
+    chalk "^4.0.0"
+    ci-info "^3.2.0"
+    graceful-fs "^4.2.9"
+    picomatch "^2.2.3"
+
+js-tokens@^4.0.0:
+  version "4.0.0"
+  resolved "https://registry.yarnpkg.com/js-tokens/-/js-tokens-4.0.0.tgz#19203fb59991df98e3a287050d4647cdeaf32499"
+  integrity sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==
+
+loupe@^3.1.0, loupe@^3.1.2:
+  version "3.1.2"
+  resolved "https://registry.yarnpkg.com/loupe/-/loupe-3.1.2.tgz#c86e0696804a02218f2206124c45d8b15291a240"
+  integrity sha512-23I4pFZHmAemUnz8WZXbYRSKYj801VDaNv9ETuMh7IrMc7VuVVSo+Z9iLE3ni30+U48iDWfi30d3twAXBYmnCg==
+
+magic-string@^0.30.12:
+  version "0.30.13"
+  resolved "https://registry.yarnpkg.com/magic-string/-/magic-string-0.30.13.tgz#92438e3ff4946cf54f18247c981e5c161c46683c"
+  integrity sha512-8rYBO+MsWkgjDSOvLomYnzhdwEG51olQ4zL5KXnNJWV5MNmrb4rTZdrtkhxjnD/QyZUqR/Z/XDsUs/4ej2nx0g==
+  dependencies:
+    "@jridgewell/sourcemap-codec" "^1.5.0"
+
 make-error@^1.1.1:
   version "1.3.6"
   resolved "https://registry.yarnpkg.com/make-error/-/make-error-1.3.6.tgz#2eb2e37ea9b67c4891f684a1394799af484cf7a2"
@@ -354,6 +962,14 @@ md5@^2.3.0:
     crypt "0.0.2"
     is-buffer "~1.1.6"
 
+micromatch@^4.0.4:
+  version "4.0.8"
+  resolved "https://registry.yarnpkg.com/micromatch/-/micromatch-4.0.8.tgz#d66fa18f3a47076789320b9b1af32bd86d9fa202"
+  integrity sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==
+  dependencies:
+    braces "^3.0.3"
+    picomatch "^2.3.1"
+
 mime-db@1.52.0:
   version "1.52.0"
   resolved "https://registry.yarnpkg.com/mime-db/-/mime-db-1.52.0.tgz#bbabcdc02859f4987301c856e3387ce5ec43bf70"
@@ -373,11 +989,16 @@ minimatch@^7.4.2:
   dependencies:
     brace-expansion "^2.0.1"
 
-ms@^2.0.0:
+ms@^2.0.0, ms@^2.1.3:
   version "2.1.3"
   resolved "https://registry.yarnpkg.com/ms/-/ms-2.1.3.tgz#574c8138ce1d2b5861f0b44579dbadd60c6615b2"
   integrity sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==
 
+nanoid@^3.3.7:
+  version "3.3.7"
+  resolved "https://registry.yarnpkg.com/nanoid/-/nanoid-3.3.7.tgz#d0c301a691bc8d54efa0a2226ccf3fe2fd656bd8"
+  integrity sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==
+
 node-domexception@1.0.0:
   version "1.0.0"
   resolved "https://registry.yarnpkg.com/node-domexception/-/node-domexception-1.0.0.tgz#6888db46a1f71c0b76b3f7555016b63fe64766e5"
@@ -417,11 +1038,152 @@ parse-diff@^0.11.1:
   resolved "https://registry.yarnpkg.com/parse-diff/-/parse-diff-0.11.1.tgz#d93ca2d225abed280782bccb1476711ca9dd84f0"
   integrity sha512-Oq4j8LAOPOcssanQkIjxosjATBIEJhCxMCxPhMu+Ci4wdNmAEdx0O+a7gzbR2PyKXgKPvRLIN5g224+dJAsKHA==
 
+pathe@^1.1.2:
+  version "1.1.2"
+  resolved "https://registry.yarnpkg.com/pathe/-/pathe-1.1.2.tgz#6c4cb47a945692e48a1ddd6e4094d170516437ec"
+  integrity sha512-whLdWMYL2TwI08hn8/ZqAbrVemu0LNaNNJZX73O6qaIdCTfXutsLhMkjdENX0qhsQ9uIimo4/aQOmXkoon2nDQ==
+
+pathval@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/pathval/-/pathval-2.0.0.tgz#7e2550b422601d4f6b8e26f1301bc8f15a741a25"
+  integrity sha512-vE7JKRyES09KiunauX7nd2Q9/L7lhok4smP9RZTDeD4MVs72Dp2qNFVz39Nz5a0FVEW0BJR6C0DYrq6unoziZA==
+
+picocolors@^1.0.0, picocolors@^1.1.1:
+  version "1.1.1"
+  resolved "https://registry.yarnpkg.com/picocolors/-/picocolors-1.1.1.tgz#3d321af3eab939b083c8f929a1d12cda81c26b6b"
+  integrity sha512-xceH2snhtb5M9liqDsmEw56le376mTZkEX/jEb/RxNFyegNul7eNslCXP9FDj/Lcu0X8KEyMceP2ntpaHrDEVA==
+
+picomatch@^2.2.3, picomatch@^2.3.1:
+  version "2.3.1"
+  resolved "https://registry.yarnpkg.com/picomatch/-/picomatch-2.3.1.tgz#3ba3833733646d9d3e4995946c1365a67fb07a42"
+  integrity sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==
+
+postcss@^8.4.43:
+  version "8.4.49"
+  resolved "https://registry.yarnpkg.com/postcss/-/postcss-8.4.49.tgz#4ea479048ab059ab3ae61d082190fabfd994fe19"
+  integrity sha512-OCVPnIObs4N29kxTjzLfUryOkvZEq+pf8jTF0lg8E7uETuWHA+v7j3c/xJmiqpX450191LlmZfUKkXxkTry7nA==
+  dependencies:
+    nanoid "^3.3.7"
+    picocolors "^1.1.1"
+    source-map-js "^1.2.1"
+
 prettier@^2.8.6:
   version "2.8.6"
   resolved "https://registry.yarnpkg.com/prettier/-/prettier-2.8.6.tgz#5c174b29befd507f14b83e3c19f83fdc0e974b71"
   integrity sha512-mtuzdiBbHwPEgl7NxWlqOkithPyp4VN93V7VeHVWBF+ad3I5avc0RVDT4oImXQy9H/AqxA2NSQH8pSxHW6FYbQ==
 
+pretty-format@^29.0.0, pretty-format@^29.7.0:
+  version "29.7.0"
+  resolved "https://registry.yarnpkg.com/pretty-format/-/pretty-format-29.7.0.tgz#ca42c758310f365bfa71a0bda0a807160b776812"
+  integrity sha512-Pdlw/oPxN+aXdmM9R00JVC9WVFoCLTKJvDVLgmJ+qAffBMxsV85l/Lu7sNx4zSzPyoL2euImuEwHhOXdEgNFZQ==
+  dependencies:
+    "@jest/schemas" "^29.6.3"
+    ansi-styles "^5.0.0"
+    react-is "^18.0.0"
+
+react-is@^18.0.0:
+  version "18.3.1"
+  resolved "https://registry.yarnpkg.com/react-is/-/react-is-18.3.1.tgz#e83557dc12eae63a99e003a46388b1dcbb44db7e"
+  integrity sha512-/LLMVyas0ljjAtoYiPqYiL8VWXzUUdThrmU5+n20DZv+a+ClRoevUzw5JxU+Ieh5/c87ytoTBV9G1FiKfNJdmg==
+
+rollup@^4.20.0:
+  version "4.27.4"
+  resolved "https://registry.yarnpkg.com/rollup/-/rollup-4.27.4.tgz#b23e4ef4fe4d0d87f5237dacf63f95a499503897"
+  integrity sha512-RLKxqHEMjh/RGLsDxAEsaLO3mWgyoU6x9w6n1ikAzet4B3gI2/3yP6PWY2p9QzRTh6MfEIXB3MwsOY0Iv3vNrw==
+  dependencies:
+    "@types/estree" "1.0.6"
+  optionalDependencies:
+    "@rollup/rollup-android-arm-eabi" "4.27.4"
+    "@rollup/rollup-android-arm64" "4.27.4"
+    "@rollup/rollup-darwin-arm64" "4.27.4"
+    "@rollup/rollup-darwin-x64" "4.27.4"
+    "@rollup/rollup-freebsd-arm64" "4.27.4"
+    "@rollup/rollup-freebsd-x64" "4.27.4"
+    "@rollup/rollup-linux-arm-gnueabihf" "4.27.4"
+    "@rollup/rollup-linux-arm-musleabihf" "4.27.4"
+    "@rollup/rollup-linux-arm64-gnu" "4.27.4"
+    "@rollup/rollup-linux-arm64-musl" "4.27.4"
+    "@rollup/rollup-linux-powerpc64le-gnu" "4.27.4"
+    "@rollup/rollup-linux-riscv64-gnu" "4.27.4"
+    "@rollup/rollup-linux-s390x-gnu" "4.27.4"
+    "@rollup/rollup-linux-x64-gnu" "4.27.4"
+    "@rollup/rollup-linux-x64-musl" "4.27.4"
+    "@rollup/rollup-win32-arm64-msvc" "4.27.4"
+    "@rollup/rollup-win32-ia32-msvc" "4.27.4"
+    "@rollup/rollup-win32-x64-msvc" "4.27.4"
+    fsevents "~2.3.2"
+
+siginfo@^2.0.0:
+  version "2.0.0"
+  resolved "https://registry.yarnpkg.com/siginfo/-/siginfo-2.0.0.tgz#32e76c70b79724e3bb567cb9d543eb858ccfaf30"
+  integrity sha512-ybx0WO1/8bSBLEWXZvEd7gMW3Sn3JFlW3TvX1nREbDLRNQNaeNN8WK0meBwPdAaOI7TtRRRJn/Es1zhrrCHu7g==
+
+slash@^3.0.0:
+  version "3.0.0"
+  resolved "https://registry.yarnpkg.com/slash/-/slash-3.0.0.tgz#6539be870c165adbd5240220dbe361f1bc4d4634"
+  integrity sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==
+
+source-map-js@^1.2.1:
+  version "1.2.1"
+  resolved "https://registry.yarnpkg.com/source-map-js/-/source-map-js-1.2.1.tgz#1ce5650fddd87abc099eda37dcff024c2667ae46"
+  integrity sha512-UXWMKhLOwVKb728IUtQPXxfYU+usdybtUrK/8uGE8CQMvrhOpwvzDBwj0QhSL7MQc7vIsISBG8VQ8+IDQxpfQA==
+
+stack-utils@^2.0.3:
+  version "2.0.6"
+  resolved "https://registry.yarnpkg.com/stack-utils/-/stack-utils-2.0.6.tgz#aaf0748169c02fc33c8232abccf933f54a1cc34f"
+  integrity sha512-XlkWvfIm6RmsWtNJx+uqtKLS8eqFbxUg0ZzLXqY0caEy9l7hruX8IpiDnjsLavoBgqCCR71TqWO8MaXYheJ3RQ==
+  dependencies:
+    escape-string-regexp "^2.0.0"
+
+stackback@0.0.2:
+  version "0.0.2"
+  resolved "https://registry.yarnpkg.com/stackback/-/stackback-0.0.2.tgz#1ac8a0d9483848d1695e418b6d031a3c3ce68e3b"
+  integrity sha512-1XMJE5fQo1jGH6Y/7ebnwPOBEkIEnT4QF32d5R1+VXdXveM0IBMJt8zfaxX1P3QhVwrYe+576+jkANtSS2mBbw==
+
+std-env@^3.8.0:
+  version "3.8.0"
+  resolved "https://registry.yarnpkg.com/std-env/-/std-env-3.8.0.tgz#b56ffc1baf1a29dcc80a3bdf11d7fca7c315e7d5"
+  integrity sha512-Bc3YwwCB+OzldMxOXJIIvC6cPRWr/LxOp48CdQTOkPyk/t4JWWJbrilwBd7RJzKV8QW7tJkcgAmeuLLJugl5/w==
+
+supports-color@^7.1.0:
+  version "7.2.0"
+  resolved "https://registry.yarnpkg.com/supports-color/-/supports-color-7.2.0.tgz#1b7dcdcb32b8138801b3e478ba6a51caa89648da"
+  integrity sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==
+  dependencies:
+    has-flag "^4.0.0"
+
+tinybench@^2.9.0:
+  version "2.9.0"
+  resolved "https://registry.yarnpkg.com/tinybench/-/tinybench-2.9.0.tgz#103c9f8ba6d7237a47ab6dd1dcff77251863426b"
+  integrity sha512-0+DUvqWMValLmha6lr4kD8iAMK1HzV0/aKnCtWb9v9641TnP/MFb7Pc2bxoxQjTXAErryXVgUOfv2YqNllqGeg==
+
+tinyexec@^0.3.1:
+  version "0.3.1"
+  resolved "https://registry.yarnpkg.com/tinyexec/-/tinyexec-0.3.1.tgz#0ab0daf93b43e2c211212396bdb836b468c97c98"
+  integrity sha512-WiCJLEECkO18gwqIp6+hJg0//p23HXp4S+gGtAKu3mI2F2/sXC4FvHvXvB0zJVVaTPhx1/tOwdbRsa1sOBIKqQ==
+
+tinypool@^1.0.1:
+  version "1.0.2"
+  resolved "https://registry.yarnpkg.com/tinypool/-/tinypool-1.0.2.tgz#706193cc532f4c100f66aa00b01c42173d9051b2"
+  integrity sha512-al6n+QEANGFOMf/dmUMsuS5/r9B06uwlyNjZZql/zv8J7ybHCgoihBNORZCY2mzUuAnomQa2JdhyHKzZxPCrFA==
+
+tinyrainbow@^1.2.0:
+  version "1.2.0"
+  resolved "https://registry.yarnpkg.com/tinyrainbow/-/tinyrainbow-1.2.0.tgz#5c57d2fc0fb3d1afd78465c33ca885d04f02abb5"
+  integrity sha512-weEDEq7Z5eTHPDh4xjX789+fHfF+P8boiFB+0vbWzpbnbsEr/GRaohi/uMKxg8RZMXnl1ItAi/IUHWMsjDV7kQ==
+
+tinyspy@^3.0.2:
+  version "3.0.2"
+  resolved "https://registry.yarnpkg.com/tinyspy/-/tinyspy-3.0.2.tgz#86dd3cf3d737b15adcf17d7887c84a75201df20a"
+  integrity sha512-n1cw8k1k0x4pgA2+9XrOkFydTerNcJ1zWCO5Nn9scWHTD+5tp8dghT2x1uduQePZTZgd3Tupf+x9BxJjeJi77Q==
+
+to-regex-range@^5.0.1:
+  version "5.0.1"
+  resolved "https://registry.yarnpkg.com/to-regex-range/-/to-regex-range-5.0.1.tgz#1648c44aae7c8d988a326018ed72f5b4dd0392e4"
+  integrity sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==
+  dependencies:
+    is-number "^7.0.0"
+
 tr46@~0.0.3:
   version "0.0.3"
   resolved "https://registry.yarnpkg.com/tr46/-/tr46-0.0.3.tgz#8184fd347dac9cdc185992f3a6622e14b9d9ab6a"
@@ -476,6 +1238,54 @@ v8-compile-cache-lib@^3.0.1:
   resolved "https://registry.yarnpkg.com/v8-compile-cache-lib/-/v8-compile-cache-lib-3.0.1.tgz#6336e8d71965cb3d35a1bbb7868445a7c05264bf"
   integrity sha512-wa7YjyUGfNZngI/vtK0UHAN+lgDCxBPCylVXGp0zu59Fz5aiGtNXaq3DhIov063MorB+VfufLh3JlF2KdTK3xg==
 
+vite-node@2.1.5:
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/vite-node/-/vite-node-2.1.5.tgz#cf28c637b2ebe65921f3118a165b7cf00a1cdf19"
+  integrity sha512-rd0QIgx74q4S1Rd56XIiL2cYEdyWn13cunYBIuqh9mpmQr7gGS0IxXoP8R6OaZtNQQLyXSWbd4rXKYUbhFpK5w==
+  dependencies:
+    cac "^6.7.14"
+    debug "^4.3.7"
+    es-module-lexer "^1.5.4"
+    pathe "^1.1.2"
+    vite "^5.0.0"
+
+vite@^5.0.0:
+  version "5.4.11"
+  resolved "https://registry.yarnpkg.com/vite/-/vite-5.4.11.tgz#3b415cd4aed781a356c1de5a9ebafb837715f6e5"
+  integrity sha512-c7jFQRklXua0mTzneGW9QVyxFjUgwcihC4bXEtujIo2ouWCe1Ajt/amn2PCxYnhYfd5k09JX3SB7OYWFKYqj8Q==
+  dependencies:
+    esbuild "^0.21.3"
+    postcss "^8.4.43"
+    rollup "^4.20.0"
+  optionalDependencies:
+    fsevents "~2.3.3"
+
+vitest@^2.1.5:
+  version "2.1.5"
+  resolved "https://registry.yarnpkg.com/vitest/-/vitest-2.1.5.tgz#a93b7b84a84650130727baae441354e6df118148"
+  integrity sha512-P4ljsdpuzRTPI/kbND2sDZ4VmieerR2c9szEZpjc+98Z9ebvnXmM5+0tHEKqYZumXqlvnmfWsjeFOjXVriDG7A==
+  dependencies:
+    "@vitest/expect" "2.1.5"
+    "@vitest/mocker" "2.1.5"
+    "@vitest/pretty-format" "^2.1.5"
+    "@vitest/runner" "2.1.5"
+    "@vitest/snapshot" "2.1.5"
+    "@vitest/spy" "2.1.5"
+    "@vitest/utils" "2.1.5"
+    chai "^5.1.2"
+    debug "^4.3.7"
+    expect-type "^1.1.0"
+    magic-string "^0.30.12"
+    pathe "^1.1.2"
+    std-env "^3.8.0"
+    tinybench "^2.9.0"
+    tinyexec "^0.3.1"
+    tinypool "^1.0.1"
+    tinyrainbow "^1.2.0"
+    vite "^5.0.0"
+    vite-node "2.1.5"
+    why-is-node-running "^2.3.0"
+
 web-streams-polyfill@4.0.0-beta.3:
   version "4.0.0-beta.3"
   resolved "https://registry.yarnpkg.com/web-streams-polyfill/-/web-streams-polyfill-4.0.0-beta.3.tgz#2898486b74f5156095e473efe989dcf185047a38"
@@ -499,11 +1309,24 @@ whatwg-url@^5.0.0:
     tr46 "~0.0.3"
     webidl-conversions "^3.0.0"
 
+why-is-node-running@^2.3.0:
+  version "2.3.0"
+  resolved "https://registry.yarnpkg.com/why-is-node-running/-/why-is-node-running-2.3.0.tgz#a3f69a97107f494b3cdc3bdddd883a7d65cebf04"
+  integrity sha512-hUrmaWBdVDcxvYqnyh09zunKzROWjbZTiNy8dBEjkS7ehEDQibXJ7XvlmtbwuTclUiIyN+CyXQD4Vmko8fNm8w==
+  dependencies:
+    siginfo "^2.0.0"
+    stackback "0.0.2"
+
 wrappy@1:
   version "1.0.2"
   resolved "https://registry.yarnpkg.com/wrappy/-/wrappy-1.0.2.tgz#b5243d8f3ec1aa35f1364605bc0d1036e30ab69f"
   integrity sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==
 
+yaml@^2.6.1:
+  version "2.6.1"
+  resolved "https://registry.yarnpkg.com/yaml/-/yaml-2.6.1.tgz#42f2b1ba89203f374609572d5349fb8686500773"
+  integrity sha512-7r0XPzioN/Q9kXBro/XPnA6kznR73DHq+GXh5ON7ZozRO6aMjbmiBuKste2wslTFkC5d1dw0GooOCepZXJ2SAg==
+
 yn@3.1.1:
   version "3.1.1"
   resolved "https://registry.yarnpkg.com/yn/-/yn-3.1.1.tgz#1e87401a09d767c1d5eab26a6e4c185182d2eb50"