From 9d8c7b4041799ff4995821122f3c4ce8934a36b7 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Mon, 4 Nov 2019 15:13:15 -0500 Subject: [PATCH 01/21] Release v1 --- README.md | 4 +- dist/restore/index.js | 5163 +++++++++++++++++++++++++++++++++++++++++ dist/save/index.js | 5140 ++++++++++++++++++++++++++++++++++++++++ examples.md | 26 +- package-lock.json | 2 +- package.json | 2 +- 6 files changed, 10320 insertions(+), 17 deletions(-) create mode 100644 dist/restore/index.js create mode 100644 dist/save/index.js diff --git a/README.md b/README.md index a289d65..ccf4bc5 100644 --- a/README.md +++ b/README.md @@ -36,7 +36,7 @@ jobs: - uses: actions/checkout@v1 - name: Cache node modules - uses: actions/cache@preview + uses: actions/cache@v1 with: path: node_modules key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} @@ -70,7 +70,7 @@ Example: steps: - uses: actions/checkout@v1 - - uses: actions/cache@preview + - uses: actions/cache@v1 id: cache with: path: path/to/dependencies diff --git a/dist/restore/index.js b/dist/restore/index.js new file mode 100644 index 0000000..6074ef1 --- /dev/null +++ b/dist/restore/index.js @@ -0,0 +1,5163 @@ +module.exports = +/******/ (function(modules, runtime) { // webpackBootstrap +/******/ "use strict"; +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ __webpack_require__.ab = __dirname + "/"; +/******/ +/******/ // the startup function +/******/ function startup() { +/******/ // Load entry module and return exports +/******/ return __webpack_require__(778); +/******/ }; +/******/ +/******/ // run startup +/******/ return startup(); +/******/ }) +/************************************************************************/ +/******/ ({ + +/***/ 1: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const childProcess = __webpack_require__(129); +const path = __webpack_require__(622); +const util_1 = __webpack_require__(669); +const ioUtil = __webpack_require__(672); +const exec = util_1.promisify(childProcess.exec); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. + */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another + // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. 
+ try { + if (yield ioUtil.isDirectory(inputPath, true)) { + yield exec(`rd /s /q "${inputPath}"`); + } + else { + yield exec(`del /f /a "${inputPath}"`); + } + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + // Shelling out fails to remove a symlink folder with missing source, this unlink catches that + try { + yield ioUtil.unlink(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + } + else { + let isDir = false; + try { + isDir = yield ioUtil.isDirectory(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + return; + } + if (isDir) { + yield exec(`rm -rf "${inputPath}"`); + } + else { + yield ioUtil.unlink(inputPath); + } + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + yield ioUtil.mkdirP(fsPath); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + } + try { + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { + for (const extension of process.env.PATHEXT.split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return filePath; + } + return ''; + } + // if any path separators, return empty + if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { + return ''; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. 
+ const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // return the first match + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); + if (filePath) { + return filePath; + } + } + return ''; + } + catch (err) { + throw new Error(`which failed with message ${err.message}`); + } + }); +} +exports.which = which; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + return { force, recursive }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), + +/***/ 9: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __webpack_require__(87); +const events = __webpack_require__(614); +const child = __webpack_require__(129); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. 
+ */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + strBuffer = s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. 
+ // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. 
+ let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + const stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + const errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + }); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. 
+ if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), + +/***/ 12: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
+Object.defineProperty(exports, "__esModule", { value: true }); +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Basic ' + new Buffer(this.username + ':' + this.password).toString('base64'); + options.headers['X-TFS-FedAuthRedirect'] = 'Suppress'; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; + + +/***/ }), + +/***/ 16: +/***/ (function(module) { + +module.exports = require("tls"); + +/***/ }), + +/***/ 87: +/***/ (function(module) { + +module.exports = require("os"); + +/***/ }), + +/***/ 105: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const httpm = __webpack_require__(874); +const util = __webpack_require__(729); +class RestClient { + /** + * Creates an instance of the RestClient + * @constructor + * @param {string} userAgent - userAgent for requests + * @param {string} baseUrl - (Optional) If not specified, use full urls per request. If supplied and a function passes a relative url, it will be appended to this + * @param {ifm.IRequestHandler[]} handlers - handlers are typically auth handlers (basic, bearer, ntlm supplied) + * @param {ifm.IRequestOptions} requestOptions - options for each http requests (http proxy setting, socket timeout) + */ + constructor(userAgent, baseUrl, handlers, requestOptions) { + this.client = new httpm.HttpClient(userAgent, handlers, requestOptions); + if (baseUrl) { + this._baseUrl = baseUrl; + } + } + /** + * Gets a resource from an endpoint + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} requestUrl - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + options(requestUrl, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(requestUrl, this._baseUrl); + let res = yield this.client.options(url, this._headersFromOptions(options)); + return this._processResponse(res, options); + }); + } + /** + * Gets a resource from an endpoint + * Be aware that not found returns a null. 
Other error conditions reject the promise + * @param {string} resource - fully qualified url or relative path + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + get(resource, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let res = yield this.client.get(url, this._headersFromOptions(options)); + return this._processResponse(res, options); + }); + } + /** + * Deletes a resource from an endpoint + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + del(resource, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let res = yield this.client.del(url, this._headersFromOptions(options)); + return this._processResponse(res, options); + }); + } + /** + * Creates resource(s) from an endpoint + * T type of object returned. + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + create(resource, resources, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let data = JSON.stringify(resources, null, 2); + let res = yield this.client.post(url, data, headers); + return this._processResponse(res, options); + }); + } + /** + * Updates resource(s) from an endpoint + * T type of object returned. + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + update(resource, resources, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let data = JSON.stringify(resources, null, 2); + let res = yield this.client.patch(url, data, headers); + return this._processResponse(res, options); + }); + } + /** + * Replaces resource(s) from an endpoint + * T type of object returned. + * Be aware that not found returns a null. 
Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + replace(resource, resources, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let data = JSON.stringify(resources, null, 2); + let res = yield this.client.put(url, data, headers); + return this._processResponse(res, options); + }); + } + uploadStream(verb, requestUrl, stream, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(requestUrl, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let res = yield this.client.sendStream(verb, url, stream, headers); + return this._processResponse(res, options); + }); + } + _headersFromOptions(options, contentType) { + options = options || {}; + let headers = options.additionalHeaders || {}; + headers["Accept"] = options.acceptHeader || "application/json"; + if (contentType) { + let found = false; + for (let header in headers) { + if (header.toLowerCase() == "content-type") { + found = true; + } + } + if (!found) { + headers["Content-Type"] = 'application/json; charset=utf-8'; + } + } + return headers; + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == httpm.HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, RestClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + if (options && options.responseProcessor) { + response.result = options.responseProcessor(obj); + } + else { + response.result = obj; + } + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = "Failed request: (" + statusCode + ")"; + } + let err = new Error(msg); + // attach statusCode and body obj (if available) to the error object + err['statusCode'] = statusCode; + if (response.result) { + err['result'] = response.result; + } + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.RestClient = RestClient; + + +/***/ }), + +/***/ 129: +/***/ (function(module) { + +module.exports = require("child_process"); + +/***/ }), + +/***/ 139: +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Unique ID creation requires a high quality random # generator. 
In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __webpack_require__(417); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 141: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +var net = __webpack_require__(631); +var tls = __webpack_require__(16); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var events = __webpack_require__(614); +var assert = __webpack_require__(357); +var util = __webpack_require__(669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false + }); + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode === 200) { + assert.equal(head.length, 0); + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + cb(socket); + } else { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 154: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const fs = __importStar(__webpack_require__(747)); +const Handlers_1 = __webpack_require__(941); +const HttpClient_1 = __webpack_require__(874); +const RestClient_1 = __webpack_require__(105); +function getCacheEntry(keys) { + return __awaiter(this, void 0, void 0, function* () { + const cacheUrl = getCacheUrl(); + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); + const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; + const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ + bearerCredentialHandler + ]); + const response = yield restClient.get(resource, getRequestOptions()); + if (response.statusCode === 204) { + return null; + } + if (response.statusCode !== 200) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + if (!cacheResult || !cacheResult.archiveLocation) { + throw new Error("Cache not found."); + } + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function downloadCache(cacheEntry, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const stream = fs.createWriteStream(archivePath); + const httpClient = new HttpClient_1.HttpClient("actions/cache"); + const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); + yield pipeResponseToStream(downloadResponse, stream); + }); +} +exports.downloadCache = downloadCache; +function pipeResponseToStream(response, stream) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => { + response.message.pipe(stream).on("close", () => { + resolve(); + }); + }); + }); +} +function saveCache(stream, key) { + return __awaiter(this, void 0, void 0, function* () { + const cacheUrl = getCacheUrl(); + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); + const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; + const postUrl = cacheUrl + resource; + const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ + bearerCredentialHandler + ]); + const requestOptions = getRequestOptions(); + requestOptions.additionalHeaders = { + "Content-Type": "application/octet-stream" + }; + const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); + if (response.statusCode !== 200) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + core.info("Cache saved successfully"); + }); +} +exports.saveCache = saveCache; +function getRequestOptions() { + const requestOptions = { + acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + }; + return requestOptions; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function 
getCacheUrl() { + // Ideally we just use ACTIONS_CACHE_URL + let cacheUrl = (process.env["ACTIONS_CACHE_URL"] || + process.env["ACTIONS_RUNTIME_URL"] || + "").replace("pipelines", "artifactcache"); + if (!cacheUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); + } + core.debug(`Cache Url: ${cacheUrl}`); + return cacheUrl; +} + + +/***/ }), + +/***/ 211: +/***/ (function(module) { + +module.exports = require("https"); + +/***/ }), + +/***/ 327: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +Object.defineProperty(exports, "__esModule", { value: true }); +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Basic ' + new Buffer('PAT:' + this.token).toString('base64'); + options.headers['X-TFS-FedAuthRedirect'] = 'Suppress'; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; + + +/***/ }), + +/***/ 357: +/***/ (function(module) { + +module.exports = require("assert"); + +/***/ }), + +/***/ 413: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(141); + + +/***/ }), + +/***/ 417: +/***/ (function(module) { + +module.exports = require("crypto"); + +/***/ }), + +/***/ 431: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __webpack_require__(87); +/** + * Commands + * + * Command Format: + * ##[name key=value;key=value]message + * + * Examples: + * ##[warning]This is the user warning message + * ##[set-secret name=mypassword]definitelyNotAPassword! 
+ */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + // safely append the val - avoid blowing up when attempting to + // call .replace() if message is not a string for some reason + cmdStr += `${key}=${escape(`${val || ''}`)},`; + } + } + } + } + cmdStr += CMD_STRING; + // safely append the message - avoid blowing up when attempting to + // call .replace() if message is not a string for some reason + const message = `${this.message || ''}`; + cmdStr += escapeData(message); + return cmdStr; + } +} +function escapeData(s) { + return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A'); +} +function escape(s) { + return s + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/]/g, '%5D') + .replace(/;/g, '%3B'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 432: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +var crypto = __webpack_require__(417); + +var flags = { + NTLM_NegotiateUnicode : 0x00000001, + NTLM_NegotiateOEM : 0x00000002, + NTLM_RequestTarget : 0x00000004, + NTLM_Unknown9 : 0x00000008, + NTLM_NegotiateSign : 0x00000010, + NTLM_NegotiateSeal : 0x00000020, + NTLM_NegotiateDatagram : 0x00000040, + NTLM_NegotiateLanManagerKey : 0x00000080, + NTLM_Unknown8 : 0x00000100, + NTLM_NegotiateNTLM : 0x00000200, + NTLM_NegotiateNTOnly : 0x00000400, + NTLM_Anonymous : 0x00000800, + NTLM_NegotiateOemDomainSupplied : 0x00001000, + NTLM_NegotiateOemWorkstationSupplied : 0x00002000, + NTLM_Unknown6 : 0x00004000, + NTLM_NegotiateAlwaysSign : 0x00008000, + NTLM_TargetTypeDomain : 0x00010000, + NTLM_TargetTypeServer : 0x00020000, + NTLM_TargetTypeShare : 0x00040000, + NTLM_NegotiateExtendedSecurity : 0x00080000, + NTLM_NegotiateIdentify : 0x00100000, + NTLM_Unknown5 : 0x00200000, + NTLM_RequestNonNTSessionKey : 0x00400000, + NTLM_NegotiateTargetInfo : 0x00800000, + NTLM_Unknown4 : 0x01000000, + NTLM_NegotiateVersion : 0x02000000, + NTLM_Unknown3 : 0x04000000, + NTLM_Unknown2 : 0x08000000, + NTLM_Unknown1 : 0x10000000, + NTLM_Negotiate128 : 0x20000000, + NTLM_NegotiateKeyExchange : 0x40000000, + NTLM_Negotiate56 : 0x80000000 +}; +var typeflags = { + NTLM_TYPE1_FLAGS : flags.NTLM_NegotiateUnicode + + flags.NTLM_NegotiateOEM + + flags.NTLM_RequestTarget + + flags.NTLM_NegotiateNTLM + + flags.NTLM_NegotiateOemDomainSupplied + + flags.NTLM_NegotiateOemWorkstationSupplied + + flags.NTLM_NegotiateAlwaysSign + + flags.NTLM_NegotiateExtendedSecurity + + flags.NTLM_NegotiateVersion + + flags.NTLM_Negotiate128 + + flags.NTLM_Negotiate56, + + NTLM_TYPE2_FLAGS : flags.NTLM_NegotiateUnicode + + flags.NTLM_RequestTarget + + flags.NTLM_NegotiateNTLM + + flags.NTLM_NegotiateAlwaysSign + + flags.NTLM_NegotiateExtendedSecurity + + flags.NTLM_NegotiateTargetInfo + + flags.NTLM_NegotiateVersion + + flags.NTLM_Negotiate128 + + 
flags.NTLM_Negotiate56 +}; + +function createType1Message(options){ + var domain = escape(options.domain.toUpperCase()); + var workstation = escape(options.workstation.toUpperCase()); + var protocol = 'NTLMSSP\0'; + + var BODY_LENGTH = 40; + + var type1flags = typeflags.NTLM_TYPE1_FLAGS; + if(!domain || domain === '') + type1flags = type1flags - flags.NTLM_NegotiateOemDomainSupplied; + + var pos = 0; + var buf = new Buffer(BODY_LENGTH + domain.length + workstation.length); + + + buf.write(protocol, pos, protocol.length); pos += protocol.length; // protocol + buf.writeUInt32LE(1, pos); pos += 4; // type 1 + buf.writeUInt32LE(type1flags, pos); pos += 4; // TYPE1 flag + + buf.writeUInt16LE(domain.length, pos); pos += 2; // domain length + buf.writeUInt16LE(domain.length, pos); pos += 2; // domain max length + buf.writeUInt32LE(BODY_LENGTH + workstation.length, pos); pos += 4; // domain buffer offset + + buf.writeUInt16LE(workstation.length, pos); pos += 2; // workstation length + buf.writeUInt16LE(workstation.length, pos); pos += 2; // workstation max length + buf.writeUInt32LE(BODY_LENGTH, pos); pos += 4; // workstation buffer offset + + buf.writeUInt8(5, pos); pos += 1; //ProductMajorVersion + buf.writeUInt8(1, pos); pos += 1; //ProductMinorVersion + buf.writeUInt16LE(2600, pos); pos += 2; //ProductBuild + + buf.writeUInt8(0 , pos); pos += 1; //VersionReserved1 + buf.writeUInt8(0 , pos); pos += 1; //VersionReserved2 + buf.writeUInt8(0 , pos); pos += 1; //VersionReserved3 + buf.writeUInt8(15, pos); pos += 1; //NTLMRevisionCurrent + + buf.write(workstation, pos, workstation.length, 'ascii'); pos += workstation.length; // workstation string + buf.write(domain , pos, domain.length , 'ascii'); pos += domain.length; + + return 'NTLM ' + buf.toString('base64'); +} + +function parseType2Message(rawmsg, callback){ + var match = rawmsg.match(/NTLM (.+)?/); + if(!match || !match[1]) + return callback(new Error("Couldn't find NTLM in the message type2 comming from the server")); + + var buf = new Buffer(match[1], 'base64'); + + var msg = {}; + + msg.signature = buf.slice(0, 8); + msg.type = buf.readInt16LE(8); + + if(msg.type != 2) + return callback(new Error("Server didn't return a type 2 message")); + + msg.targetNameLen = buf.readInt16LE(12); + msg.targetNameMaxLen = buf.readInt16LE(14); + msg.targetNameOffset = buf.readInt32LE(16); + msg.targetName = buf.slice(msg.targetNameOffset, msg.targetNameOffset + msg.targetNameMaxLen); + + msg.negotiateFlags = buf.readInt32LE(20); + msg.serverChallenge = buf.slice(24, 32); + msg.reserved = buf.slice(32, 40); + + if(msg.negotiateFlags & flags.NTLM_NegotiateTargetInfo){ + msg.targetInfoLen = buf.readInt16LE(40); + msg.targetInfoMaxLen = buf.readInt16LE(42); + msg.targetInfoOffset = buf.readInt32LE(44); + msg.targetInfo = buf.slice(msg.targetInfoOffset, msg.targetInfoOffset + msg.targetInfoLen); + } + return msg; +} + +function createType3Message(msg2, options){ + var nonce = msg2.serverChallenge; + var username = options.username; + var password = options.password; + var negotiateFlags = msg2.negotiateFlags; + + var isUnicode = negotiateFlags & flags.NTLM_NegotiateUnicode; + var isNegotiateExtendedSecurity = negotiateFlags & flags.NTLM_NegotiateExtendedSecurity; + + var BODY_LENGTH = 72; + + var domainName = escape(options.domain.toUpperCase()); + var workstation = escape(options.workstation.toUpperCase()); + + var workstationBytes, domainNameBytes, usernameBytes, encryptedRandomSessionKeyBytes; + + var encryptedRandomSessionKey = ""; + if(isUnicode){ + 
workstationBytes = new Buffer(workstation, 'utf16le'); + domainNameBytes = new Buffer(domainName, 'utf16le'); + usernameBytes = new Buffer(username, 'utf16le'); + encryptedRandomSessionKeyBytes = new Buffer(encryptedRandomSessionKey, 'utf16le'); + }else{ + workstationBytes = new Buffer(workstation, 'ascii'); + domainNameBytes = new Buffer(domainName, 'ascii'); + usernameBytes = new Buffer(username, 'ascii'); + encryptedRandomSessionKeyBytes = new Buffer(encryptedRandomSessionKey, 'ascii'); + } + + var lmChallengeResponse = calc_resp(create_LM_hashed_password_v1(password), nonce); + var ntChallengeResponse = calc_resp(create_NT_hashed_password_v1(password), nonce); + + if(isNegotiateExtendedSecurity){ + var pwhash = create_NT_hashed_password_v1(password); + var clientChallenge = ""; + for(var i=0; i < 8; i++){ + clientChallenge += String.fromCharCode( Math.floor(Math.random()*256) ); + } + var clientChallengeBytes = new Buffer(clientChallenge, 'ascii'); + var challenges = ntlm2sr_calc_resp(pwhash, nonce, clientChallengeBytes); + lmChallengeResponse = challenges.lmChallengeResponse; + ntChallengeResponse = challenges.ntChallengeResponse; + } + + var signature = 'NTLMSSP\0'; + + var pos = 0; + var buf = new Buffer(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length + lmChallengeResponse.length + ntChallengeResponse.length + encryptedRandomSessionKeyBytes.length); + + buf.write(signature, pos, signature.length); pos += signature.length; + buf.writeUInt32LE(3, pos); pos += 4; // type 1 + + buf.writeUInt16LE(lmChallengeResponse.length, pos); pos += 2; // LmChallengeResponseLen + buf.writeUInt16LE(lmChallengeResponse.length, pos); pos += 2; // LmChallengeResponseMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length, pos); pos += 4; // LmChallengeResponseOffset + + buf.writeUInt16LE(ntChallengeResponse.length, pos); pos += 2; // NtChallengeResponseLen + buf.writeUInt16LE(ntChallengeResponse.length, pos); pos += 2; // NtChallengeResponseMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length + lmChallengeResponse.length, pos); pos += 4; // NtChallengeResponseOffset + + buf.writeUInt16LE(domainNameBytes.length, pos); pos += 2; // DomainNameLen + buf.writeUInt16LE(domainNameBytes.length, pos); pos += 2; // DomainNameMaxLen + buf.writeUInt32LE(BODY_LENGTH, pos); pos += 4; // DomainNameOffset + + buf.writeUInt16LE(usernameBytes.length, pos); pos += 2; // UserNameLen + buf.writeUInt16LE(usernameBytes.length, pos); pos += 2; // UserNameMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length, pos); pos += 4; // UserNameOffset + + buf.writeUInt16LE(workstationBytes.length, pos); pos += 2; // WorkstationLen + buf.writeUInt16LE(workstationBytes.length, pos); pos += 2; // WorkstationMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length, pos); pos += 4; // WorkstationOffset + + buf.writeUInt16LE(encryptedRandomSessionKeyBytes.length, pos); pos += 2; // EncryptedRandomSessionKeyLen + buf.writeUInt16LE(encryptedRandomSessionKeyBytes.length, pos); pos += 2; // EncryptedRandomSessionKeyMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length + lmChallengeResponse.length + ntChallengeResponse.length, pos); pos += 4; // EncryptedRandomSessionKeyOffset + + buf.writeUInt32LE(typeflags.NTLM_TYPE2_FLAGS, pos); pos += 4; // NegotiateFlags + + buf.writeUInt8(5, pos); pos++; // 
ProductMajorVersion + buf.writeUInt8(1, pos); pos++; // ProductMinorVersion + buf.writeUInt16LE(2600, pos); pos += 2; // ProductBuild + buf.writeUInt8(0, pos); pos++; // VersionReserved1 + buf.writeUInt8(0, pos); pos++; // VersionReserved2 + buf.writeUInt8(0, pos); pos++; // VersionReserved3 + buf.writeUInt8(15, pos); pos++; // NTLMRevisionCurrent + + domainNameBytes.copy(buf, pos); pos += domainNameBytes.length; + usernameBytes.copy(buf, pos); pos += usernameBytes.length; + workstationBytes.copy(buf, pos); pos += workstationBytes.length; + lmChallengeResponse.copy(buf, pos); pos += lmChallengeResponse.length; + ntChallengeResponse.copy(buf, pos); pos += ntChallengeResponse.length; + encryptedRandomSessionKeyBytes.copy(buf, pos); pos += encryptedRandomSessionKeyBytes.length; + + return 'NTLM ' + buf.toString('base64'); +} + +function create_LM_hashed_password_v1(password){ + // fix the password length to 14 bytes + password = password.toUpperCase(); + var passwordBytes = new Buffer(password, 'ascii'); + + var passwordBytesPadded = new Buffer(14); + passwordBytesPadded.fill("\0"); + var sourceEnd = 14; + if(passwordBytes.length < 14) sourceEnd = passwordBytes.length; + passwordBytes.copy(passwordBytesPadded, 0, 0, sourceEnd); + + // split into 2 parts of 7 bytes: + var firstPart = passwordBytesPadded.slice(0,7); + var secondPart = passwordBytesPadded.slice(7); + + function encrypt(buf){ + var key = insertZerosEvery7Bits(buf); + var des = crypto.createCipheriv('DES-ECB', key, ''); + return des.update("KGS!@#$%"); // page 57 in [MS-NLMP]); + } + + var firstPartEncrypted = encrypt(firstPart); + var secondPartEncrypted = encrypt(secondPart); + + return Buffer.concat([firstPartEncrypted, secondPartEncrypted]); +} + +function insertZerosEvery7Bits(buf){ + var binaryArray = bytes2binaryArray(buf); + var newBinaryArray = []; + for(var i=0; i array.length) + break; + + var binString1 = '' + array[i] + '' + array[i+1] + '' + array[i+2] + '' + array[i+3]; + var binString2 = '' + array[i+4] + '' + array[i+5] + '' + array[i+6] + '' + array[i+7]; + var hexchar1 = binary2hex[binString1]; + var hexchar2 = binary2hex[binString2]; + + var buf = new Buffer(hexchar1 + '' + hexchar2, 'hex'); + bufArray.push(buf); + } + + return Buffer.concat(bufArray); +} + +function create_NT_hashed_password_v1(password){ + var buf = new Buffer(password, 'utf16le'); + var md4 = crypto.createHash('md4'); + md4.update(buf); + return new Buffer(md4.digest()); +} + +function calc_resp(password_hash, server_challenge){ + // padding with zeros to make the hash 21 bytes long + var passHashPadded = new Buffer(21); + passHashPadded.fill("\0"); + password_hash.copy(passHashPadded, 0, 0, password_hash.length); + + var resArray = []; + + var des = crypto.createCipheriv('DES-ECB', insertZerosEvery7Bits(passHashPadded.slice(0,7)), ''); + resArray.push( des.update(server_challenge.slice(0,8)) ); + + des = crypto.createCipheriv('DES-ECB', insertZerosEvery7Bits(passHashPadded.slice(7,14)), ''); + resArray.push( des.update(server_challenge.slice(0,8)) ); + + des = crypto.createCipheriv('DES-ECB', insertZerosEvery7Bits(passHashPadded.slice(14,21)), ''); + resArray.push( des.update(server_challenge.slice(0,8)) ); + + return Buffer.concat(resArray); +} + +function ntlm2sr_calc_resp(responseKeyNT, serverChallenge, clientChallenge){ + // padding with zeros to make the hash 16 bytes longer + var lmChallengeResponse = new Buffer(clientChallenge.length + 16); + lmChallengeResponse.fill("\0"); + clientChallenge.copy(lmChallengeResponse, 0, 0, 
clientChallenge.length); + + var buf = Buffer.concat([serverChallenge, clientChallenge]); + var md5 = crypto.createHash('md5'); + md5.update(buf); + var sess = md5.digest(); + var ntChallengeResponse = calc_resp(responseKeyNT, sess.slice(0,8)); + + return { + lmChallengeResponse: lmChallengeResponse, + ntChallengeResponse: ntChallengeResponse + }; +} + +exports.createType1Message = createType1Message; +exports.parseType2Message = parseType2Message; +exports.createType3Message = createType3Message; + + + + + +/***/ }), + +/***/ 443: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const io = __importStar(__webpack_require__(1)); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +const uuidV4 = __importStar(__webpack_require__(826)); +const constants_1 = __webpack_require__(694); +// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === "win32"; + let tempDirectory = process.env["RUNNER_TEMP"] || ""; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env["USERPROFILE"] || "C:\\"; + } + else { + if (process.platform === "darwin") { + baseLocation = "/Users"; + } + else { + baseLocation = "/home"; + } + } + tempDirectory = path.join(baseLocation, "actions", "temp"); + } + const dest = path.join(tempDirectory, uuidV4.default()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function isExactKeyMatch(key, cacheResult) { + return !!(cacheResult && + cacheResult.cacheKey && + cacheResult.cacheKey.localeCompare(key, undefined, { + sensitivity: "accent" + }) === 0); +} +exports.isExactKeyMatch = isExactKeyMatch; +function setOutputAndState(key, cacheResult) { + setCacheHitOutput(isExactKeyMatch(key, cacheResult)); + // Store the cache result if it exists + cacheResult && setCacheState(cacheResult); +} +exports.setOutputAndState = setOutputAndState; +function getCacheState() { + const stateData = core.getState(constants_1.State.CacheResult); + core.debug(`State: ${stateData}`); + return (stateData && JSON.parse(stateData)); +} +exports.getCacheState = getCacheState; +function setCacheState(state) { + core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); +} 
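+// Note on the state round-trip implemented by setCacheState/getCacheState above: saveState
+// issues a `save-state` workflow command (see the core module bundled below), the runner
+// re-exposes that value to this action's post step as the STATE_CACHE_RESULT environment
+// variable, and getCacheState reads it back via core.getState and JSON.parse.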
+exports.setCacheState = setCacheState; +function setCacheHitOutput(isCacheHit) { + core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); +} +exports.setCacheHitOutput = setCacheHitOutput; +function resolvePath(filePath) { + if (filePath[0] === "~") { + const home = os.homedir(); + if (!home) { + throw new Error("Unable to resolve `~` to HOME"); + } + return path.join(home, filePath.slice(1)); + } + return path.resolve(filePath); +} +exports.resolvePath = resolvePath; + + +/***/ }), + +/***/ 470: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const command_1 = __webpack_require__(431); +const os = __webpack_require__(87); +const path = __webpack_require__(622); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable + */ +function exportVariable(name, val) { + process.env[name] = val; + command_1.issueCommand('set-env', { name }, val); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + command_1.issueCommand('add-path', {}, inputPath); + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. The value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Sets the value of an output. 
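+ * Under the hood this writes a `set-output` workflow command to stdout via issueCommand
+ * (see the bundled command module above), which the runner parses into the step's outputs.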
+ * + * @param name name of the output to set + * @param value value to store + */ +function setOutput(name, value) { + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message + */ +function error(message) { + command_1.issue('error', message); +} +exports.error = error; +/** + * Adds an warning issue + * @param message warning issue message + */ +function warning(message) { + command_1.issue('warning', message); +} +exports.warning = warning; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store + */ +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 525: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
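+// Overview of the handler below: NTLM authentication is a three-message handshake. The client
+// sends a Type 1 (negotiate) message, the server replies 401 with a Type 2 (challenge) in its
+// www-authenticate header, and the client answers with a Type 3 (authenticate) message derived
+// from that challenge and the user's credentials. Both legs must travel over the same
+// keep-alive connection, which is why the code below installs keepAlive agents and chains the
+// two requests with setImmediate.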
+Object.defineProperty(exports, "__esModule", { value: true }); +const http = __webpack_require__(605); +const https = __webpack_require__(211); +const _ = __webpack_require__(891); +const ntlm = __webpack_require__(432); +class NtlmCredentialHandler { + constructor(username, password, workstation, domain) { + this._ntlmOptions = {}; + this._ntlmOptions.username = username; + this._ntlmOptions.password = password; + if (domain !== undefined) { + this._ntlmOptions.domain = domain; + } + else { + this._ntlmOptions.domain = ''; + } + if (workstation !== undefined) { + this._ntlmOptions.workstation = workstation; + } + else { + this._ntlmOptions.workstation = ''; + } + } + prepareRequest(options) { + // No headers or options need to be set. We keep the credentials on the handler itself. + // If a (proxy) agent is set, remove it as we don't support proxy for NTLM at this time + if (options.agent) { + delete options.agent; + } + } + canHandleAuthentication(response) { + if (response && response.message && response.message.statusCode === 401) { + // Ensure that we're talking NTLM here + // Once we have the www-authenticate header, split it so we can ensure we can talk NTLM + const wwwAuthenticate = response.message.headers['www-authenticate']; + if (wwwAuthenticate) { + const mechanisms = wwwAuthenticate.split(', '); + const index = mechanisms.indexOf("NTLM"); + if (index >= 0) { + return true; + } + } + } + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return new Promise((resolve, reject) => { + const callbackForResult = function (err, res) { + if (err) { + reject(err); + } + // We have to readbody on the response before continuing otherwise there is a hang. + res.readBody().then(() => { + resolve(res); + }); + }; + this.handleAuthenticationPrivate(httpClient, requestInfo, objs, callbackForResult); + }); + } + handleAuthenticationPrivate(httpClient, requestInfo, objs, finalCallback) { + // Set up the headers for NTLM authentication + requestInfo.options = _.extend(requestInfo.options, { + username: this._ntlmOptions.username, + password: this._ntlmOptions.password, + domain: this._ntlmOptions.domain, + workstation: this._ntlmOptions.workstation + }); + if (httpClient.isSsl === true) { + requestInfo.options.agent = new https.Agent({ keepAlive: true }); + } + else { + requestInfo.options.agent = new http.Agent({ keepAlive: true }); + } + let self = this; + // The following pattern of sending the type1 message following immediately (in a setImmediate) is + // critical for the NTLM exchange to happen. If we removed setImmediate (or call in a different manner) + // the NTLM exchange will always fail with a 401. + this.sendType1Message(httpClient, requestInfo, objs, function (err, res) { + if (err) { + return finalCallback(err, null, null); + } + /// We have to readbody on the response before continuing otherwise there is a hang. + res.readBody().then(() => { + // It is critical that we have setImmediate here due to how connection requests are queued. + // If setImmediate is removed then the NTLM handshake will not work. + // setImmediate allows us to queue a second request on the same connection. If this second + // request is not queued on the connection when the first request finishes then node closes + // the connection. NTLM requires both requests to be on the same connection so we need this. 
+ setImmediate(function () { + self.sendType3Message(httpClient, requestInfo, objs, res, finalCallback); + }); + }); + }); + } + // The following method is an adaptation of code found at https://github.com/SamDecrock/node-http-ntlm/blob/master/httpntlm.js + sendType1Message(httpClient, requestInfo, objs, finalCallback) { + const type1msg = ntlm.createType1Message(this._ntlmOptions); + const type1options = { + headers: { + 'Connection': 'keep-alive', + 'Authorization': type1msg + }, + timeout: requestInfo.options.timeout || 0, + agent: requestInfo.httpModule, + }; + const type1info = {}; + type1info.httpModule = requestInfo.httpModule; + type1info.parsedUrl = requestInfo.parsedUrl; + type1info.options = _.extend(type1options, _.omit(requestInfo.options, 'headers')); + return httpClient.requestRawWithCallback(type1info, objs, finalCallback); + } + // The following method is an adaptation of code found at https://github.com/SamDecrock/node-http-ntlm/blob/master/httpntlm.js + sendType3Message(httpClient, requestInfo, objs, res, callback) { + if (!res.message.headers && !res.message.headers['www-authenticate']) { + throw new Error('www-authenticate not found on response of second request'); + } + const type2msg = ntlm.parseType2Message(res.message.headers['www-authenticate']); + const type3msg = ntlm.createType3Message(type2msg, this._ntlmOptions); + const type3options = { + headers: { + 'Authorization': type3msg, + 'Connection': 'Close' + }, + agent: requestInfo.httpModule, + }; + const type3info = {}; + type3info.httpModule = requestInfo.httpModule; + type3info.parsedUrl = requestInfo.parsedUrl; + type3options.headers = _.extend(type3options.headers, requestInfo.options.headers); + type3info.options = _.extend(type3options, _.omit(requestInfo.options, 'headers')); + return httpClient.requestRawWithCallback(type3info, objs, callback); + } +} +exports.NtlmCredentialHandler = NtlmCredentialHandler; + + +/***/ }), + +/***/ 571: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +Object.defineProperty(exports, "__esModule", { value: true }); +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Bearer ' + this.token; + options.headers['X-TFS-FedAuthRedirect'] = 'Suppress'; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; + + +/***/ }), + +/***/ 605: +/***/ (function(module) { + +module.exports = require("http"); + +/***/ }), + +/***/ 614: +/***/ (function(module) { + +module.exports = require("events"); + +/***/ }), + +/***/ 622: +/***/ (function(module) { + +module.exports = require("path"); + +/***/ }), + +/***/ 631: +/***/ (function(module) { + +module.exports = require("net"); + +/***/ }), + +/***/ 669: +/***/ (function(module) { + +module.exports = require("util"); + +/***/ }), + +/***/ 672: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = __webpack_require__(357); +const fs = __webpack_require__(747); +const path = __webpack_require__(622); +_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +exports.IS_WINDOWS = process.platform === 'win32'; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Recursively create a directory at `fsPath`. + * + * This implementation is optimistic, meaning it attempts to create the full + * path first, and backs up the path stack from there. + * + * @param fsPath The path to create + * @param maxDepth The maximum recursion depth + * @param depth The current recursion depth + */ +function mkdirP(fsPath, maxDepth = 1000, depth = 1) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + fsPath = path.resolve(fsPath); + if (depth >= maxDepth) + return exports.mkdir(fsPath); + try { + yield exports.mkdir(fsPath); + return; + } + catch (err) { + switch (err.code) { + case 'ENOENT': { + yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); + yield exports.mkdir(fsPath); + return; + } + default: { + let stats; + try { + stats = yield exports.stat(fsPath); + } + catch (err2) { + throw err; + } + if (!stats.isDirectory()) + throw err; + } + } + } + }); +} +exports.mkdirP = mkdirP; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. 
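+ * For illustration (hypothetical paths): on Windows, with extensions ['.EXE', '.CMD'], a query
+ * for 'C:\tools\node' would resolve to 'C:\tools\node.exe', returned in its on-disk casing,
+ * when that file exists.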
+ */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 694: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +var Inputs; +(function (Inputs) { + Inputs.Key = "key"; + Inputs.Path = "path"; + Inputs.RestoreKeys = "restore-keys"; +})(Inputs = exports.Inputs || (exports.Inputs = {})); +var Outputs; +(function (Outputs) { + Outputs.CacheHit = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); +var State; +(function (State) { + State.CacheKey = "CACHE_KEY"; + State.CacheResult = "CACHE_RESULT"; +})(State = exports.State || (exports.State = {})); + + +/***/ }), + +/***/ 722: +/***/ (function(module) { + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex = []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to 
fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]]]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 729: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +Object.defineProperty(exports, "__esModule", { value: true }); +const url = __webpack_require__(835); +const path = __webpack_require__(622); +/** + * creates an url from a request url and optional base url (http://server:8080) + * @param {string} resource - a fully qualified url or relative path + * @param {string} baseUrl - an optional baseUrl (http://server:8080) + * @return {string} - resultant url + */ +function getUrl(resource, baseUrl) { + const pathApi = path.posix || path; + if (!baseUrl) { + return resource; + } + else if (!resource) { + return baseUrl; + } + else { + const base = url.parse(baseUrl); + const resultantUrl = url.parse(resource); + // resource (specific per request) elements take priority + resultantUrl.protocol = resultantUrl.protocol || base.protocol; + resultantUrl.auth = resultantUrl.auth || base.auth; + resultantUrl.host = resultantUrl.host || base.host; + resultantUrl.pathname = pathApi.resolve(base.pathname, resultantUrl.pathname); + if (!resultantUrl.pathname.endsWith('/') && resource.endsWith('/')) { + resultantUrl.pathname += '/'; + } + return url.format(resultantUrl); + } +} +exports.getUrl = getUrl; + + +/***/ }), + +/***/ 747: +/***/ (function(module) { + +module.exports = require("fs"); + +/***/ }), + +/***/ 778: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const exec_1 = __webpack_require__(986); +const io = __importStar(__webpack_require__(1)); +const fs = __importStar(__webpack_require__(747)); +const path = __importStar(__webpack_require__(622)); +const cacheHttpClient = __importStar(__webpack_require__(154)); +const constants_1 = __webpack_require__(694); +const utils = __importStar(__webpack_require__(443)); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + // Validate inputs, this can cause task failure + let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); + core.debug(`Cache Path: ${cachePath}`); + const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true }); + core.saveState(constants_1.State.CacheKey, primaryKey); + const restoreKeys = core.getInput(constants_1.Inputs.RestoreKeys).split("\n"); + const keys = [primaryKey, ...restoreKeys]; + core.debug("Resolved Keys:"); + core.debug(JSON.stringify(keys)); + if (keys.length > 10) { + core.setFailed(`Key Validation Error: Keys are limited to a maximum of 10.`); + return; + } + for (const key of keys) { + if (key.length > 512) { + core.setFailed(`Key Validation Error: ${key} cannot be larger than 512 characters.`); + return; + } + const regex = /^[^,]*$/; + if (!regex.test(key)) { + core.setFailed(`Key Validation Error: ${key} cannot contain commas.`); + return; + } + } + try { + const cacheEntry = yield cacheHttpClient.getCacheEntry(keys); + if (!cacheEntry) { + core.info(`Cache not found for input keys: ${JSON.stringify(keys)}.`); + return; + } + let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); + core.debug(`Archive Path: ${archivePath}`); + // Store the cache result + utils.setCacheState(cacheEntry); + // Download the cache from the cache entry + yield cacheHttpClient.downloadCache(cacheEntry, archivePath); + io.mkdirP(cachePath); + // http://man7.org/linux/man-pages/man1/tar.1.html + // tar [-options] [files or directories which to add into archive] + const args = ["-xz"]; + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + args.push("--force-local"); + archivePath = archivePath.replace(/\\/g, "/"); + cachePath = cachePath.replace(/\\/g, "/"); + } + args.push(...["-f", archivePath, "-C", cachePath]); + const tarPath = yield io.which("tar", true); + core.debug(`Tar Path: ${tarPath}`); + const archiveFileSize = fs.statSync(archivePath).size; + core.debug(`File Size: ${archiveFileSize}`); + yield exec_1.exec(`"${tarPath}"`, args); + const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry); + utils.setCacheHitOutput(isExactKeyMatch); + core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`); + } + catch (error) { + core.warning(error.message); + utils.setCacheHitOutput(false); + } + } + catch (error) { + core.setFailed(error.message); + } + }); +} +run(); +exports.default = run; + + +/***/ }), + +/***/ 826: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var 
rng = __webpack_require__(139); +var bytesToUuid = __webpack_require__(722); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + +/***/ }), + +/***/ 835: +/***/ (function(module) { + +module.exports = require("url"); + +/***/ }), + +/***/ 874: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const url = __webpack_require__(835); +const http = __webpack_require__(605); +const https = __webpack_require__(211); +let fs; +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = 
exports.HttpCodes || (exports.HttpCodes = {})); +const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect]; +const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + let output = ''; + this.message.on('data', (chunk) => { + output += chunk; + }); + this.message.on('end', () => { + resolve(output); + }); + })); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = url.parse(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +var EnvironmentVariables; +(function (EnvironmentVariables) { + EnvironmentVariables["HTTP_PROXY"] = "HTTP_PROXY"; + EnvironmentVariables["HTTPS_PROXY"] = "HTTPS_PROXY"; +})(EnvironmentVariables || (EnvironmentVariables = {})); +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + this._httpProxy = requestOptions.proxy; + if (requestOptions.proxy && requestOptions.proxy.proxyBypassHosts) { + this._httpProxyBypassHosts = []; + requestOptions.proxy.proxyBypassHosts.forEach(bypass => { + this._httpProxyBypassHosts.push(new RegExp(bypass, 'i')); + }); + } + this._certConfig = requestOptions.cert; + if (this._certConfig) { + // If using cert, need fs + fs = __webpack_require__(747); + // cache the cert content into memory, so we don't have to read it from disk every time + if (this._certConfig.caFile && fs.existsSync(this._certConfig.caFile)) { + this._ca = fs.readFileSync(this._certConfig.caFile, 'utf8'); + } + if (this._certConfig.certFile && fs.existsSync(this._certConfig.certFile)) { + this._cert = fs.readFileSync(this._certConfig.certFile, 'utf8'); + } + if (this._certConfig.keyFile && fs.existsSync(this._certConfig.keyFile)) { + this._key = fs.readFileSync(this._certConfig.keyFile, 'utf8'); + } + } + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return 
this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error("Client has already been disposed."); + } + let info = this._prepareRequest(verb, requestUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 + && this._allowRedirects + && redirectsRemaining > 0) { + const redirectUrl = response.message.headers["location"]; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, redirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + let isDataString = typeof (data) === 'string'; + if (typeof (data) === 'string') { + info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', (sock) => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof (data) === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof (data) !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = url.parse(requestUrl); + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort; + info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + info.options.headers["user-agent"] = this.userAgent; + info.options.agent = this._getAgent(requestUrl); + // gives handlers an opportunity to participate + if (this.handlers && !this._isPresigned(requestUrl)) { + this.handlers.forEach((handler) => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _isPresigned(requestUrl) { + if (this.requestOptions && this.requestOptions.presignedUrlPatterns) { + const patterns = this.requestOptions.presignedUrlPatterns; + for (let i = 0; i < patterns.length; i++) { + if (requestUrl.match(patterns[i])) { + return true; + } + } + } + return false; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getAgent(requestUrl) { + let agent; + let proxy = this._getProxy(requestUrl); + let useProxy = proxy.proxyUrl && proxy.proxyUrl.hostname && !this._isBypassProxy(requestUrl); + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
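+        // (i.e. the keep-alive or proxy tunnel agent cached on this client by an earlier request)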
+ if (!!agent) { + return agent; + } + let parsedUrl = url.parse(requestUrl); + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __webpack_require__(413); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + proxyAuth: proxy.proxyAuth, + host: proxy.proxyUrl.hostname, + port: proxy.proxyUrl.port + }, + }; + let tunnelAgent; + const overHttps = proxy.proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); + } + if (usingSsl && this._certConfig) { + agent.options = Object.assign(agent.options || {}, { ca: this._ca, cert: this._cert, key: this._key, passphrase: this._certConfig.passphrase }); + } + return agent; + } + _getProxy(requestUrl) { + const parsedUrl = url.parse(requestUrl); + let usingSsl = parsedUrl.protocol === 'https:'; + let proxyConfig = this._httpProxy; + // fallback to http_proxy and https_proxy env + let https_proxy = process.env[EnvironmentVariables.HTTPS_PROXY]; + let http_proxy = process.env[EnvironmentVariables.HTTP_PROXY]; + if (!proxyConfig) { + if (https_proxy && usingSsl) { + proxyConfig = { + proxyUrl: https_proxy + }; + } + else if (http_proxy) { + proxyConfig = { + proxyUrl: http_proxy + }; + } + } + let proxyUrl; + let proxyAuth; + if (proxyConfig) { + if (proxyConfig.proxyUrl.length > 0) { + proxyUrl = url.parse(proxyConfig.proxyUrl); + } + if (proxyConfig.proxyUsername || proxyConfig.proxyPassword) { + proxyAuth = proxyConfig.proxyUsername + ":" + proxyConfig.proxyPassword; + } + } + return { proxyUrl: proxyUrl, proxyAuth: proxyAuth }; + } + _isBypassProxy(requestUrl) { + if (!this._httpProxyBypassHosts) { + return false; + } + let bypass = false; + this._httpProxyBypassHosts.forEach(bypassHost => { + if (bypassHost.test(requestUrl)) { + bypass = true; + } + }); + return bypass; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } +} +exports.HttpClient = HttpClient; + + +/***/ }), + +/***/ 891: +/***/ (function(module, exports) { + +// Underscore.js 1.8.3 +// http://underscorejs.org +// (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & 
Editors +// Underscore may be freely distributed under the MIT license. + +(function() { + + // Baseline setup + // -------------- + + // Establish the root object, `window` in the browser, or `exports` on the server. + var root = this; + + // Save the previous value of the `_` variable. + var previousUnderscore = root._; + + // Save bytes in the minified (but not gzipped) version: + var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; + + // Create quick reference variables for speed access to core prototypes. + var + push = ArrayProto.push, + slice = ArrayProto.slice, + toString = ObjProto.toString, + hasOwnProperty = ObjProto.hasOwnProperty; + + // All **ECMAScript 5** native function implementations that we hope to use + // are declared here. + var + nativeIsArray = Array.isArray, + nativeKeys = Object.keys, + nativeBind = FuncProto.bind, + nativeCreate = Object.create; + + // Naked function reference for surrogate-prototype-swapping. + var Ctor = function(){}; + + // Create a safe reference to the Underscore object for use below. + var _ = function(obj) { + if (obj instanceof _) return obj; + if (!(this instanceof _)) return new _(obj); + this._wrapped = obj; + }; + + // Export the Underscore object for **Node.js**, with + // backwards-compatibility for the old `require()` API. If we're in + // the browser, add `_` as a global object. + if (true) { + if ( true && module.exports) { + exports = module.exports = _; + } + exports._ = _; + } else {} + + // Current version. + _.VERSION = '1.8.3'; + + // Internal function that returns an efficient (for current engines) version + // of the passed-in callback, to be repeatedly applied in other Underscore + // functions. + var optimizeCb = function(func, context, argCount) { + if (context === void 0) return func; + switch (argCount == null ? 3 : argCount) { + case 1: return function(value) { + return func.call(context, value); + }; + case 2: return function(value, other) { + return func.call(context, value, other); + }; + case 3: return function(value, index, collection) { + return func.call(context, value, index, collection); + }; + case 4: return function(accumulator, value, index, collection) { + return func.call(context, accumulator, value, index, collection); + }; + } + return function() { + return func.apply(context, arguments); + }; + }; + + // A mostly-internal function to generate callbacks that can be applied + // to each element in a collection, returning the desired result — either + // identity, an arbitrary callback, a property matcher, or a property accessor. + var cb = function(value, context, argCount) { + if (value == null) return _.identity; + if (_.isFunction(value)) return optimizeCb(value, context, argCount); + if (_.isObject(value)) return _.matcher(value); + return _.property(value); + }; + _.iteratee = function(value, context) { + return cb(value, context, Infinity); + }; + + // An internal function for creating assigner functions. + var createAssigner = function(keysFunc, undefinedOnly) { + return function(obj) { + var length = arguments.length; + if (length < 2 || obj == null) return obj; + for (var index = 1; index < length; index++) { + var source = arguments[index], + keys = keysFunc(source), + l = keys.length; + for (var i = 0; i < l; i++) { + var key = keys[i]; + if (!undefinedOnly || obj[key] === void 0) obj[key] = source[key]; + } + } + return obj; + }; + }; + + // An internal function for creating a new object that inherits from another. 
+ var baseCreate = function(prototype) { + if (!_.isObject(prototype)) return {}; + if (nativeCreate) return nativeCreate(prototype); + Ctor.prototype = prototype; + var result = new Ctor; + Ctor.prototype = null; + return result; + }; + + var property = function(key) { + return function(obj) { + return obj == null ? void 0 : obj[key]; + }; + }; + + // Helper for collection methods to determine whether a collection + // should be iterated as an array or as an object + // Related: http://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength + // Avoids a very nasty iOS 8 JIT bug on ARM-64. #2094 + var MAX_ARRAY_INDEX = Math.pow(2, 53) - 1; + var getLength = property('length'); + var isArrayLike = function(collection) { + var length = getLength(collection); + return typeof length == 'number' && length >= 0 && length <= MAX_ARRAY_INDEX; + }; + + // Collection Functions + // -------------------- + + // The cornerstone, an `each` implementation, aka `forEach`. + // Handles raw objects in addition to array-likes. Treats all + // sparse array-likes as if they were dense. + _.each = _.forEach = function(obj, iteratee, context) { + iteratee = optimizeCb(iteratee, context); + var i, length; + if (isArrayLike(obj)) { + for (i = 0, length = obj.length; i < length; i++) { + iteratee(obj[i], i, obj); + } + } else { + var keys = _.keys(obj); + for (i = 0, length = keys.length; i < length; i++) { + iteratee(obj[keys[i]], keys[i], obj); + } + } + return obj; + }; + + // Return the results of applying the iteratee to each element. + _.map = _.collect = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + results = Array(length); + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + results[index] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Create a reducing function iterating left or right. + function createReduce(dir) { + // Optimized iterator function as using arguments.length + // in the main function will deoptimize the, see #1991. + function iterator(obj, iteratee, memo, keys, index, length) { + for (; index >= 0 && index < length; index += dir) { + var currentKey = keys ? keys[index] : index; + memo = iteratee(memo, obj[currentKey], currentKey, obj); + } + return memo; + } + + return function(obj, iteratee, memo, context) { + iteratee = optimizeCb(iteratee, context, 4); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + index = dir > 0 ? 0 : length - 1; + // Determine the initial value if none is provided. + if (arguments.length < 3) { + memo = obj[keys ? keys[index] : index]; + index += dir; + } + return iterator(obj, iteratee, memo, keys, index, length); + }; + } + + // **Reduce** builds up a single result from a list of values, aka `inject`, + // or `foldl`. + _.reduce = _.foldl = _.inject = createReduce(1); + + // The right-associative version of reduce, also known as `foldr`. + _.reduceRight = _.foldr = createReduce(-1); + + // Return the first value which passes a truth test. Aliased as `detect`. + _.find = _.detect = function(obj, predicate, context) { + var key; + if (isArrayLike(obj)) { + key = _.findIndex(obj, predicate, context); + } else { + key = _.findKey(obj, predicate, context); + } + if (key !== void 0 && key !== -1) return obj[key]; + }; + + // Return all the elements that pass a truth test. + // Aliased as `select`. 
+ _.filter = _.select = function(obj, predicate, context) { + var results = []; + predicate = cb(predicate, context); + _.each(obj, function(value, index, list) { + if (predicate(value, index, list)) results.push(value); + }); + return results; + }; + + // Return all the elements for which a truth test fails. + _.reject = function(obj, predicate, context) { + return _.filter(obj, _.negate(cb(predicate)), context); + }; + + // Determine whether all of the elements match a truth test. + // Aliased as `all`. + _.every = _.all = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (!predicate(obj[currentKey], currentKey, obj)) return false; + } + return true; + }; + + // Determine if at least one element in the object matches a truth test. + // Aliased as `any`. + _.some = _.any = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (predicate(obj[currentKey], currentKey, obj)) return true; + } + return false; + }; + + // Determine if the array or object contains a given item (using `===`). + // Aliased as `includes` and `include`. + _.contains = _.includes = _.include = function(obj, item, fromIndex, guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + if (typeof fromIndex != 'number' || guard) fromIndex = 0; + return _.indexOf(obj, item, fromIndex) >= 0; + }; + + // Invoke a method (with arguments) on every item in a collection. + _.invoke = function(obj, method) { + var args = slice.call(arguments, 2); + var isFunc = _.isFunction(method); + return _.map(obj, function(value) { + var func = isFunc ? method : value[method]; + return func == null ? func : func.apply(value, args); + }); + }; + + // Convenience version of a common use case of `map`: fetching a property. + _.pluck = function(obj, key) { + return _.map(obj, _.property(key)); + }; + + // Convenience version of a common use case of `filter`: selecting only objects + // containing specific `key:value` pairs. + _.where = function(obj, attrs) { + return _.filter(obj, _.matcher(attrs)); + }; + + // Convenience version of a common use case of `find`: getting the first object + // containing specific `key:value` pairs. + _.findWhere = function(obj, attrs) { + return _.find(obj, _.matcher(attrs)); + }; + + // Return the maximum element (or element-based computation). + _.max = function(obj, iteratee, context) { + var result = -Infinity, lastComputed = -Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value > result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed > lastComputed || computed === -Infinity && result === -Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Return the minimum element (or element-based computation). + _.min = function(obj, iteratee, context) { + var result = Infinity, lastComputed = Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? 
obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value < result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed < lastComputed || computed === Infinity && result === Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Shuffle a collection, using the modern version of the + // [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle). + _.shuffle = function(obj) { + var set = isArrayLike(obj) ? obj : _.values(obj); + var length = set.length; + var shuffled = Array(length); + for (var index = 0, rand; index < length; index++) { + rand = _.random(0, index); + if (rand !== index) shuffled[index] = shuffled[rand]; + shuffled[rand] = set[index]; + } + return shuffled; + }; + + // Sample **n** random values from a collection. + // If **n** is not specified, returns a single random element. + // The internal `guard` argument allows it to work with `map`. + _.sample = function(obj, n, guard) { + if (n == null || guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + return obj[_.random(obj.length - 1)]; + } + return _.shuffle(obj).slice(0, Math.max(0, n)); + }; + + // Sort the object's values by a criterion produced by an iteratee. + _.sortBy = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + return _.pluck(_.map(obj, function(value, index, list) { + return { + value: value, + index: index, + criteria: iteratee(value, index, list) + }; + }).sort(function(left, right) { + var a = left.criteria; + var b = right.criteria; + if (a !== b) { + if (a > b || a === void 0) return 1; + if (a < b || b === void 0) return -1; + } + return left.index - right.index; + }), 'value'); + }; + + // An internal function used for aggregate "group by" operations. + var group = function(behavior) { + return function(obj, iteratee, context) { + var result = {}; + iteratee = cb(iteratee, context); + _.each(obj, function(value, index) { + var key = iteratee(value, index, obj); + behavior(result, value, key); + }); + return result; + }; + }; + + // Groups the object's values by a criterion. Pass either a string attribute + // to group by, or a function that returns the criterion. + _.groupBy = group(function(result, value, key) { + if (_.has(result, key)) result[key].push(value); else result[key] = [value]; + }); + + // Indexes the object's values by a criterion, similar to `groupBy`, but for + // when you know that your index values will be unique. + _.indexBy = group(function(result, value, key) { + result[key] = value; + }); + + // Counts instances of an object that group by a certain criterion. Pass + // either a string attribute to count by, or a function that returns the + // criterion. + _.countBy = group(function(result, value, key) { + if (_.has(result, key)) result[key]++; else result[key] = 1; + }); + + // Safely create a real, live array from anything iterable. + _.toArray = function(obj) { + if (!obj) return []; + if (_.isArray(obj)) return slice.call(obj); + if (isArrayLike(obj)) return _.map(obj, _.identity); + return _.values(obj); + }; + + // Return the number of elements in an object. + _.size = function(obj) { + if (obj == null) return 0; + return isArrayLike(obj) ? 
obj.length : _.keys(obj).length; + }; + + // Split a collection into two arrays: one whose elements all satisfy the given + // predicate, and one whose elements all do not satisfy the predicate. + _.partition = function(obj, predicate, context) { + predicate = cb(predicate, context); + var pass = [], fail = []; + _.each(obj, function(value, key, obj) { + (predicate(value, key, obj) ? pass : fail).push(value); + }); + return [pass, fail]; + }; + + // Array Functions + // --------------- + + // Get the first element of an array. Passing **n** will return the first N + // values in the array. Aliased as `head` and `take`. The **guard** check + // allows it to work with `_.map`. + _.first = _.head = _.take = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[0]; + return _.initial(array, array.length - n); + }; + + // Returns everything but the last entry of the array. Especially useful on + // the arguments object. Passing **n** will return all the values in + // the array, excluding the last N. + _.initial = function(array, n, guard) { + return slice.call(array, 0, Math.max(0, array.length - (n == null || guard ? 1 : n))); + }; + + // Get the last element of an array. Passing **n** will return the last N + // values in the array. + _.last = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[array.length - 1]; + return _.rest(array, Math.max(0, array.length - n)); + }; + + // Returns everything but the first entry of the array. Aliased as `tail` and `drop`. + // Especially useful on the arguments object. Passing an **n** will return + // the rest N values in the array. + _.rest = _.tail = _.drop = function(array, n, guard) { + return slice.call(array, n == null || guard ? 1 : n); + }; + + // Trim out all falsy values from an array. + _.compact = function(array) { + return _.filter(array, _.identity); + }; + + // Internal implementation of a recursive `flatten` function. + var flatten = function(input, shallow, strict, startIndex) { + var output = [], idx = 0; + for (var i = startIndex || 0, length = getLength(input); i < length; i++) { + var value = input[i]; + if (isArrayLike(value) && (_.isArray(value) || _.isArguments(value))) { + //flatten current level of array or arguments object + if (!shallow) value = flatten(value, shallow, strict); + var j = 0, len = value.length; + output.length += len; + while (j < len) { + output[idx++] = value[j++]; + } + } else if (!strict) { + output[idx++] = value; + } + } + return output; + }; + + // Flatten out an array, either recursively (by default), or just one level. + _.flatten = function(array, shallow) { + return flatten(array, shallow, false); + }; + + // Return a version of the array that does not contain the specified value(s). + _.without = function(array) { + return _.difference(array, slice.call(arguments, 1)); + }; + + // Produce a duplicate-free version of the array. If the array has already + // been sorted, you have the option of using a faster algorithm. + // Aliased as `unique`. + _.uniq = _.unique = function(array, isSorted, iteratee, context) { + if (!_.isBoolean(isSorted)) { + context = iteratee; + iteratee = isSorted; + isSorted = false; + } + if (iteratee != null) iteratee = cb(iteratee, context); + var result = []; + var seen = []; + for (var i = 0, length = getLength(array); i < length; i++) { + var value = array[i], + computed = iteratee ? 
iteratee(value, i, array) : value; + if (isSorted) { + if (!i || seen !== computed) result.push(value); + seen = computed; + } else if (iteratee) { + if (!_.contains(seen, computed)) { + seen.push(computed); + result.push(value); + } + } else if (!_.contains(result, value)) { + result.push(value); + } + } + return result; + }; + + // Produce an array that contains the union: each distinct element from all of + // the passed-in arrays. + _.union = function() { + return _.uniq(flatten(arguments, true, true)); + }; + + // Produce an array that contains every item shared between all the + // passed-in arrays. + _.intersection = function(array) { + var result = []; + var argsLength = arguments.length; + for (var i = 0, length = getLength(array); i < length; i++) { + var item = array[i]; + if (_.contains(result, item)) continue; + for (var j = 1; j < argsLength; j++) { + if (!_.contains(arguments[j], item)) break; + } + if (j === argsLength) result.push(item); + } + return result; + }; + + // Take the difference between one array and a number of other arrays. + // Only the elements present in just the first array will remain. + _.difference = function(array) { + var rest = flatten(arguments, true, true, 1); + return _.filter(array, function(value){ + return !_.contains(rest, value); + }); + }; + + // Zip together multiple lists into a single array -- elements that share + // an index go together. + _.zip = function() { + return _.unzip(arguments); + }; + + // Complement of _.zip. Unzip accepts an array of arrays and groups + // each array's elements on shared indices + _.unzip = function(array) { + var length = array && _.max(array, getLength).length || 0; + var result = Array(length); + + for (var index = 0; index < length; index++) { + result[index] = _.pluck(array, index); + } + return result; + }; + + // Converts lists into objects. Pass either a single array of `[key, value]` + // pairs, or two parallel arrays of the same length -- one of keys, and one of + // the corresponding values. + _.object = function(list, values) { + var result = {}; + for (var i = 0, length = getLength(list); i < length; i++) { + if (values) { + result[list[i]] = values[i]; + } else { + result[list[i][0]] = list[i][1]; + } + } + return result; + }; + + // Generator function to create the findIndex and findLastIndex functions + function createPredicateIndexFinder(dir) { + return function(array, predicate, context) { + predicate = cb(predicate, context); + var length = getLength(array); + var index = dir > 0 ? 0 : length - 1; + for (; index >= 0 && index < length; index += dir) { + if (predicate(array[index], index, array)) return index; + } + return -1; + }; + } + + // Returns the first index on an array-like that passes a predicate test + _.findIndex = createPredicateIndexFinder(1); + _.findLastIndex = createPredicateIndexFinder(-1); + + // Use a comparator function to figure out the smallest index at which + // an object should be inserted so as to maintain order. Uses binary search. 
+ _.sortedIndex = function(array, obj, iteratee, context) { + iteratee = cb(iteratee, context, 1); + var value = iteratee(obj); + var low = 0, high = getLength(array); + while (low < high) { + var mid = Math.floor((low + high) / 2); + if (iteratee(array[mid]) < value) low = mid + 1; else high = mid; + } + return low; + }; + + // Generator function to create the indexOf and lastIndexOf functions + function createIndexFinder(dir, predicateFind, sortedIndex) { + return function(array, item, idx) { + var i = 0, length = getLength(array); + if (typeof idx == 'number') { + if (dir > 0) { + i = idx >= 0 ? idx : Math.max(idx + length, i); + } else { + length = idx >= 0 ? Math.min(idx + 1, length) : idx + length + 1; + } + } else if (sortedIndex && idx && length) { + idx = sortedIndex(array, item); + return array[idx] === item ? idx : -1; + } + if (item !== item) { + idx = predicateFind(slice.call(array, i, length), _.isNaN); + return idx >= 0 ? idx + i : -1; + } + for (idx = dir > 0 ? i : length - 1; idx >= 0 && idx < length; idx += dir) { + if (array[idx] === item) return idx; + } + return -1; + }; + } + + // Return the position of the first occurrence of an item in an array, + // or -1 if the item is not included in the array. + // If the array is large and already in sort order, pass `true` + // for **isSorted** to use binary search. + _.indexOf = createIndexFinder(1, _.findIndex, _.sortedIndex); + _.lastIndexOf = createIndexFinder(-1, _.findLastIndex); + + // Generate an integer Array containing an arithmetic progression. A port of + // the native Python `range()` function. See + // [the Python documentation](http://docs.python.org/library/functions.html#range). + _.range = function(start, stop, step) { + if (stop == null) { + stop = start || 0; + start = 0; + } + step = step || 1; + + var length = Math.max(Math.ceil((stop - start) / step), 0); + var range = Array(length); + + for (var idx = 0; idx < length; idx++, start += step) { + range[idx] = start; + } + + return range; + }; + + // Function (ahem) Functions + // ------------------ + + // Determines whether to execute a function as a constructor + // or a normal function with the provided arguments + var executeBound = function(sourceFunc, boundFunc, context, callingContext, args) { + if (!(callingContext instanceof boundFunc)) return sourceFunc.apply(context, args); + var self = baseCreate(sourceFunc.prototype); + var result = sourceFunc.apply(self, args); + if (_.isObject(result)) return result; + return self; + }; + + // Create a function bound to a given object (assigning `this`, and arguments, + // optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if + // available. + _.bind = function(func, context) { + if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1)); + if (!_.isFunction(func)) throw new TypeError('Bind must be called on a function'); + var args = slice.call(arguments, 2); + var bound = function() { + return executeBound(func, bound, context, this, args.concat(slice.call(arguments))); + }; + return bound; + }; + + // Partially apply a function by creating a version that has had some of its + // arguments pre-filled, without changing its dynamic `this` context. _ acts + // as a placeholder, allowing any combination of arguments to be pre-filled. 
+ _.partial = function(func) { + var boundArgs = slice.call(arguments, 1); + var bound = function() { + var position = 0, length = boundArgs.length; + var args = Array(length); + for (var i = 0; i < length; i++) { + args[i] = boundArgs[i] === _ ? arguments[position++] : boundArgs[i]; + } + while (position < arguments.length) args.push(arguments[position++]); + return executeBound(func, bound, this, this, args); + }; + return bound; + }; + + // Bind a number of an object's methods to that object. Remaining arguments + // are the method names to be bound. Useful for ensuring that all callbacks + // defined on an object belong to it. + _.bindAll = function(obj) { + var i, length = arguments.length, key; + if (length <= 1) throw new Error('bindAll must be passed function names'); + for (i = 1; i < length; i++) { + key = arguments[i]; + obj[key] = _.bind(obj[key], obj); + } + return obj; + }; + + // Memoize an expensive function by storing its results. + _.memoize = function(func, hasher) { + var memoize = function(key) { + var cache = memoize.cache; + var address = '' + (hasher ? hasher.apply(this, arguments) : key); + if (!_.has(cache, address)) cache[address] = func.apply(this, arguments); + return cache[address]; + }; + memoize.cache = {}; + return memoize; + }; + + // Delays a function for the given number of milliseconds, and then calls + // it with the arguments supplied. + _.delay = function(func, wait) { + var args = slice.call(arguments, 2); + return setTimeout(function(){ + return func.apply(null, args); + }, wait); + }; + + // Defers a function, scheduling it to run after the current call stack has + // cleared. + _.defer = _.partial(_.delay, _, 1); + + // Returns a function, that, when invoked, will only be triggered at most once + // during a given window of time. Normally, the throttled function will run + // as much as it can, without ever going more than once per `wait` duration; + // but if you'd like to disable the execution on the leading edge, pass + // `{leading: false}`. To disable execution on the trailing edge, ditto. + _.throttle = function(func, wait, options) { + var context, args, result; + var timeout = null; + var previous = 0; + if (!options) options = {}; + var later = function() { + previous = options.leading === false ? 0 : _.now(); + timeout = null; + result = func.apply(context, args); + if (!timeout) context = args = null; + }; + return function() { + var now = _.now(); + if (!previous && options.leading === false) previous = now; + var remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0 || remaining > wait) { + if (timeout) { + clearTimeout(timeout); + timeout = null; + } + previous = now; + result = func.apply(context, args); + if (!timeout) context = args = null; + } else if (!timeout && options.trailing !== false) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }; + + // Returns a function, that, as long as it continues to be invoked, will not + // be triggered. The function will be called after it stops being called for + // N milliseconds. If `immediate` is passed, trigger the function on the + // leading edge, instead of the trailing. 
+ _.debounce = function(func, wait, immediate) { + var timeout, args, context, timestamp, result; + + var later = function() { + var last = _.now() - timestamp; + + if (last < wait && last >= 0) { + timeout = setTimeout(later, wait - last); + } else { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + if (!timeout) context = args = null; + } + } + }; + + return function() { + context = this; + args = arguments; + timestamp = _.now(); + var callNow = immediate && !timeout; + if (!timeout) timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + context = args = null; + } + + return result; + }; + }; + + // Returns the first function passed as an argument to the second, + // allowing you to adjust arguments, run code before and after, and + // conditionally execute the original function. + _.wrap = function(func, wrapper) { + return _.partial(wrapper, func); + }; + + // Returns a negated version of the passed-in predicate. + _.negate = function(predicate) { + return function() { + return !predicate.apply(this, arguments); + }; + }; + + // Returns a function that is the composition of a list of functions, each + // consuming the return value of the function that follows. + _.compose = function() { + var args = arguments; + var start = args.length - 1; + return function() { + var i = start; + var result = args[start].apply(this, arguments); + while (i--) result = args[i].call(this, result); + return result; + }; + }; + + // Returns a function that will only be executed on and after the Nth call. + _.after = function(times, func) { + return function() { + if (--times < 1) { + return func.apply(this, arguments); + } + }; + }; + + // Returns a function that will only be executed up to (but not including) the Nth call. + _.before = function(times, func) { + var memo; + return function() { + if (--times > 0) { + memo = func.apply(this, arguments); + } + if (times <= 1) func = null; + return memo; + }; + }; + + // Returns a function that will be executed at most one time, no matter how + // often you call it. Useful for lazy initialization. + _.once = _.partial(_.before, 2); + + // Object Functions + // ---------------- + + // Keys in IE < 9 that won't be iterated by `for key in ...` and thus missed. + var hasEnumBug = !{toString: null}.propertyIsEnumerable('toString'); + var nonEnumerableProps = ['valueOf', 'isPrototypeOf', 'toString', + 'propertyIsEnumerable', 'hasOwnProperty', 'toLocaleString']; + + function collectNonEnumProps(obj, keys) { + var nonEnumIdx = nonEnumerableProps.length; + var constructor = obj.constructor; + var proto = (_.isFunction(constructor) && constructor.prototype) || ObjProto; + + // Constructor is a special case. + var prop = 'constructor'; + if (_.has(obj, prop) && !_.contains(keys, prop)) keys.push(prop); + + while (nonEnumIdx--) { + prop = nonEnumerableProps[nonEnumIdx]; + if (prop in obj && obj[prop] !== proto[prop] && !_.contains(keys, prop)) { + keys.push(prop); + } + } + } + + // Retrieve the names of an object's own properties. + // Delegates to **ECMAScript 5**'s native `Object.keys` + _.keys = function(obj) { + if (!_.isObject(obj)) return []; + if (nativeKeys) return nativeKeys(obj); + var keys = []; + for (var key in obj) if (_.has(obj, key)) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve all the property names of an object. 
+ _.allKeys = function(obj) { + if (!_.isObject(obj)) return []; + var keys = []; + for (var key in obj) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve the values of an object's properties. + _.values = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var values = Array(length); + for (var i = 0; i < length; i++) { + values[i] = obj[keys[i]]; + } + return values; + }; + + // Returns the results of applying the iteratee to each element of the object + // In contrast to _.map it returns an object + _.mapObject = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = _.keys(obj), + length = keys.length, + results = {}, + currentKey; + for (var index = 0; index < length; index++) { + currentKey = keys[index]; + results[currentKey] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Convert an object into a list of `[key, value]` pairs. + _.pairs = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var pairs = Array(length); + for (var i = 0; i < length; i++) { + pairs[i] = [keys[i], obj[keys[i]]]; + } + return pairs; + }; + + // Invert the keys and values of an object. The values must be serializable. + _.invert = function(obj) { + var result = {}; + var keys = _.keys(obj); + for (var i = 0, length = keys.length; i < length; i++) { + result[obj[keys[i]]] = keys[i]; + } + return result; + }; + + // Return a sorted list of the function names available on the object. + // Aliased as `methods` + _.functions = _.methods = function(obj) { + var names = []; + for (var key in obj) { + if (_.isFunction(obj[key])) names.push(key); + } + return names.sort(); + }; + + // Extend a given object with all the properties in passed-in object(s). + _.extend = createAssigner(_.allKeys); + + // Assigns a given object with all the own properties in the passed-in object(s) + // (https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) + _.extendOwn = _.assign = createAssigner(_.keys); + + // Returns the first key on an object that passes a predicate test + _.findKey = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = _.keys(obj), key; + for (var i = 0, length = keys.length; i < length; i++) { + key = keys[i]; + if (predicate(obj[key], key, obj)) return key; + } + }; + + // Return a copy of the object only containing the whitelisted properties. + _.pick = function(object, oiteratee, context) { + var result = {}, obj = object, iteratee, keys; + if (obj == null) return result; + if (_.isFunction(oiteratee)) { + keys = _.allKeys(obj); + iteratee = optimizeCb(oiteratee, context); + } else { + keys = flatten(arguments, false, false, 1); + iteratee = function(value, key, obj) { return key in obj; }; + obj = Object(obj); + } + for (var i = 0, length = keys.length; i < length; i++) { + var key = keys[i]; + var value = obj[key]; + if (iteratee(value, key, obj)) result[key] = value; + } + return result; + }; + + // Return a copy of the object without the blacklisted properties. + _.omit = function(obj, iteratee, context) { + if (_.isFunction(iteratee)) { + iteratee = _.negate(iteratee); + } else { + var keys = _.map(flatten(arguments, false, false, 1), String); + iteratee = function(value, key) { + return !_.contains(keys, key); + }; + } + return _.pick(obj, iteratee, context); + }; + + // Fill in a given object with default properties. 
+ _.defaults = createAssigner(_.allKeys, true); + + // Creates an object that inherits from the given prototype object. + // If additional properties are provided then they will be added to the + // created object. + _.create = function(prototype, props) { + var result = baseCreate(prototype); + if (props) _.extendOwn(result, props); + return result; + }; + + // Create a (shallow-cloned) duplicate of an object. + _.clone = function(obj) { + if (!_.isObject(obj)) return obj; + return _.isArray(obj) ? obj.slice() : _.extend({}, obj); + }; + + // Invokes interceptor with the obj, and then returns obj. + // The primary purpose of this method is to "tap into" a method chain, in + // order to perform operations on intermediate results within the chain. + _.tap = function(obj, interceptor) { + interceptor(obj); + return obj; + }; + + // Returns whether an object has a given set of `key:value` pairs. + _.isMatch = function(object, attrs) { + var keys = _.keys(attrs), length = keys.length; + if (object == null) return !length; + var obj = Object(object); + for (var i = 0; i < length; i++) { + var key = keys[i]; + if (attrs[key] !== obj[key] || !(key in obj)) return false; + } + return true; + }; + + + // Internal recursive comparison function for `isEqual`. + var eq = function(a, b, aStack, bStack) { + // Identical objects are equal. `0 === -0`, but they aren't identical. + // See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal). + if (a === b) return a !== 0 || 1 / a === 1 / b; + // A strict comparison is necessary because `null == undefined`. + if (a == null || b == null) return a === b; + // Unwrap any wrapped objects. + if (a instanceof _) a = a._wrapped; + if (b instanceof _) b = b._wrapped; + // Compare `[[Class]]` names. + var className = toString.call(a); + if (className !== toString.call(b)) return false; + switch (className) { + // Strings, numbers, regular expressions, dates, and booleans are compared by value. + case '[object RegExp]': + // RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i') + case '[object String]': + // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is + // equivalent to `new String("5")`. + return '' + a === '' + b; + case '[object Number]': + // `NaN`s are equivalent, but non-reflexive. + // Object(NaN) is equivalent to NaN + if (+a !== +a) return +b !== +b; + // An `egal` comparison is performed for other numeric values. + return +a === 0 ? 1 / +a === 1 / b : +a === +b; + case '[object Date]': + case '[object Boolean]': + // Coerce dates and booleans to numeric primitive values. Dates are compared by their + // millisecond representations. Note that invalid dates with millisecond representations + // of `NaN` are not equivalent. + return +a === +b; + } + + var areArrays = className === '[object Array]'; + if (!areArrays) { + if (typeof a != 'object' || typeof b != 'object') return false; + + // Objects with different constructors are not equivalent, but `Object`s or `Array`s + // from different frames are. + var aCtor = a.constructor, bCtor = b.constructor; + if (aCtor !== bCtor && !(_.isFunction(aCtor) && aCtor instanceof aCtor && + _.isFunction(bCtor) && bCtor instanceof bCtor) + && ('constructor' in a && 'constructor' in b)) { + return false; + } + } + // Assume equality for cyclic structures. The algorithm for detecting cyclic + // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`. + + // Initializing stack of traversed objects. 
+ // It's done here since we only need them for objects and arrays comparison. + aStack = aStack || []; + bStack = bStack || []; + var length = aStack.length; + while (length--) { + // Linear search. Performance is inversely proportional to the number of + // unique nested structures. + if (aStack[length] === a) return bStack[length] === b; + } + + // Add the first object to the stack of traversed objects. + aStack.push(a); + bStack.push(b); + + // Recursively compare objects and arrays. + if (areArrays) { + // Compare array lengths to determine if a deep comparison is necessary. + length = a.length; + if (length !== b.length) return false; + // Deep compare the contents, ignoring non-numeric properties. + while (length--) { + if (!eq(a[length], b[length], aStack, bStack)) return false; + } + } else { + // Deep compare objects. + var keys = _.keys(a), key; + length = keys.length; + // Ensure that both objects contain the same number of properties before comparing deep equality. + if (_.keys(b).length !== length) return false; + while (length--) { + // Deep compare each member + key = keys[length]; + if (!(_.has(b, key) && eq(a[key], b[key], aStack, bStack))) return false; + } + } + // Remove the first object from the stack of traversed objects. + aStack.pop(); + bStack.pop(); + return true; + }; + + // Perform a deep comparison to check if two objects are equal. + _.isEqual = function(a, b) { + return eq(a, b); + }; + + // Is a given array, string, or object empty? + // An "empty" object has no enumerable own-properties. + _.isEmpty = function(obj) { + if (obj == null) return true; + if (isArrayLike(obj) && (_.isArray(obj) || _.isString(obj) || _.isArguments(obj))) return obj.length === 0; + return _.keys(obj).length === 0; + }; + + // Is a given value a DOM element? + _.isElement = function(obj) { + return !!(obj && obj.nodeType === 1); + }; + + // Is a given value an array? + // Delegates to ECMA5's native Array.isArray + _.isArray = nativeIsArray || function(obj) { + return toString.call(obj) === '[object Array]'; + }; + + // Is a given variable an object? + _.isObject = function(obj) { + var type = typeof obj; + return type === 'function' || type === 'object' && !!obj; + }; + + // Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp, isError. + _.each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp', 'Error'], function(name) { + _['is' + name] = function(obj) { + return toString.call(obj) === '[object ' + name + ']'; + }; + }); + + // Define a fallback version of the method in browsers (ahem, IE < 9), where + // there isn't any inspectable "Arguments" type. + if (!_.isArguments(arguments)) { + _.isArguments = function(obj) { + return _.has(obj, 'callee'); + }; + } + + // Optimize `isFunction` if appropriate. Work around some typeof bugs in old v8, + // IE 11 (#1621), and in Safari 8 (#1929). + if ( true && typeof Int8Array != 'object') { + _.isFunction = function(obj) { + return typeof obj == 'function' || false; + }; + } + + // Is a given object a finite number? + _.isFinite = function(obj) { + return isFinite(obj) && !isNaN(parseFloat(obj)); + }; + + // Is the given value `NaN`? (NaN is the only number which does not equal itself). + _.isNaN = function(obj) { + return _.isNumber(obj) && obj !== +obj; + }; + + // Is a given value a boolean? + _.isBoolean = function(obj) { + return obj === true || obj === false || toString.call(obj) === '[object Boolean]'; + }; + + // Is a given value equal to null? 
+ _.isNull = function(obj) {
+ return obj === null;
+ };
+
+ // Is a given variable undefined?
+ _.isUndefined = function(obj) {
+ return obj === void 0;
+ };
+
+ // Shortcut function for checking if an object has a given property directly
+ // on itself (in other words, not on a prototype).
+ _.has = function(obj, key) {
+ return obj != null && hasOwnProperty.call(obj, key);
+ };
+
+ // Utility Functions
+ // -----------------
+
+ // Run Underscore.js in *noConflict* mode, returning the `_` variable to its
+ // previous owner. Returns a reference to the Underscore object.
+ _.noConflict = function() {
+ root._ = previousUnderscore;
+ return this;
+ };
+
+ // Keep the identity function around for default iteratees.
+ _.identity = function(value) {
+ return value;
+ };
+
+ // Predicate-generating functions. Often useful outside of Underscore.
+ _.constant = function(value) {
+ return function() {
+ return value;
+ };
+ };
+
+ _.noop = function(){};
+
+ _.property = property;
+
+ // Generates a function for a given object that returns a given property.
+ _.propertyOf = function(obj) {
+ return obj == null ? function(){} : function(key) {
+ return obj[key];
+ };
+ };
+
+ // Returns a predicate for checking whether an object has a given set of
+ // `key:value` pairs.
+ _.matcher = _.matches = function(attrs) {
+ attrs = _.extendOwn({}, attrs);
+ return function(obj) {
+ return _.isMatch(obj, attrs);
+ };
+ };
+
+ // Run a function **n** times.
+ _.times = function(n, iteratee, context) {
+ var accum = Array(Math.max(0, n));
+ iteratee = optimizeCb(iteratee, context, 1);
+ for (var i = 0; i < n; i++) accum[i] = iteratee(i);
+ return accum;
+ };
+
+ // Return a random integer between min and max (inclusive).
+ _.random = function(min, max) {
+ if (max == null) {
+ max = min;
+ min = 0;
+ }
+ return min + Math.floor(Math.random() * (max - min + 1));
+ };
+
+ // A (possibly faster) way to get the current timestamp as an integer.
+ _.now = Date.now || function() {
+ return new Date().getTime();
+ };
+
+ // List of HTML entities for escaping.
+ var escapeMap = {
+ '&': '&amp;',
+ '<': '&lt;',
+ '>': '&gt;',
+ '"': '&quot;',
+ "'": '&#x27;',
+ '`': '&#x60;'
+ };
+ var unescapeMap = _.invert(escapeMap);
+
+ // Functions for escaping and unescaping strings to/from HTML interpolation.
+ var createEscaper = function(map) {
+ var escaper = function(match) {
+ return map[match];
+ };
+ // Regexes for identifying a key that needs to be escaped
+ var source = '(?:' + _.keys(map).join('|') + ')';
+ var testRegexp = RegExp(source);
+ var replaceRegexp = RegExp(source, 'g');
+ return function(string) {
+ string = string == null ? '' : '' + string;
+ return testRegexp.test(string) ? string.replace(replaceRegexp, escaper) : string;
+ };
+ };
+ _.escape = createEscaper(escapeMap);
+ _.unescape = createEscaper(unescapeMap);
+
+ // If the value of the named `property` is a function then invoke it with the
+ // `object` as context; otherwise, return it.
+ _.result = function(object, property, fallback) {
+ var value = object == null ? void 0 : object[property];
+ if (value === void 0) {
+ value = fallback;
+ }
+ return _.isFunction(value) ? value.call(object) : value;
+ };
+
+ // Generate a unique integer id (unique within the entire client session).
+ // Useful for temporary DOM ids.
+ var idCounter = 0;
+ _.uniqueId = function(prefix) {
+ var id = ++idCounter + '';
+ return prefix ? 
prefix + id : id; + }; + + // By default, Underscore uses ERB-style template delimiters, change the + // following template settings to use alternative delimiters. + _.templateSettings = { + evaluate : /<%([\s\S]+?)%>/g, + interpolate : /<%=([\s\S]+?)%>/g, + escape : /<%-([\s\S]+?)%>/g + }; + + // When customizing `templateSettings`, if you don't want to define an + // interpolation, evaluation or escaping regex, we need one that is + // guaranteed not to match. + var noMatch = /(.)^/; + + // Certain characters need to be escaped so that they can be put into a + // string literal. + var escapes = { + "'": "'", + '\\': '\\', + '\r': 'r', + '\n': 'n', + '\u2028': 'u2028', + '\u2029': 'u2029' + }; + + var escaper = /\\|'|\r|\n|\u2028|\u2029/g; + + var escapeChar = function(match) { + return '\\' + escapes[match]; + }; + + // JavaScript micro-templating, similar to John Resig's implementation. + // Underscore templating handles arbitrary delimiters, preserves whitespace, + // and correctly escapes quotes within interpolated code. + // NB: `oldSettings` only exists for backwards compatibility. + _.template = function(text, settings, oldSettings) { + if (!settings && oldSettings) settings = oldSettings; + settings = _.defaults({}, settings, _.templateSettings); + + // Combine delimiters into one regular expression via alternation. + var matcher = RegExp([ + (settings.escape || noMatch).source, + (settings.interpolate || noMatch).source, + (settings.evaluate || noMatch).source + ].join('|') + '|$', 'g'); + + // Compile the template source, escaping string literals appropriately. + var index = 0; + var source = "__p+='"; + text.replace(matcher, function(match, escape, interpolate, evaluate, offset) { + source += text.slice(index, offset).replace(escaper, escapeChar); + index = offset + match.length; + + if (escape) { + source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'"; + } else if (interpolate) { + source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'"; + } else if (evaluate) { + source += "';\n" + evaluate + "\n__p+='"; + } + + // Adobe VMs need the match returned to produce the correct offest. + return match; + }); + source += "';\n"; + + // If a variable is not specified, place data values in local scope. + if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n'; + + source = "var __t,__p='',__j=Array.prototype.join," + + "print=function(){__p+=__j.call(arguments,'');};\n" + + source + 'return __p;\n'; + + try { + var render = new Function(settings.variable || 'obj', '_', source); + } catch (e) { + e.source = source; + throw e; + } + + var template = function(data) { + return render.call(this, data, _); + }; + + // Provide the compiled source as a convenience for precompilation. + var argument = settings.variable || 'obj'; + template.source = 'function(' + argument + '){\n' + source + '}'; + + return template; + }; + + // Add a "chain" function. Start chaining a wrapped Underscore object. + _.chain = function(obj) { + var instance = _(obj); + instance._chain = true; + return instance; + }; + + // OOP + // --------------- + // If Underscore is called as a function, it returns a wrapped object that + // can be used OO-style. This wrapper holds altered versions of all the + // underscore functions. Wrapped objects may be chained. + + // Helper function to continue chaining intermediate results. + var result = function(instance, obj) { + return instance._chain ? _(obj).chain() : obj; + }; + + // Add your own custom functions to the Underscore object. 
+ _.mixin = function(obj) { + _.each(_.functions(obj), function(name) { + var func = _[name] = obj[name]; + _.prototype[name] = function() { + var args = [this._wrapped]; + push.apply(args, arguments); + return result(this, func.apply(_, args)); + }; + }); + }; + + // Add all of the Underscore functions to the wrapper object. + _.mixin(_); + + // Add all mutator Array functions to the wrapper. + _.each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + var obj = this._wrapped; + method.apply(obj, arguments); + if ((name === 'shift' || name === 'splice') && obj.length === 0) delete obj[0]; + return result(this, obj); + }; + }); + + // Add all accessor Array functions to the wrapper. + _.each(['concat', 'join', 'slice'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + return result(this, method.apply(this._wrapped, arguments)); + }; + }); + + // Extracts the result from a wrapped and chained object. + _.prototype.value = function() { + return this._wrapped; + }; + + // Provide unwrapping proxy for some methods used in engine operations + // such as arithmetic and JSON stringification. + _.prototype.valueOf = _.prototype.toJSON = _.prototype.value; + + _.prototype.toString = function() { + return '' + this._wrapped; + }; + + // AMD registration happens at the end for compatibility with AMD loaders + // that may not enforce next-turn semantics on modules. Even though general + // practice for AMD registration is to be anonymous, underscore registers + // as a named module because, like jQuery, it is a base library that is + // popular enough to be bundled in a third party lib, but not be part of + // an AMD load request. Those cases could generate an error when an + // anonymous define() is called outside of a loader request. + if (typeof define === 'function' && define.amd) { + define('underscore', [], function() { + return _; + }); + } +}.call(this)); + + +/***/ }), + +/***/ 941: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +var basiccreds_1 = __webpack_require__(12); +exports.BasicCredentialHandler = basiccreds_1.BasicCredentialHandler; +var bearertoken_1 = __webpack_require__(571); +exports.BearerCredentialHandler = bearertoken_1.BearerCredentialHandler; +var ntlm_1 = __webpack_require__(525); +exports.NtlmCredentialHandler = ntlm_1.NtlmCredentialHandler; +var personalaccesstoken_1 = __webpack_require__(327); +exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; + + +/***/ }), + +/***/ 986: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const tr = __webpack_require__(9); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +//# sourceMappingURL=exec.js.map + +/***/ }) + +/******/ }); \ No newline at end of file diff --git a/dist/save/index.js b/dist/save/index.js new file mode 100644 index 0000000..049a5c8 --- /dev/null +++ b/dist/save/index.js @@ -0,0 +1,5140 @@ +module.exports = +/******/ (function(modules, runtime) { // webpackBootstrap +/******/ "use strict"; +/******/ // The module cache +/******/ var installedModules = {}; +/******/ +/******/ // The require function +/******/ function __webpack_require__(moduleId) { +/******/ +/******/ // Check if module is in cache +/******/ if(installedModules[moduleId]) { +/******/ return installedModules[moduleId].exports; +/******/ } +/******/ // Create a new module (and put it into the cache) +/******/ var module = installedModules[moduleId] = { +/******/ i: moduleId, +/******/ l: false, +/******/ exports: {} +/******/ }; +/******/ +/******/ // Execute the module function +/******/ modules[moduleId].call(module.exports, module, module.exports, __webpack_require__); +/******/ +/******/ // Flag the module as loaded +/******/ module.l = true; +/******/ +/******/ // Return the exports of the module +/******/ return module.exports; +/******/ } +/******/ +/******/ +/******/ __webpack_require__.ab = __dirname + "/"; +/******/ +/******/ // the startup function +/******/ function startup() { +/******/ // Load entry module and return exports +/******/ return __webpack_require__(681); +/******/ }; +/******/ +/******/ // run startup +/******/ return startup(); +/******/ }) +/************************************************************************/ +/******/ ({ + +/***/ 1: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const childProcess = __webpack_require__(129); +const path = __webpack_require__(622); +const util_1 = __webpack_require__(669); +const ioUtil = __webpack_require__(672); +const exec = util_1.promisify(childProcess.exec); +/** + * Copies a file or folder. + * Based off of shelljs - https://github.com/shelljs/shelljs/blob/9237f66c52e5daa40458f94f9565e18e8132f5a6/src/cp.js + * + * @param source source path + * @param dest destination path + * @param options optional. See CopyOptions. + */ +function cp(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + const { force, recursive } = readCopyOptions(options); + const destStat = (yield ioUtil.exists(dest)) ? yield ioUtil.stat(dest) : null; + // Dest is an existing file, but not forcing + if (destStat && destStat.isFile() && !force) { + return; + } + // If dest is an existing directory, should copy inside. + const newDest = destStat && destStat.isDirectory() + ? path.join(dest, path.basename(source)) + : dest; + if (!(yield ioUtil.exists(source))) { + throw new Error(`no such file or directory: ${source}`); + } + const sourceStat = yield ioUtil.stat(source); + if (sourceStat.isDirectory()) { + if (!recursive) { + throw new Error(`Failed to copy. ${source} is a directory, but tried to copy without recursive flag.`); + } + else { + yield cpDirRecursive(source, newDest, 0, force); + } + } + else { + if (path.relative(source, newDest) === '') { + // a file cannot be copied to itself + throw new Error(`'${newDest}' and '${source}' are the same file`); + } + yield copyFile(source, newDest, force); + } + }); +} +exports.cp = cp; +/** + * Moves a path. + * + * @param source source path + * @param dest destination path + * @param options optional. See MoveOptions. + */ +function mv(source, dest, options = {}) { + return __awaiter(this, void 0, void 0, function* () { + if (yield ioUtil.exists(dest)) { + let destExists = true; + if (yield ioUtil.isDirectory(dest)) { + // If dest is directory copy src into dest + dest = path.join(dest, path.basename(source)); + destExists = yield ioUtil.exists(dest); + } + if (destExists) { + if (options.force == null || options.force) { + yield rmRF(dest); + } + else { + throw new Error('Destination already exists'); + } + } + } + yield mkdirP(path.dirname(dest)); + yield ioUtil.rename(source, dest); + }); +} +exports.mv = mv; +/** + * Remove a path recursively with force + * + * @param inputPath path to remove + */ +function rmRF(inputPath) { + return __awaiter(this, void 0, void 0, function* () { + if (ioUtil.IS_WINDOWS) { + // Node doesn't provide a delete operation, only an unlink function. This means that if the file is being used by another + // program (e.g. antivirus), it won't be deleted. To address this, we shell out the work to rd/del. 
+ try { + if (yield ioUtil.isDirectory(inputPath, true)) { + yield exec(`rd /s /q "${inputPath}"`); + } + else { + yield exec(`del /f /a "${inputPath}"`); + } + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + // Shelling out fails to remove a symlink folder with missing source, this unlink catches that + try { + yield ioUtil.unlink(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + } + } + else { + let isDir = false; + try { + isDir = yield ioUtil.isDirectory(inputPath); + } + catch (err) { + // if you try to delete a file that doesn't exist, desired result is achieved + // other errors are valid + if (err.code !== 'ENOENT') + throw err; + return; + } + if (isDir) { + yield exec(`rm -rf "${inputPath}"`); + } + else { + yield ioUtil.unlink(inputPath); + } + } + }); +} +exports.rmRF = rmRF; +/** + * Make a directory. Creates the full path with folders in between + * Will throw if it fails + * + * @param fsPath path to create + * @returns Promise + */ +function mkdirP(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + yield ioUtil.mkdirP(fsPath); + }); +} +exports.mkdirP = mkdirP; +/** + * Returns path of a tool had the tool actually been invoked. Resolves via paths. + * If you check and the tool does not exist, it will throw. + * + * @param tool name of the tool + * @param check whether to check if tool exists + * @returns Promise path to tool + */ +function which(tool, check) { + return __awaiter(this, void 0, void 0, function* () { + if (!tool) { + throw new Error("parameter 'tool' is required"); + } + // recursive when check=true + if (check) { + const result = yield which(tool, false); + if (!result) { + if (ioUtil.IS_WINDOWS) { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also verify the file has a valid extension for an executable file.`); + } + else { + throw new Error(`Unable to locate executable file: ${tool}. Please verify either the file path exists or the file can be found within a directory specified by the PATH environment variable. Also check the file mode to verify the file is executable.`); + } + } + } + try { + // build the list of extensions to try + const extensions = []; + if (ioUtil.IS_WINDOWS && process.env.PATHEXT) { + for (const extension of process.env.PATHEXT.split(path.delimiter)) { + if (extension) { + extensions.push(extension); + } + } + } + // if it's rooted, return it if exists. otherwise return empty. + if (ioUtil.isRooted(tool)) { + const filePath = yield ioUtil.tryGetExecutablePath(tool, extensions); + if (filePath) { + return filePath; + } + return ''; + } + // if any path separators, return empty + if (tool.includes('/') || (ioUtil.IS_WINDOWS && tool.includes('\\'))) { + return ''; + } + // build the list of directories + // + // Note, technically "where" checks the current directory on Windows. From a toolkit perspective, + // it feels like we should not do this. Checking the current directory seems like more of a use + // case of a shell, and the which() function exposed by the toolkit should strive for consistency + // across platforms. 
+ const directories = []; + if (process.env.PATH) { + for (const p of process.env.PATH.split(path.delimiter)) { + if (p) { + directories.push(p); + } + } + } + // return the first match + for (const directory of directories) { + const filePath = yield ioUtil.tryGetExecutablePath(directory + path.sep + tool, extensions); + if (filePath) { + return filePath; + } + } + return ''; + } + catch (err) { + throw new Error(`which failed with message ${err.message}`); + } + }); +} +exports.which = which; +function readCopyOptions(options) { + const force = options.force == null ? true : options.force; + const recursive = Boolean(options.recursive); + return { force, recursive }; +} +function cpDirRecursive(sourceDir, destDir, currentDepth, force) { + return __awaiter(this, void 0, void 0, function* () { + // Ensure there is not a run away recursive copy + if (currentDepth >= 255) + return; + currentDepth++; + yield mkdirP(destDir); + const files = yield ioUtil.readdir(sourceDir); + for (const fileName of files) { + const srcFile = `${sourceDir}/${fileName}`; + const destFile = `${destDir}/${fileName}`; + const srcFileStat = yield ioUtil.lstat(srcFile); + if (srcFileStat.isDirectory()) { + // Recurse + yield cpDirRecursive(srcFile, destFile, currentDepth, force); + } + else { + yield copyFile(srcFile, destFile, force); + } + } + // Change the mode for the newly created directory + yield ioUtil.chmod(destDir, (yield ioUtil.stat(sourceDir)).mode); + }); +} +// Buffered file copy +function copyFile(srcFile, destFile, force) { + return __awaiter(this, void 0, void 0, function* () { + if ((yield ioUtil.lstat(srcFile)).isSymbolicLink()) { + // unlink/re-link it + try { + yield ioUtil.lstat(destFile); + yield ioUtil.unlink(destFile); + } + catch (e) { + // Try to override file permission + if (e.code === 'EPERM') { + yield ioUtil.chmod(destFile, '0666'); + yield ioUtil.unlink(destFile); + } + // other errors = it doesn't exist, no work to do + } + // Copy over symlink + const symlinkFull = yield ioUtil.readlink(srcFile); + yield ioUtil.symlink(symlinkFull, destFile, ioUtil.IS_WINDOWS ? 'junction' : null); + } + else if (!(yield ioUtil.exists(destFile)) || force) { + yield ioUtil.copyFile(srcFile, destFile); + } + }); +} +//# sourceMappingURL=io.js.map + +/***/ }), + +/***/ 9: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __webpack_require__(87); +const events = __webpack_require__(614); +const child = __webpack_require__(129); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. 
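 *
 * A minimal usage sketch (hypothetical tool and arguments; based only on the
 * constructor and exec() defined below):
 *
 *     const runner = new ToolRunner('tar', ['-xzf', 'cache.tgz'], { silent: true });
 *     const exitCode = await runner.exec(); // resolves with the tool's return code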
+ */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + strBuffer = s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. 
+ // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. + // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. 
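        //
        // A compact restatement of rules 1-3 as a standalone sketch (illustrative
        // only - the reverse-walk loop below is the actual implementation):
        //
        //   input : my "quoted" arg\
        //   output: "my ""quoted"" arg\\"
        function cmdQuoteSketch(a) {
            if (!a) {
                return '""'; // empty args still need quotes
            }
            const doubled = a
                .replace(/(\\+)(?="|$)/g, '$1$1') // rule 3: double slashes that precede a quote (or the closing quote we add)
                .replace(/"/g, '""');             // rule 2: " => ""
            return `"${doubled}"`;                // rule 1: wrap the whole string in quotes
        }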
+ let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. + // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. 
+ return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + const stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + const errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + }); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. 
+ if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), + +/***/ 12: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
+Object.defineProperty(exports, "__esModule", { value: true }); +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Basic ' + new Buffer(this.username + ':' + this.password).toString('base64'); + options.headers['X-TFS-FedAuthRedirect'] = 'Suppress'; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; + + +/***/ }), + +/***/ 16: +/***/ (function(module) { + +module.exports = require("tls"); + +/***/ }), + +/***/ 87: +/***/ (function(module) { + +module.exports = require("os"); + +/***/ }), + +/***/ 105: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const httpm = __webpack_require__(874); +const util = __webpack_require__(729); +class RestClient { + /** + * Creates an instance of the RestClient + * @constructor + * @param {string} userAgent - userAgent for requests + * @param {string} baseUrl - (Optional) If not specified, use full urls per request. If supplied and a function passes a relative url, it will be appended to this + * @param {ifm.IRequestHandler[]} handlers - handlers are typically auth handlers (basic, bearer, ntlm supplied) + * @param {ifm.IRequestOptions} requestOptions - options for each http requests (http proxy setting, socket timeout) + */ + constructor(userAgent, baseUrl, handlers, requestOptions) { + this.client = new httpm.HttpClient(userAgent, handlers, requestOptions); + if (baseUrl) { + this._baseUrl = baseUrl; + } + } + /** + * Gets a resource from an endpoint + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} requestUrl - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + options(requestUrl, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(requestUrl, this._baseUrl); + let res = yield this.client.options(url, this._headersFromOptions(options)); + return this._processResponse(res, options); + }); + } + /** + * Gets a resource from an endpoint + * Be aware that not found returns a null. 
Other error conditions reject the promise + * @param {string} resource - fully qualified url or relative path + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + get(resource, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let res = yield this.client.get(url, this._headersFromOptions(options)); + return this._processResponse(res, options); + }); + } + /** + * Deletes a resource from an endpoint + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + del(resource, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let res = yield this.client.del(url, this._headersFromOptions(options)); + return this._processResponse(res, options); + }); + } + /** + * Creates resource(s) from an endpoint + * T type of object returned. + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + create(resource, resources, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let data = JSON.stringify(resources, null, 2); + let res = yield this.client.post(url, data, headers); + return this._processResponse(res, options); + }); + } + /** + * Updates resource(s) from an endpoint + * T type of object returned. + * Be aware that not found returns a null. Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + update(resource, resources, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let data = JSON.stringify(resources, null, 2); + let res = yield this.client.patch(url, data, headers); + return this._processResponse(res, options); + }); + } + /** + * Replaces resource(s) from an endpoint + * T type of object returned. + * Be aware that not found returns a null. 
Other error conditions reject the promise + * @param {string} resource - fully qualified or relative url + * @param {IRequestOptions} requestOptions - (optional) requestOptions object + */ + replace(resource, resources, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(resource, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let data = JSON.stringify(resources, null, 2); + let res = yield this.client.put(url, data, headers); + return this._processResponse(res, options); + }); + } + uploadStream(verb, requestUrl, stream, options) { + return __awaiter(this, void 0, void 0, function* () { + let url = util.getUrl(requestUrl, this._baseUrl); + let headers = this._headersFromOptions(options, true); + let res = yield this.client.sendStream(verb, url, stream, headers); + return this._processResponse(res, options); + }); + } + _headersFromOptions(options, contentType) { + options = options || {}; + let headers = options.additionalHeaders || {}; + headers["Accept"] = options.acceptHeader || "application/json"; + if (contentType) { + let found = false; + for (let header in headers) { + if (header.toLowerCase() == "content-type") { + found = true; + } + } + if (!found) { + headers["Content-Type"] = 'application/json; charset=utf-8'; + } + } + return headers; + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + _processResponse(res, options) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == httpm.HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = yield res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, RestClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + if (options && options.responseProcessor) { + response.result = options.responseProcessor(obj); + } + else { + response.result = obj; + } + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = "Failed request: (" + statusCode + ")"; + } + let err = new Error(msg); + // attach statusCode and body obj (if available) to the error object + err['statusCode'] = statusCode; + if (response.result) { + err['result'] = response.result; + } + reject(err); + } + else { + resolve(response); + } + })); + }); + } +} +exports.RestClient = RestClient; + + +/***/ }), + +/***/ 129: +/***/ (function(module) { + +module.exports = require("child_process"); + +/***/ }), + +/***/ 139: +/***/ (function(module, __unusedexports, __webpack_require__) { + +// Unique ID creation requires a high quality random # generator. 
In node.js +// this is pretty straight-forward - we use the crypto API. + +var crypto = __webpack_require__(417); + +module.exports = function nodeRNG() { + return crypto.randomBytes(16); +}; + + +/***/ }), + +/***/ 141: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +var net = __webpack_require__(631); +var tls = __webpack_require__(16); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var events = __webpack_require__(614); +var assert = __webpack_require__(357); +var util = __webpack_require__(669); + + +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; + + +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; +} + +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + return agent; +} + +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} + +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + return agent; +} + + +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; + + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); +} +util.inherits(TunnelingAgent, events.EventEmitter); + +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); + + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + + // If we are under maxSockets create a new one. 
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); + + function onFree() { + self.emit('free', socket, options); + } + + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; + +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); + + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false + }); + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); + } + + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } + + function onUpgrade(res, socket, head) { + // Hacky. + process.nextTick(function() { + onConnect(res, socket, head); + }); + } + + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); + + if (res.statusCode === 200) { + assert.equal(head.length, 0); + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + cb(socket); + } else { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } + } + + function onError(cause) { + connectReq.removeAllListeners(); + + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } +}; + +TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); + }); + } +}; + +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? 
hostHeader.replace(/:.*$/, '') : options.host + }); + + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} + + +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; + } + return host; // for v0.11 or later +} + +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } + } + } + } + return target; +} + + +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; +} +exports.debug = debug; // for test + + +/***/ }), + +/***/ 154: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const fs = __importStar(__webpack_require__(747)); +const Handlers_1 = __webpack_require__(941); +const HttpClient_1 = __webpack_require__(874); +const RestClient_1 = __webpack_require__(105); +function getCacheEntry(keys) { + return __awaiter(this, void 0, void 0, function* () { + const cacheUrl = getCacheUrl(); + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); + const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; + const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ + bearerCredentialHandler + ]); + const response = yield restClient.get(resource, getRequestOptions()); + if (response.statusCode === 204) { + return null; + } + if (response.statusCode !== 200) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + const cacheResult = response.result; + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); + if (!cacheResult || !cacheResult.archiveLocation) { + throw new Error("Cache not found."); + } + return cacheResult; + }); +} +exports.getCacheEntry = getCacheEntry; +function downloadCache(cacheEntry, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const stream = fs.createWriteStream(archivePath); + const httpClient = new HttpClient_1.HttpClient("actions/cache"); + const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); + yield pipeResponseToStream(downloadResponse, stream); + }); +} +exports.downloadCache = downloadCache; +function pipeResponseToStream(response, stream) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise(resolve => { + response.message.pipe(stream).on("close", () => { + resolve(); + }); + }); + }); +} +function saveCache(stream, key) { + return __awaiter(this, void 0, void 0, function* () { + const cacheUrl = getCacheUrl(); + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); + const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; + const postUrl = cacheUrl + resource; + const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ + bearerCredentialHandler + ]); + const requestOptions = getRequestOptions(); + requestOptions.additionalHeaders = { + "Content-Type": "application/octet-stream" + }; + const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); + if (response.statusCode !== 200) { + throw new Error(`Cache service responded with ${response.statusCode}`); + } + core.info("Cache saved successfully"); + }); +} +exports.saveCache = saveCache; +function getRequestOptions() { + const requestOptions = { + acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + }; + return requestOptions; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function 
getCacheUrl() { + // Ideally we just use ACTIONS_CACHE_URL + let cacheUrl = (process.env["ACTIONS_CACHE_URL"] || + process.env["ACTIONS_RUNTIME_URL"] || + "").replace("pipelines", "artifactcache"); + if (!cacheUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); + } + core.debug(`Cache Url: ${cacheUrl}`); + return cacheUrl; +} + + +/***/ }), + +/***/ 211: +/***/ (function(module) { + +module.exports = require("https"); + +/***/ }), + +/***/ 327: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +Object.defineProperty(exports, "__esModule", { value: true }); +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Basic ' + new Buffer('PAT:' + this.token).toString('base64'); + options.headers['X-TFS-FedAuthRedirect'] = 'Suppress'; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; + + +/***/ }), + +/***/ 357: +/***/ (function(module) { + +module.exports = require("assert"); + +/***/ }), + +/***/ 413: +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(141); + + +/***/ }), + +/***/ 417: +/***/ (function(module) { + +module.exports = require("crypto"); + +/***/ }), + +/***/ 431: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __webpack_require__(87); +/** + * Commands + * + * Command Format: + * ##[name key=value;key=value]message + * + * Examples: + * ##[warning]This is the user warning message + * ##[set-secret name=mypassword]definitelyNotAPassword! 
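 *
 * Note (based on the implementation below): CMD_STRING is '::', so the string
 * actually written to stdout has the form ::name key=value,::message - for
 * example, issue('warning', 'This is the user warning message') writes:
 *   ::warning::This is the user warning message
 * Properties, when present, are appended after a space as key=value pairs,
 * each followed by a comma, before the closing '::'.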
+ */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + // safely append the val - avoid blowing up when attempting to + // call .replace() if message is not a string for some reason + cmdStr += `${key}=${escape(`${val || ''}`)},`; + } + } + } + } + cmdStr += CMD_STRING; + // safely append the message - avoid blowing up when attempting to + // call .replace() if message is not a string for some reason + const message = `${this.message || ''}`; + cmdStr += escapeData(message); + return cmdStr; + } +} +function escapeData(s) { + return s.replace(/\r/g, '%0D').replace(/\n/g, '%0A'); +} +function escape(s) { + return s + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/]/g, '%5D') + .replace(/;/g, '%3B'); +} +//# sourceMappingURL=command.js.map + +/***/ }), + +/***/ 432: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +var crypto = __webpack_require__(417); + +var flags = { + NTLM_NegotiateUnicode : 0x00000001, + NTLM_NegotiateOEM : 0x00000002, + NTLM_RequestTarget : 0x00000004, + NTLM_Unknown9 : 0x00000008, + NTLM_NegotiateSign : 0x00000010, + NTLM_NegotiateSeal : 0x00000020, + NTLM_NegotiateDatagram : 0x00000040, + NTLM_NegotiateLanManagerKey : 0x00000080, + NTLM_Unknown8 : 0x00000100, + NTLM_NegotiateNTLM : 0x00000200, + NTLM_NegotiateNTOnly : 0x00000400, + NTLM_Anonymous : 0x00000800, + NTLM_NegotiateOemDomainSupplied : 0x00001000, + NTLM_NegotiateOemWorkstationSupplied : 0x00002000, + NTLM_Unknown6 : 0x00004000, + NTLM_NegotiateAlwaysSign : 0x00008000, + NTLM_TargetTypeDomain : 0x00010000, + NTLM_TargetTypeServer : 0x00020000, + NTLM_TargetTypeShare : 0x00040000, + NTLM_NegotiateExtendedSecurity : 0x00080000, + NTLM_NegotiateIdentify : 0x00100000, + NTLM_Unknown5 : 0x00200000, + NTLM_RequestNonNTSessionKey : 0x00400000, + NTLM_NegotiateTargetInfo : 0x00800000, + NTLM_Unknown4 : 0x01000000, + NTLM_NegotiateVersion : 0x02000000, + NTLM_Unknown3 : 0x04000000, + NTLM_Unknown2 : 0x08000000, + NTLM_Unknown1 : 0x10000000, + NTLM_Negotiate128 : 0x20000000, + NTLM_NegotiateKeyExchange : 0x40000000, + NTLM_Negotiate56 : 0x80000000 +}; +var typeflags = { + NTLM_TYPE1_FLAGS : flags.NTLM_NegotiateUnicode + + flags.NTLM_NegotiateOEM + + flags.NTLM_RequestTarget + + flags.NTLM_NegotiateNTLM + + flags.NTLM_NegotiateOemDomainSupplied + + flags.NTLM_NegotiateOemWorkstationSupplied + + flags.NTLM_NegotiateAlwaysSign + + flags.NTLM_NegotiateExtendedSecurity + + flags.NTLM_NegotiateVersion + + flags.NTLM_Negotiate128 + + flags.NTLM_Negotiate56, + + NTLM_TYPE2_FLAGS : flags.NTLM_NegotiateUnicode + + flags.NTLM_RequestTarget + + flags.NTLM_NegotiateNTLM + + flags.NTLM_NegotiateAlwaysSign + + flags.NTLM_NegotiateExtendedSecurity + + flags.NTLM_NegotiateTargetInfo + + flags.NTLM_NegotiateVersion + + flags.NTLM_Negotiate128 + + 
flags.NTLM_Negotiate56 +}; + +function createType1Message(options){ + var domain = escape(options.domain.toUpperCase()); + var workstation = escape(options.workstation.toUpperCase()); + var protocol = 'NTLMSSP\0'; + + var BODY_LENGTH = 40; + + var type1flags = typeflags.NTLM_TYPE1_FLAGS; + if(!domain || domain === '') + type1flags = type1flags - flags.NTLM_NegotiateOemDomainSupplied; + + var pos = 0; + var buf = new Buffer(BODY_LENGTH + domain.length + workstation.length); + + + buf.write(protocol, pos, protocol.length); pos += protocol.length; // protocol + buf.writeUInt32LE(1, pos); pos += 4; // type 1 + buf.writeUInt32LE(type1flags, pos); pos += 4; // TYPE1 flag + + buf.writeUInt16LE(domain.length, pos); pos += 2; // domain length + buf.writeUInt16LE(domain.length, pos); pos += 2; // domain max length + buf.writeUInt32LE(BODY_LENGTH + workstation.length, pos); pos += 4; // domain buffer offset + + buf.writeUInt16LE(workstation.length, pos); pos += 2; // workstation length + buf.writeUInt16LE(workstation.length, pos); pos += 2; // workstation max length + buf.writeUInt32LE(BODY_LENGTH, pos); pos += 4; // workstation buffer offset + + buf.writeUInt8(5, pos); pos += 1; //ProductMajorVersion + buf.writeUInt8(1, pos); pos += 1; //ProductMinorVersion + buf.writeUInt16LE(2600, pos); pos += 2; //ProductBuild + + buf.writeUInt8(0 , pos); pos += 1; //VersionReserved1 + buf.writeUInt8(0 , pos); pos += 1; //VersionReserved2 + buf.writeUInt8(0 , pos); pos += 1; //VersionReserved3 + buf.writeUInt8(15, pos); pos += 1; //NTLMRevisionCurrent + + buf.write(workstation, pos, workstation.length, 'ascii'); pos += workstation.length; // workstation string + buf.write(domain , pos, domain.length , 'ascii'); pos += domain.length; + + return 'NTLM ' + buf.toString('base64'); +} + +function parseType2Message(rawmsg, callback){ + var match = rawmsg.match(/NTLM (.+)?/); + if(!match || !match[1]) + return callback(new Error("Couldn't find NTLM in the message type2 comming from the server")); + + var buf = new Buffer(match[1], 'base64'); + + var msg = {}; + + msg.signature = buf.slice(0, 8); + msg.type = buf.readInt16LE(8); + + if(msg.type != 2) + return callback(new Error("Server didn't return a type 2 message")); + + msg.targetNameLen = buf.readInt16LE(12); + msg.targetNameMaxLen = buf.readInt16LE(14); + msg.targetNameOffset = buf.readInt32LE(16); + msg.targetName = buf.slice(msg.targetNameOffset, msg.targetNameOffset + msg.targetNameMaxLen); + + msg.negotiateFlags = buf.readInt32LE(20); + msg.serverChallenge = buf.slice(24, 32); + msg.reserved = buf.slice(32, 40); + + if(msg.negotiateFlags & flags.NTLM_NegotiateTargetInfo){ + msg.targetInfoLen = buf.readInt16LE(40); + msg.targetInfoMaxLen = buf.readInt16LE(42); + msg.targetInfoOffset = buf.readInt32LE(44); + msg.targetInfo = buf.slice(msg.targetInfoOffset, msg.targetInfoOffset + msg.targetInfoLen); + } + return msg; +} + +function createType3Message(msg2, options){ + var nonce = msg2.serverChallenge; + var username = options.username; + var password = options.password; + var negotiateFlags = msg2.negotiateFlags; + + var isUnicode = negotiateFlags & flags.NTLM_NegotiateUnicode; + var isNegotiateExtendedSecurity = negotiateFlags & flags.NTLM_NegotiateExtendedSecurity; + + var BODY_LENGTH = 72; + + var domainName = escape(options.domain.toUpperCase()); + var workstation = escape(options.workstation.toUpperCase()); + + var workstationBytes, domainNameBytes, usernameBytes, encryptedRandomSessionKeyBytes; + + var encryptedRandomSessionKey = ""; + if(isUnicode){ + 
workstationBytes = new Buffer(workstation, 'utf16le'); + domainNameBytes = new Buffer(domainName, 'utf16le'); + usernameBytes = new Buffer(username, 'utf16le'); + encryptedRandomSessionKeyBytes = new Buffer(encryptedRandomSessionKey, 'utf16le'); + }else{ + workstationBytes = new Buffer(workstation, 'ascii'); + domainNameBytes = new Buffer(domainName, 'ascii'); + usernameBytes = new Buffer(username, 'ascii'); + encryptedRandomSessionKeyBytes = new Buffer(encryptedRandomSessionKey, 'ascii'); + } + + var lmChallengeResponse = calc_resp(create_LM_hashed_password_v1(password), nonce); + var ntChallengeResponse = calc_resp(create_NT_hashed_password_v1(password), nonce); + + if(isNegotiateExtendedSecurity){ + var pwhash = create_NT_hashed_password_v1(password); + var clientChallenge = ""; + for(var i=0; i < 8; i++){ + clientChallenge += String.fromCharCode( Math.floor(Math.random()*256) ); + } + var clientChallengeBytes = new Buffer(clientChallenge, 'ascii'); + var challenges = ntlm2sr_calc_resp(pwhash, nonce, clientChallengeBytes); + lmChallengeResponse = challenges.lmChallengeResponse; + ntChallengeResponse = challenges.ntChallengeResponse; + } + + var signature = 'NTLMSSP\0'; + + var pos = 0; + var buf = new Buffer(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length + lmChallengeResponse.length + ntChallengeResponse.length + encryptedRandomSessionKeyBytes.length); + + buf.write(signature, pos, signature.length); pos += signature.length; + buf.writeUInt32LE(3, pos); pos += 4; // type 1 + + buf.writeUInt16LE(lmChallengeResponse.length, pos); pos += 2; // LmChallengeResponseLen + buf.writeUInt16LE(lmChallengeResponse.length, pos); pos += 2; // LmChallengeResponseMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length, pos); pos += 4; // LmChallengeResponseOffset + + buf.writeUInt16LE(ntChallengeResponse.length, pos); pos += 2; // NtChallengeResponseLen + buf.writeUInt16LE(ntChallengeResponse.length, pos); pos += 2; // NtChallengeResponseMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length + lmChallengeResponse.length, pos); pos += 4; // NtChallengeResponseOffset + + buf.writeUInt16LE(domainNameBytes.length, pos); pos += 2; // DomainNameLen + buf.writeUInt16LE(domainNameBytes.length, pos); pos += 2; // DomainNameMaxLen + buf.writeUInt32LE(BODY_LENGTH, pos); pos += 4; // DomainNameOffset + + buf.writeUInt16LE(usernameBytes.length, pos); pos += 2; // UserNameLen + buf.writeUInt16LE(usernameBytes.length, pos); pos += 2; // UserNameMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length, pos); pos += 4; // UserNameOffset + + buf.writeUInt16LE(workstationBytes.length, pos); pos += 2; // WorkstationLen + buf.writeUInt16LE(workstationBytes.length, pos); pos += 2; // WorkstationMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length, pos); pos += 4; // WorkstationOffset + + buf.writeUInt16LE(encryptedRandomSessionKeyBytes.length, pos); pos += 2; // EncryptedRandomSessionKeyLen + buf.writeUInt16LE(encryptedRandomSessionKeyBytes.length, pos); pos += 2; // EncryptedRandomSessionKeyMaxLen + buf.writeUInt32LE(BODY_LENGTH + domainNameBytes.length + usernameBytes.length + workstationBytes.length + lmChallengeResponse.length + ntChallengeResponse.length, pos); pos += 4; // EncryptedRandomSessionKeyOffset + + buf.writeUInt32LE(typeflags.NTLM_TYPE2_FLAGS, pos); pos += 4; // NegotiateFlags + + buf.writeUInt8(5, pos); pos++; // 
ProductMajorVersion + buf.writeUInt8(1, pos); pos++; // ProductMinorVersion + buf.writeUInt16LE(2600, pos); pos += 2; // ProductBuild + buf.writeUInt8(0, pos); pos++; // VersionReserved1 + buf.writeUInt8(0, pos); pos++; // VersionReserved2 + buf.writeUInt8(0, pos); pos++; // VersionReserved3 + buf.writeUInt8(15, pos); pos++; // NTLMRevisionCurrent + + domainNameBytes.copy(buf, pos); pos += domainNameBytes.length; + usernameBytes.copy(buf, pos); pos += usernameBytes.length; + workstationBytes.copy(buf, pos); pos += workstationBytes.length; + lmChallengeResponse.copy(buf, pos); pos += lmChallengeResponse.length; + ntChallengeResponse.copy(buf, pos); pos += ntChallengeResponse.length; + encryptedRandomSessionKeyBytes.copy(buf, pos); pos += encryptedRandomSessionKeyBytes.length; + + return 'NTLM ' + buf.toString('base64'); +} + +function create_LM_hashed_password_v1(password){ + // fix the password length to 14 bytes + password = password.toUpperCase(); + var passwordBytes = new Buffer(password, 'ascii'); + + var passwordBytesPadded = new Buffer(14); + passwordBytesPadded.fill("\0"); + var sourceEnd = 14; + if(passwordBytes.length < 14) sourceEnd = passwordBytes.length; + passwordBytes.copy(passwordBytesPadded, 0, 0, sourceEnd); + + // split into 2 parts of 7 bytes: + var firstPart = passwordBytesPadded.slice(0,7); + var secondPart = passwordBytesPadded.slice(7); + + function encrypt(buf){ + var key = insertZerosEvery7Bits(buf); + var des = crypto.createCipheriv('DES-ECB', key, ''); + return des.update("KGS!@#$%"); // page 57 in [MS-NLMP]); + } + + var firstPartEncrypted = encrypt(firstPart); + var secondPartEncrypted = encrypt(secondPart); + + return Buffer.concat([firstPartEncrypted, secondPartEncrypted]); +} + +function insertZerosEvery7Bits(buf){ + var binaryArray = bytes2binaryArray(buf); + var newBinaryArray = []; + for(var i=0; i array.length) + break; + + var binString1 = '' + array[i] + '' + array[i+1] + '' + array[i+2] + '' + array[i+3]; + var binString2 = '' + array[i+4] + '' + array[i+5] + '' + array[i+6] + '' + array[i+7]; + var hexchar1 = binary2hex[binString1]; + var hexchar2 = binary2hex[binString2]; + + var buf = new Buffer(hexchar1 + '' + hexchar2, 'hex'); + bufArray.push(buf); + } + + return Buffer.concat(bufArray); +} + +function create_NT_hashed_password_v1(password){ + var buf = new Buffer(password, 'utf16le'); + var md4 = crypto.createHash('md4'); + md4.update(buf); + return new Buffer(md4.digest()); +} + +function calc_resp(password_hash, server_challenge){ + // padding with zeros to make the hash 21 bytes long + var passHashPadded = new Buffer(21); + passHashPadded.fill("\0"); + password_hash.copy(passHashPadded, 0, 0, password_hash.length); + + var resArray = []; + + var des = crypto.createCipheriv('DES-ECB', insertZerosEvery7Bits(passHashPadded.slice(0,7)), ''); + resArray.push( des.update(server_challenge.slice(0,8)) ); + + des = crypto.createCipheriv('DES-ECB', insertZerosEvery7Bits(passHashPadded.slice(7,14)), ''); + resArray.push( des.update(server_challenge.slice(0,8)) ); + + des = crypto.createCipheriv('DES-ECB', insertZerosEvery7Bits(passHashPadded.slice(14,21)), ''); + resArray.push( des.update(server_challenge.slice(0,8)) ); + + return Buffer.concat(resArray); +} + +function ntlm2sr_calc_resp(responseKeyNT, serverChallenge, clientChallenge){ + // padding with zeros to make the hash 16 bytes longer + var lmChallengeResponse = new Buffer(clientChallenge.length + 16); + lmChallengeResponse.fill("\0"); + clientChallenge.copy(lmChallengeResponse, 0, 0, 
clientChallenge.length); + + var buf = Buffer.concat([serverChallenge, clientChallenge]); + var md5 = crypto.createHash('md5'); + md5.update(buf); + var sess = md5.digest(); + var ntChallengeResponse = calc_resp(responseKeyNT, sess.slice(0,8)); + + return { + lmChallengeResponse: lmChallengeResponse, + ntChallengeResponse: ntChallengeResponse + }; +} + +exports.createType1Message = createType1Message; +exports.parseType2Message = parseType2Message; +exports.createType3Message = createType3Message; + + + + + +/***/ }), + +/***/ 443: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const io = __importStar(__webpack_require__(1)); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +const uuidV4 = __importStar(__webpack_require__(826)); +const constants_1 = __webpack_require__(694); +// From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23 +function createTempDirectory() { + return __awaiter(this, void 0, void 0, function* () { + const IS_WINDOWS = process.platform === "win32"; + let tempDirectory = process.env["RUNNER_TEMP"] || ""; + if (!tempDirectory) { + let baseLocation; + if (IS_WINDOWS) { + // On Windows use the USERPROFILE env variable + baseLocation = process.env["USERPROFILE"] || "C:\\"; + } + else { + if (process.platform === "darwin") { + baseLocation = "/Users"; + } + else { + baseLocation = "/home"; + } + } + tempDirectory = path.join(baseLocation, "actions", "temp"); + } + const dest = path.join(tempDirectory, uuidV4.default()); + yield io.mkdirP(dest); + return dest; + }); +} +exports.createTempDirectory = createTempDirectory; +function isExactKeyMatch(key, cacheResult) { + return !!(cacheResult && + cacheResult.cacheKey && + cacheResult.cacheKey.localeCompare(key, undefined, { + sensitivity: "accent" + }) === 0); +} +exports.isExactKeyMatch = isExactKeyMatch; +function setOutputAndState(key, cacheResult) { + setCacheHitOutput(isExactKeyMatch(key, cacheResult)); + // Store the cache result if it exists + cacheResult && setCacheState(cacheResult); +} +exports.setOutputAndState = setOutputAndState; +function getCacheState() { + const stateData = core.getState(constants_1.State.CacheResult); + core.debug(`State: ${stateData}`); + return (stateData && JSON.parse(stateData)); +} +exports.getCacheState = getCacheState; +function setCacheState(state) { + core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); +} 
+exports.setCacheState = setCacheState; +function setCacheHitOutput(isCacheHit) { + core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); +} +exports.setCacheHitOutput = setCacheHitOutput; +function resolvePath(filePath) { + if (filePath[0] === "~") { + const home = os.homedir(); + if (!home) { + throw new Error("Unable to resolve `~` to HOME"); + } + return path.join(home, filePath.slice(1)); + } + return path.resolve(filePath); +} +exports.resolvePath = resolvePath; + + +/***/ }), + +/***/ 470: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const command_1 = __webpack_require__(431); +const os = __webpack_require__(87); +const path = __webpack_require__(622); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable + */ +function exportVariable(name, val) { + process.env[name] = val; + command_1.issueCommand('set-env', { name }, val); +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + command_1.issueCommand('add-path', {}, inputPath); + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. The value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Sets the value of an output. 
+ * + * @param name name of the output to set + * @param value value to store + */ +function setOutput(name, value) { + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message + */ +function error(message) { + command_1.issue('error', message); +} +exports.error = error; +/** + * Adds an warning issue + * @param message warning issue message + */ +function warning(message) { + command_1.issue('warning', message); +} +exports.warning = warning; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store + */ +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map + +/***/ }), + +/***/ 525: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
+Object.defineProperty(exports, "__esModule", { value: true }); +const http = __webpack_require__(605); +const https = __webpack_require__(211); +const _ = __webpack_require__(891); +const ntlm = __webpack_require__(432); +class NtlmCredentialHandler { + constructor(username, password, workstation, domain) { + this._ntlmOptions = {}; + this._ntlmOptions.username = username; + this._ntlmOptions.password = password; + if (domain !== undefined) { + this._ntlmOptions.domain = domain; + } + else { + this._ntlmOptions.domain = ''; + } + if (workstation !== undefined) { + this._ntlmOptions.workstation = workstation; + } + else { + this._ntlmOptions.workstation = ''; + } + } + prepareRequest(options) { + // No headers or options need to be set. We keep the credentials on the handler itself. + // If a (proxy) agent is set, remove it as we don't support proxy for NTLM at this time + if (options.agent) { + delete options.agent; + } + } + canHandleAuthentication(response) { + if (response && response.message && response.message.statusCode === 401) { + // Ensure that we're talking NTLM here + // Once we have the www-authenticate header, split it so we can ensure we can talk NTLM + const wwwAuthenticate = response.message.headers['www-authenticate']; + if (wwwAuthenticate) { + const mechanisms = wwwAuthenticate.split(', '); + const index = mechanisms.indexOf("NTLM"); + if (index >= 0) { + return true; + } + } + } + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return new Promise((resolve, reject) => { + const callbackForResult = function (err, res) { + if (err) { + reject(err); + } + // We have to readbody on the response before continuing otherwise there is a hang. + res.readBody().then(() => { + resolve(res); + }); + }; + this.handleAuthenticationPrivate(httpClient, requestInfo, objs, callbackForResult); + }); + } + handleAuthenticationPrivate(httpClient, requestInfo, objs, finalCallback) { + // Set up the headers for NTLM authentication + requestInfo.options = _.extend(requestInfo.options, { + username: this._ntlmOptions.username, + password: this._ntlmOptions.password, + domain: this._ntlmOptions.domain, + workstation: this._ntlmOptions.workstation + }); + if (httpClient.isSsl === true) { + requestInfo.options.agent = new https.Agent({ keepAlive: true }); + } + else { + requestInfo.options.agent = new http.Agent({ keepAlive: true }); + } + let self = this; + // The following pattern of sending the type1 message following immediately (in a setImmediate) is + // critical for the NTLM exchange to happen. If we removed setImmediate (or call in a different manner) + // the NTLM exchange will always fail with a 401. + this.sendType1Message(httpClient, requestInfo, objs, function (err, res) { + if (err) { + return finalCallback(err, null, null); + } + /// We have to readbody on the response before continuing otherwise there is a hang. + res.readBody().then(() => { + // It is critical that we have setImmediate here due to how connection requests are queued. + // If setImmediate is removed then the NTLM handshake will not work. + // setImmediate allows us to queue a second request on the same connection. If this second + // request is not queued on the connection when the first request finishes then node closes + // the connection. NTLM requires both requests to be on the same connection so we need this. 
+ setImmediate(function () { + self.sendType3Message(httpClient, requestInfo, objs, res, finalCallback); + }); + }); + }); + } + // The following method is an adaptation of code found at https://github.com/SamDecrock/node-http-ntlm/blob/master/httpntlm.js + sendType1Message(httpClient, requestInfo, objs, finalCallback) { + const type1msg = ntlm.createType1Message(this._ntlmOptions); + const type1options = { + headers: { + 'Connection': 'keep-alive', + 'Authorization': type1msg + }, + timeout: requestInfo.options.timeout || 0, + agent: requestInfo.httpModule, + }; + const type1info = {}; + type1info.httpModule = requestInfo.httpModule; + type1info.parsedUrl = requestInfo.parsedUrl; + type1info.options = _.extend(type1options, _.omit(requestInfo.options, 'headers')); + return httpClient.requestRawWithCallback(type1info, objs, finalCallback); + } + // The following method is an adaptation of code found at https://github.com/SamDecrock/node-http-ntlm/blob/master/httpntlm.js + sendType3Message(httpClient, requestInfo, objs, res, callback) { + if (!res.message.headers && !res.message.headers['www-authenticate']) { + throw new Error('www-authenticate not found on response of second request'); + } + const type2msg = ntlm.parseType2Message(res.message.headers['www-authenticate']); + const type3msg = ntlm.createType3Message(type2msg, this._ntlmOptions); + const type3options = { + headers: { + 'Authorization': type3msg, + 'Connection': 'Close' + }, + agent: requestInfo.httpModule, + }; + const type3info = {}; + type3info.httpModule = requestInfo.httpModule; + type3info.parsedUrl = requestInfo.parsedUrl; + type3options.headers = _.extend(type3options.headers, requestInfo.options.headers); + type3info.options = _.extend(type3options, _.omit(requestInfo.options, 'headers')); + return httpClient.requestRawWithCallback(type3info, objs, callback); + } +} +exports.NtlmCredentialHandler = NtlmCredentialHandler; + + +/***/ }), + +/***/ 571: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +Object.defineProperty(exports, "__esModule", { value: true }); +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Bearer ' + this.token; + options.headers['X-TFS-FedAuthRedirect'] = 'Suppress'; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; + + +/***/ }), + +/***/ 605: +/***/ (function(module) { + +module.exports = require("http"); + +/***/ }), + +/***/ 614: +/***/ (function(module) { + +module.exports = require("events"); + +/***/ }), + +/***/ 622: +/***/ (function(module) { + +module.exports = require("path"); + +/***/ }), + +/***/ 631: +/***/ (function(module) { + +module.exports = require("net"); + +/***/ }), + +/***/ 669: +/***/ (function(module) { + +module.exports = require("util"); + +/***/ }), + +/***/ 672: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? 
value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = __webpack_require__(357); +const fs = __webpack_require__(747); +const path = __webpack_require__(622); +_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +exports.IS_WINDOWS = process.platform === 'win32'; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Recursively create a directory at `fsPath`. + * + * This implementation is optimistic, meaning it attempts to create the full + * path first, and backs up the path stack from there. + * + * @param fsPath The path to create + * @param maxDepth The maximum recursion depth + * @param depth The current recursion depth + */ +function mkdirP(fsPath, maxDepth = 1000, depth = 1) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + fsPath = path.resolve(fsPath); + if (depth >= maxDepth) + return exports.mkdir(fsPath); + try { + yield exports.mkdir(fsPath); + return; + } + catch (err) { + switch (err.code) { + case 'ENOENT': { + yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); + yield exports.mkdir(fsPath); + return; + } + default: { + let stats; + try { + stats = yield exports.stat(fsPath); + } + catch (err2) { + throw err; + } + if (!stats.isDirectory()) + throw err; + } + } + } + }); +} +exports.mkdirP = mkdirP; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. 
+ */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 681: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const exec_1 = __webpack_require__(986); +const io = __importStar(__webpack_require__(1)); +const fs = __importStar(__webpack_require__(747)); +const path = __importStar(__webpack_require__(622)); +const cacheHttpClient = __importStar(__webpack_require__(154)); +const constants_1 = __webpack_require__(694); +const utils = __importStar(__webpack_require__(443)); +function run() { + return __awaiter(this, void 0, void 0, function* () { + try { + const state = utils.getCacheState(); + // Inputs are re-evaluted before the post action, so we want the original key used for restore + const primaryKey = core.getState(constants_1.State.CacheKey); + if (!primaryKey) { + core.warning(`Error retrieving key from state.`); + return; + } + if (utils.isExactKeyMatch(primaryKey, state)) { + core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); + return; + } + let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); + core.debug(`Cache Path: ${cachePath}`); + let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); + core.debug(`Archive Path: ${archivePath}`); + // http://man7.org/linux/man-pages/man1/tar.1.html + // tar [-options] [files or directories which to add into archive] + const args = ["-cz"]; + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + args.push("--force-local"); + archivePath = archivePath.replace(/\\/g, "/"); + cachePath = cachePath.replace(/\\/g, "/"); + } + args.push(...["-f", archivePath, "-C", cachePath, "."]); + const tarPath = yield io.which("tar", true); + core.debug(`Tar Path: ${tarPath}`); + yield exec_1.exec(`"${tarPath}"`, args); + const fileSizeLimit = 200 * 1024 * 1024; // 200MB + const archiveFileSize = fs.statSync(archivePath).size; + core.debug(`File Size: ${archiveFileSize}`); + if (archiveFileSize > fileSizeLimit) { + core.warning(`Cache size of ${archiveFileSize} bytes is over the 200MB limit, not saving cache.`); + return; + } + const stream = fs.createReadStream(archivePath); + yield cacheHttpClient.saveCache(stream, primaryKey); + } + catch (error) { + core.warning(error.message); + } + }); +} +run(); +exports.default = run; + + +/***/ }), + +/***/ 694: +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +var Inputs; +(function (Inputs) { + Inputs.Key = "key"; + Inputs.Path = "path"; + Inputs.RestoreKeys = "restore-keys"; +})(Inputs = exports.Inputs || (exports.Inputs = {})); +var Outputs; +(function (Outputs) { + Outputs.CacheHit = "cache-hit"; +})(Outputs = exports.Outputs || (exports.Outputs = {})); +var State; +(function (State) { + State.CacheKey = "CACHE_KEY"; + State.CacheResult = "CACHE_RESULT"; +})(State = exports.State || (exports.State = {})); + + +/***/ }), + +/***/ 722: +/***/ (function(module) { + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +var byteToHex 
= []; +for (var i = 0; i < 256; ++i) { + byteToHex[i] = (i + 0x100).toString(16).substr(1); +} + +function bytesToUuid(buf, offset) { + var i = offset || 0; + var bth = byteToHex; + // join used to fix memory issue caused by concatenation: https://bugs.chromium.org/p/v8/issues/detail?id=3175#c4 + return ([bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], '-', + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]], + bth[buf[i++]], bth[buf[i++]]]).join(''); +} + +module.exports = bytesToUuid; + + +/***/ }), + +/***/ 729: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. +Object.defineProperty(exports, "__esModule", { value: true }); +const url = __webpack_require__(835); +const path = __webpack_require__(622); +/** + * creates an url from a request url and optional base url (http://server:8080) + * @param {string} resource - a fully qualified url or relative path + * @param {string} baseUrl - an optional baseUrl (http://server:8080) + * @return {string} - resultant url + */ +function getUrl(resource, baseUrl) { + const pathApi = path.posix || path; + if (!baseUrl) { + return resource; + } + else if (!resource) { + return baseUrl; + } + else { + const base = url.parse(baseUrl); + const resultantUrl = url.parse(resource); + // resource (specific per request) elements take priority + resultantUrl.protocol = resultantUrl.protocol || base.protocol; + resultantUrl.auth = resultantUrl.auth || base.auth; + resultantUrl.host = resultantUrl.host || base.host; + resultantUrl.pathname = pathApi.resolve(base.pathname, resultantUrl.pathname); + if (!resultantUrl.pathname.endsWith('/') && resource.endsWith('/')) { + resultantUrl.pathname += '/'; + } + return url.format(resultantUrl); + } +} +exports.getUrl = getUrl; + + +/***/ }), + +/***/ 747: +/***/ (function(module) { + +module.exports = require("fs"); + +/***/ }), + +/***/ 826: +/***/ (function(module, __unusedexports, __webpack_require__) { + +var rng = __webpack_require__(139); +var bytesToUuid = __webpack_require__(722); + +function v4(options, buf, offset) { + var i = buf && offset || 0; + + if (typeof(options) == 'string') { + buf = options === 'binary' ? new Array(16) : null; + options = null; + } + options = options || {}; + + var rnds = options.random || (options.rng || rng)(); + + // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` + rnds[6] = (rnds[6] & 0x0f) | 0x40; + rnds[8] = (rnds[8] & 0x3f) | 0x80; + + // Copy bytes to buffer, if provided + if (buf) { + for (var ii = 0; ii < 16; ++ii) { + buf[i + ii] = rnds[ii]; + } + } + + return buf || bytesToUuid(rnds); +} + +module.exports = v4; + + +/***/ }), + +/***/ 835: +/***/ (function(module) { + +module.exports = require("url"); + +/***/ }), + +/***/ 874: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +// Copyright (c) Microsoft. All rights reserved. +// Licensed under the MIT license. See LICENSE file in the project root for full license information. 
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : new P(function (resolve) { resolve(result.value); }).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const url = __webpack_require__(835); +const http = __webpack_require__(605); +const https = __webpack_require__(211); +let fs; +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect]; +const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () { + let output = ''; + this.message.on('data', (chunk) => { + output += chunk; + }); + this.message.on('end', () => { + resolve(output); + }); + })); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = url.parse(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +var EnvironmentVariables; +(function 
(EnvironmentVariables) { + EnvironmentVariables["HTTP_PROXY"] = "HTTP_PROXY"; + EnvironmentVariables["HTTPS_PROXY"] = "HTTPS_PROXY"; +})(EnvironmentVariables || (EnvironmentVariables = {})); +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + this._httpProxy = requestOptions.proxy; + if (requestOptions.proxy && requestOptions.proxy.proxyBypassHosts) { + this._httpProxyBypassHosts = []; + requestOptions.proxy.proxyBypassHosts.forEach(bypass => { + this._httpProxyBypassHosts.push(new RegExp(bypass, 'i')); + }); + } + this._certConfig = requestOptions.cert; + if (this._certConfig) { + // If using cert, need fs + fs = __webpack_require__(747); + // cache the cert content into memory, so we don't have to read it from disk every time + if (this._certConfig.caFile && fs.existsSync(this._certConfig.caFile)) { + this._ca = fs.readFileSync(this._certConfig.caFile, 'utf8'); + } + if (this._certConfig.certFile && fs.existsSync(this._certConfig.certFile)) { + this._cert = fs.readFileSync(this._certConfig.certFile, 'utf8'); + } + if (this._certConfig.keyFile && fs.existsSync(this._certConfig.keyFile)) { + this._key = fs.readFileSync(this._certConfig.keyFile, 'utf8'); + } + } + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } + sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. 
+ * Prefer get, del, post and patch + */ + request(verb, requestUrl, data, headers) { + return __awaiter(this, void 0, void 0, function* () { + if (this._disposed) { + throw new Error("Client has already been disposed."); + } + let info = this._prepareRequest(verb, requestUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = yield this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. + return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 + && this._allowRedirects + && redirectsRemaining > 0) { + const redirectUrl = response.message.headers["location"]; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + yield response.readBody(); + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, redirectUrl, headers); + response = yield this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + yield response.readBody(); + yield this._performExponentialBackoff(numTries); + } + } + return response; + }); + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. 
+ * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + let isDataString = typeof (data) === 'string'; + if (typeof (data) === 'string') { + info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', (sock) => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof (data) === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof (data) !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = url.parse(requestUrl); + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort; + info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + info.options.headers["user-agent"] = this.userAgent; + info.options.agent = this._getAgent(requestUrl); + // gives handlers an opportunity to participate + if (this.handlers && !this._isPresigned(requestUrl)) { + this.handlers.forEach((handler) => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _isPresigned(requestUrl) { + if (this.requestOptions && this.requestOptions.presignedUrlPatterns) { + const patterns = this.requestOptions.presignedUrlPatterns; + for (let i = 0; i < patterns.length; i++) { + if (requestUrl.match(patterns[i])) { + return true; + } + } + } + return false; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getAgent(requestUrl) { + let agent; + let proxy = this._getProxy(requestUrl); + let useProxy = proxy.proxyUrl && proxy.proxyUrl.hostname && !this._isBypassProxy(requestUrl); + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. 
+ if (!!agent) { + return agent; + } + let parsedUrl = url.parse(requestUrl); + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __webpack_require__(413); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + proxyAuth: proxy.proxyAuth, + host: proxy.proxyUrl.hostname, + port: proxy.proxyUrl.port + }, + }; + let tunnelAgent; + const overHttps = proxy.proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); + } + if (usingSsl && this._certConfig) { + agent.options = Object.assign(agent.options || {}, { ca: this._ca, cert: this._cert, key: this._key, passphrase: this._certConfig.passphrase }); + } + return agent; + } + _getProxy(requestUrl) { + const parsedUrl = url.parse(requestUrl); + let usingSsl = parsedUrl.protocol === 'https:'; + let proxyConfig = this._httpProxy; + // fallback to http_proxy and https_proxy env + let https_proxy = process.env[EnvironmentVariables.HTTPS_PROXY]; + let http_proxy = process.env[EnvironmentVariables.HTTP_PROXY]; + if (!proxyConfig) { + if (https_proxy && usingSsl) { + proxyConfig = { + proxyUrl: https_proxy + }; + } + else if (http_proxy) { + proxyConfig = { + proxyUrl: http_proxy + }; + } + } + let proxyUrl; + let proxyAuth; + if (proxyConfig) { + if (proxyConfig.proxyUrl.length > 0) { + proxyUrl = url.parse(proxyConfig.proxyUrl); + } + if (proxyConfig.proxyUsername || proxyConfig.proxyPassword) { + proxyAuth = proxyConfig.proxyUsername + ":" + proxyConfig.proxyPassword; + } + } + return { proxyUrl: proxyUrl, proxyAuth: proxyAuth }; + } + _isBypassProxy(requestUrl) { + if (!this._httpProxyBypassHosts) { + return false; + } + let bypass = false; + this._httpProxyBypassHosts.forEach(bypassHost => { + if (bypassHost.test(requestUrl)) { + bypass = true; + } + }); + return bypass; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } +} +exports.HttpClient = HttpClient; + + +/***/ }), + +/***/ 891: +/***/ (function(module, exports) { + +// Underscore.js 1.8.3 +// http://underscorejs.org +// (c) 2009-2015 Jeremy Ashkenas, DocumentCloud and Investigative Reporters & 
Editors +// Underscore may be freely distributed under the MIT license. + +(function() { + + // Baseline setup + // -------------- + + // Establish the root object, `window` in the browser, or `exports` on the server. + var root = this; + + // Save the previous value of the `_` variable. + var previousUnderscore = root._; + + // Save bytes in the minified (but not gzipped) version: + var ArrayProto = Array.prototype, ObjProto = Object.prototype, FuncProto = Function.prototype; + + // Create quick reference variables for speed access to core prototypes. + var + push = ArrayProto.push, + slice = ArrayProto.slice, + toString = ObjProto.toString, + hasOwnProperty = ObjProto.hasOwnProperty; + + // All **ECMAScript 5** native function implementations that we hope to use + // are declared here. + var + nativeIsArray = Array.isArray, + nativeKeys = Object.keys, + nativeBind = FuncProto.bind, + nativeCreate = Object.create; + + // Naked function reference for surrogate-prototype-swapping. + var Ctor = function(){}; + + // Create a safe reference to the Underscore object for use below. + var _ = function(obj) { + if (obj instanceof _) return obj; + if (!(this instanceof _)) return new _(obj); + this._wrapped = obj; + }; + + // Export the Underscore object for **Node.js**, with + // backwards-compatibility for the old `require()` API. If we're in + // the browser, add `_` as a global object. + if (true) { + if ( true && module.exports) { + exports = module.exports = _; + } + exports._ = _; + } else {} + + // Current version. + _.VERSION = '1.8.3'; + + // Internal function that returns an efficient (for current engines) version + // of the passed-in callback, to be repeatedly applied in other Underscore + // functions. + var optimizeCb = function(func, context, argCount) { + if (context === void 0) return func; + switch (argCount == null ? 3 : argCount) { + case 1: return function(value) { + return func.call(context, value); + }; + case 2: return function(value, other) { + return func.call(context, value, other); + }; + case 3: return function(value, index, collection) { + return func.call(context, value, index, collection); + }; + case 4: return function(accumulator, value, index, collection) { + return func.call(context, accumulator, value, index, collection); + }; + } + return function() { + return func.apply(context, arguments); + }; + }; + + // A mostly-internal function to generate callbacks that can be applied + // to each element in a collection, returning the desired result — either + // identity, an arbitrary callback, a property matcher, or a property accessor. + var cb = function(value, context, argCount) { + if (value == null) return _.identity; + if (_.isFunction(value)) return optimizeCb(value, context, argCount); + if (_.isObject(value)) return _.matcher(value); + return _.property(value); + }; + _.iteratee = function(value, context) { + return cb(value, context, Infinity); + }; + + // An internal function for creating assigner functions. + var createAssigner = function(keysFunc, undefinedOnly) { + return function(obj) { + var length = arguments.length; + if (length < 2 || obj == null) return obj; + for (var index = 1; index < length; index++) { + var source = arguments[index], + keys = keysFunc(source), + l = keys.length; + for (var i = 0; i < l; i++) { + var key = keys[i]; + if (!undefinedOnly || obj[key] === void 0) obj[key] = source[key]; + } + } + return obj; + }; + }; + + // An internal function for creating a new object that inherits from another. 
+ var baseCreate = function(prototype) { + if (!_.isObject(prototype)) return {}; + if (nativeCreate) return nativeCreate(prototype); + Ctor.prototype = prototype; + var result = new Ctor; + Ctor.prototype = null; + return result; + }; + + var property = function(key) { + return function(obj) { + return obj == null ? void 0 : obj[key]; + }; + }; + + // Helper for collection methods to determine whether a collection + // should be iterated as an array or as an object + // Related: http://people.mozilla.org/~jorendorff/es6-draft.html#sec-tolength + // Avoids a very nasty iOS 8 JIT bug on ARM-64. #2094 + var MAX_ARRAY_INDEX = Math.pow(2, 53) - 1; + var getLength = property('length'); + var isArrayLike = function(collection) { + var length = getLength(collection); + return typeof length == 'number' && length >= 0 && length <= MAX_ARRAY_INDEX; + }; + + // Collection Functions + // -------------------- + + // The cornerstone, an `each` implementation, aka `forEach`. + // Handles raw objects in addition to array-likes. Treats all + // sparse array-likes as if they were dense. + _.each = _.forEach = function(obj, iteratee, context) { + iteratee = optimizeCb(iteratee, context); + var i, length; + if (isArrayLike(obj)) { + for (i = 0, length = obj.length; i < length; i++) { + iteratee(obj[i], i, obj); + } + } else { + var keys = _.keys(obj); + for (i = 0, length = keys.length; i < length; i++) { + iteratee(obj[keys[i]], keys[i], obj); + } + } + return obj; + }; + + // Return the results of applying the iteratee to each element. + _.map = _.collect = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + results = Array(length); + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + results[index] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Create a reducing function iterating left or right. + function createReduce(dir) { + // Optimized iterator function as using arguments.length + // in the main function will deoptimize the, see #1991. + function iterator(obj, iteratee, memo, keys, index, length) { + for (; index >= 0 && index < length; index += dir) { + var currentKey = keys ? keys[index] : index; + memo = iteratee(memo, obj[currentKey], currentKey, obj); + } + return memo; + } + + return function(obj, iteratee, memo, context) { + iteratee = optimizeCb(iteratee, context, 4); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length, + index = dir > 0 ? 0 : length - 1; + // Determine the initial value if none is provided. + if (arguments.length < 3) { + memo = obj[keys ? keys[index] : index]; + index += dir; + } + return iterator(obj, iteratee, memo, keys, index, length); + }; + } + + // **Reduce** builds up a single result from a list of values, aka `inject`, + // or `foldl`. + _.reduce = _.foldl = _.inject = createReduce(1); + + // The right-associative version of reduce, also known as `foldr`. + _.reduceRight = _.foldr = createReduce(-1); + + // Return the first value which passes a truth test. Aliased as `detect`. + _.find = _.detect = function(obj, predicate, context) { + var key; + if (isArrayLike(obj)) { + key = _.findIndex(obj, predicate, context); + } else { + key = _.findKey(obj, predicate, context); + } + if (key !== void 0 && key !== -1) return obj[key]; + }; + + // Return all the elements that pass a truth test. + // Aliased as `select`. 
+ _.filter = _.select = function(obj, predicate, context) { + var results = []; + predicate = cb(predicate, context); + _.each(obj, function(value, index, list) { + if (predicate(value, index, list)) results.push(value); + }); + return results; + }; + + // Return all the elements for which a truth test fails. + _.reject = function(obj, predicate, context) { + return _.filter(obj, _.negate(cb(predicate)), context); + }; + + // Determine whether all of the elements match a truth test. + // Aliased as `all`. + _.every = _.all = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (!predicate(obj[currentKey], currentKey, obj)) return false; + } + return true; + }; + + // Determine if at least one element in the object matches a truth test. + // Aliased as `any`. + _.some = _.any = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = !isArrayLike(obj) && _.keys(obj), + length = (keys || obj).length; + for (var index = 0; index < length; index++) { + var currentKey = keys ? keys[index] : index; + if (predicate(obj[currentKey], currentKey, obj)) return true; + } + return false; + }; + + // Determine if the array or object contains a given item (using `===`). + // Aliased as `includes` and `include`. + _.contains = _.includes = _.include = function(obj, item, fromIndex, guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + if (typeof fromIndex != 'number' || guard) fromIndex = 0; + return _.indexOf(obj, item, fromIndex) >= 0; + }; + + // Invoke a method (with arguments) on every item in a collection. + _.invoke = function(obj, method) { + var args = slice.call(arguments, 2); + var isFunc = _.isFunction(method); + return _.map(obj, function(value) { + var func = isFunc ? method : value[method]; + return func == null ? func : func.apply(value, args); + }); + }; + + // Convenience version of a common use case of `map`: fetching a property. + _.pluck = function(obj, key) { + return _.map(obj, _.property(key)); + }; + + // Convenience version of a common use case of `filter`: selecting only objects + // containing specific `key:value` pairs. + _.where = function(obj, attrs) { + return _.filter(obj, _.matcher(attrs)); + }; + + // Convenience version of a common use case of `find`: getting the first object + // containing specific `key:value` pairs. + _.findWhere = function(obj, attrs) { + return _.find(obj, _.matcher(attrs)); + }; + + // Return the maximum element (or element-based computation). + _.max = function(obj, iteratee, context) { + var result = -Infinity, lastComputed = -Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value > result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed > lastComputed || computed === -Infinity && result === -Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Return the minimum element (or element-based computation). + _.min = function(obj, iteratee, context) { + var result = Infinity, lastComputed = Infinity, + value, computed; + if (iteratee == null && obj != null) { + obj = isArrayLike(obj) ? 
obj : _.values(obj); + for (var i = 0, length = obj.length; i < length; i++) { + value = obj[i]; + if (value < result) { + result = value; + } + } + } else { + iteratee = cb(iteratee, context); + _.each(obj, function(value, index, list) { + computed = iteratee(value, index, list); + if (computed < lastComputed || computed === Infinity && result === Infinity) { + result = value; + lastComputed = computed; + } + }); + } + return result; + }; + + // Shuffle a collection, using the modern version of the + // [Fisher-Yates shuffle](http://en.wikipedia.org/wiki/Fisher–Yates_shuffle). + _.shuffle = function(obj) { + var set = isArrayLike(obj) ? obj : _.values(obj); + var length = set.length; + var shuffled = Array(length); + for (var index = 0, rand; index < length; index++) { + rand = _.random(0, index); + if (rand !== index) shuffled[index] = shuffled[rand]; + shuffled[rand] = set[index]; + } + return shuffled; + }; + + // Sample **n** random values from a collection. + // If **n** is not specified, returns a single random element. + // The internal `guard` argument allows it to work with `map`. + _.sample = function(obj, n, guard) { + if (n == null || guard) { + if (!isArrayLike(obj)) obj = _.values(obj); + return obj[_.random(obj.length - 1)]; + } + return _.shuffle(obj).slice(0, Math.max(0, n)); + }; + + // Sort the object's values by a criterion produced by an iteratee. + _.sortBy = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + return _.pluck(_.map(obj, function(value, index, list) { + return { + value: value, + index: index, + criteria: iteratee(value, index, list) + }; + }).sort(function(left, right) { + var a = left.criteria; + var b = right.criteria; + if (a !== b) { + if (a > b || a === void 0) return 1; + if (a < b || b === void 0) return -1; + } + return left.index - right.index; + }), 'value'); + }; + + // An internal function used for aggregate "group by" operations. + var group = function(behavior) { + return function(obj, iteratee, context) { + var result = {}; + iteratee = cb(iteratee, context); + _.each(obj, function(value, index) { + var key = iteratee(value, index, obj); + behavior(result, value, key); + }); + return result; + }; + }; + + // Groups the object's values by a criterion. Pass either a string attribute + // to group by, or a function that returns the criterion. + _.groupBy = group(function(result, value, key) { + if (_.has(result, key)) result[key].push(value); else result[key] = [value]; + }); + + // Indexes the object's values by a criterion, similar to `groupBy`, but for + // when you know that your index values will be unique. + _.indexBy = group(function(result, value, key) { + result[key] = value; + }); + + // Counts instances of an object that group by a certain criterion. Pass + // either a string attribute to count by, or a function that returns the + // criterion. + _.countBy = group(function(result, value, key) { + if (_.has(result, key)) result[key]++; else result[key] = 1; + }); + + // Safely create a real, live array from anything iterable. + _.toArray = function(obj) { + if (!obj) return []; + if (_.isArray(obj)) return slice.call(obj); + if (isArrayLike(obj)) return _.map(obj, _.identity); + return _.values(obj); + }; + + // Return the number of elements in an object. + _.size = function(obj) { + if (obj == null) return 0; + return isArrayLike(obj) ? 
obj.length : _.keys(obj).length; + }; + + // Split a collection into two arrays: one whose elements all satisfy the given + // predicate, and one whose elements all do not satisfy the predicate. + _.partition = function(obj, predicate, context) { + predicate = cb(predicate, context); + var pass = [], fail = []; + _.each(obj, function(value, key, obj) { + (predicate(value, key, obj) ? pass : fail).push(value); + }); + return [pass, fail]; + }; + + // Array Functions + // --------------- + + // Get the first element of an array. Passing **n** will return the first N + // values in the array. Aliased as `head` and `take`. The **guard** check + // allows it to work with `_.map`. + _.first = _.head = _.take = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[0]; + return _.initial(array, array.length - n); + }; + + // Returns everything but the last entry of the array. Especially useful on + // the arguments object. Passing **n** will return all the values in + // the array, excluding the last N. + _.initial = function(array, n, guard) { + return slice.call(array, 0, Math.max(0, array.length - (n == null || guard ? 1 : n))); + }; + + // Get the last element of an array. Passing **n** will return the last N + // values in the array. + _.last = function(array, n, guard) { + if (array == null) return void 0; + if (n == null || guard) return array[array.length - 1]; + return _.rest(array, Math.max(0, array.length - n)); + }; + + // Returns everything but the first entry of the array. Aliased as `tail` and `drop`. + // Especially useful on the arguments object. Passing an **n** will return + // the rest N values in the array. + _.rest = _.tail = _.drop = function(array, n, guard) { + return slice.call(array, n == null || guard ? 1 : n); + }; + + // Trim out all falsy values from an array. + _.compact = function(array) { + return _.filter(array, _.identity); + }; + + // Internal implementation of a recursive `flatten` function. + var flatten = function(input, shallow, strict, startIndex) { + var output = [], idx = 0; + for (var i = startIndex || 0, length = getLength(input); i < length; i++) { + var value = input[i]; + if (isArrayLike(value) && (_.isArray(value) || _.isArguments(value))) { + //flatten current level of array or arguments object + if (!shallow) value = flatten(value, shallow, strict); + var j = 0, len = value.length; + output.length += len; + while (j < len) { + output[idx++] = value[j++]; + } + } else if (!strict) { + output[idx++] = value; + } + } + return output; + }; + + // Flatten out an array, either recursively (by default), or just one level. + _.flatten = function(array, shallow) { + return flatten(array, shallow, false); + }; + + // Return a version of the array that does not contain the specified value(s). + _.without = function(array) { + return _.difference(array, slice.call(arguments, 1)); + }; + + // Produce a duplicate-free version of the array. If the array has already + // been sorted, you have the option of using a faster algorithm. + // Aliased as `unique`. + _.uniq = _.unique = function(array, isSorted, iteratee, context) { + if (!_.isBoolean(isSorted)) { + context = iteratee; + iteratee = isSorted; + isSorted = false; + } + if (iteratee != null) iteratee = cb(iteratee, context); + var result = []; + var seen = []; + for (var i = 0, length = getLength(array); i < length; i++) { + var value = array[i], + computed = iteratee ? 
iteratee(value, i, array) : value; + if (isSorted) { + if (!i || seen !== computed) result.push(value); + seen = computed; + } else if (iteratee) { + if (!_.contains(seen, computed)) { + seen.push(computed); + result.push(value); + } + } else if (!_.contains(result, value)) { + result.push(value); + } + } + return result; + }; + + // Produce an array that contains the union: each distinct element from all of + // the passed-in arrays. + _.union = function() { + return _.uniq(flatten(arguments, true, true)); + }; + + // Produce an array that contains every item shared between all the + // passed-in arrays. + _.intersection = function(array) { + var result = []; + var argsLength = arguments.length; + for (var i = 0, length = getLength(array); i < length; i++) { + var item = array[i]; + if (_.contains(result, item)) continue; + for (var j = 1; j < argsLength; j++) { + if (!_.contains(arguments[j], item)) break; + } + if (j === argsLength) result.push(item); + } + return result; + }; + + // Take the difference between one array and a number of other arrays. + // Only the elements present in just the first array will remain. + _.difference = function(array) { + var rest = flatten(arguments, true, true, 1); + return _.filter(array, function(value){ + return !_.contains(rest, value); + }); + }; + + // Zip together multiple lists into a single array -- elements that share + // an index go together. + _.zip = function() { + return _.unzip(arguments); + }; + + // Complement of _.zip. Unzip accepts an array of arrays and groups + // each array's elements on shared indices + _.unzip = function(array) { + var length = array && _.max(array, getLength).length || 0; + var result = Array(length); + + for (var index = 0; index < length; index++) { + result[index] = _.pluck(array, index); + } + return result; + }; + + // Converts lists into objects. Pass either a single array of `[key, value]` + // pairs, or two parallel arrays of the same length -- one of keys, and one of + // the corresponding values. + _.object = function(list, values) { + var result = {}; + for (var i = 0, length = getLength(list); i < length; i++) { + if (values) { + result[list[i]] = values[i]; + } else { + result[list[i][0]] = list[i][1]; + } + } + return result; + }; + + // Generator function to create the findIndex and findLastIndex functions + function createPredicateIndexFinder(dir) { + return function(array, predicate, context) { + predicate = cb(predicate, context); + var length = getLength(array); + var index = dir > 0 ? 0 : length - 1; + for (; index >= 0 && index < length; index += dir) { + if (predicate(array[index], index, array)) return index; + } + return -1; + }; + } + + // Returns the first index on an array-like that passes a predicate test + _.findIndex = createPredicateIndexFinder(1); + _.findLastIndex = createPredicateIndexFinder(-1); + + // Use a comparator function to figure out the smallest index at which + // an object should be inserted so as to maintain order. Uses binary search. 
+ _.sortedIndex = function(array, obj, iteratee, context) { + iteratee = cb(iteratee, context, 1); + var value = iteratee(obj); + var low = 0, high = getLength(array); + while (low < high) { + var mid = Math.floor((low + high) / 2); + if (iteratee(array[mid]) < value) low = mid + 1; else high = mid; + } + return low; + }; + + // Generator function to create the indexOf and lastIndexOf functions + function createIndexFinder(dir, predicateFind, sortedIndex) { + return function(array, item, idx) { + var i = 0, length = getLength(array); + if (typeof idx == 'number') { + if (dir > 0) { + i = idx >= 0 ? idx : Math.max(idx + length, i); + } else { + length = idx >= 0 ? Math.min(idx + 1, length) : idx + length + 1; + } + } else if (sortedIndex && idx && length) { + idx = sortedIndex(array, item); + return array[idx] === item ? idx : -1; + } + if (item !== item) { + idx = predicateFind(slice.call(array, i, length), _.isNaN); + return idx >= 0 ? idx + i : -1; + } + for (idx = dir > 0 ? i : length - 1; idx >= 0 && idx < length; idx += dir) { + if (array[idx] === item) return idx; + } + return -1; + }; + } + + // Return the position of the first occurrence of an item in an array, + // or -1 if the item is not included in the array. + // If the array is large and already in sort order, pass `true` + // for **isSorted** to use binary search. + _.indexOf = createIndexFinder(1, _.findIndex, _.sortedIndex); + _.lastIndexOf = createIndexFinder(-1, _.findLastIndex); + + // Generate an integer Array containing an arithmetic progression. A port of + // the native Python `range()` function. See + // [the Python documentation](http://docs.python.org/library/functions.html#range). + _.range = function(start, stop, step) { + if (stop == null) { + stop = start || 0; + start = 0; + } + step = step || 1; + + var length = Math.max(Math.ceil((stop - start) / step), 0); + var range = Array(length); + + for (var idx = 0; idx < length; idx++, start += step) { + range[idx] = start; + } + + return range; + }; + + // Function (ahem) Functions + // ------------------ + + // Determines whether to execute a function as a constructor + // or a normal function with the provided arguments + var executeBound = function(sourceFunc, boundFunc, context, callingContext, args) { + if (!(callingContext instanceof boundFunc)) return sourceFunc.apply(context, args); + var self = baseCreate(sourceFunc.prototype); + var result = sourceFunc.apply(self, args); + if (_.isObject(result)) return result; + return self; + }; + + // Create a function bound to a given object (assigning `this`, and arguments, + // optionally). Delegates to **ECMAScript 5**'s native `Function.bind` if + // available. + _.bind = function(func, context) { + if (nativeBind && func.bind === nativeBind) return nativeBind.apply(func, slice.call(arguments, 1)); + if (!_.isFunction(func)) throw new TypeError('Bind must be called on a function'); + var args = slice.call(arguments, 2); + var bound = function() { + return executeBound(func, bound, context, this, args.concat(slice.call(arguments))); + }; + return bound; + }; + + // Partially apply a function by creating a version that has had some of its + // arguments pre-filled, without changing its dynamic `this` context. _ acts + // as a placeholder, allowing any combination of arguments to be pre-filled. 
+ _.partial = function(func) { + var boundArgs = slice.call(arguments, 1); + var bound = function() { + var position = 0, length = boundArgs.length; + var args = Array(length); + for (var i = 0; i < length; i++) { + args[i] = boundArgs[i] === _ ? arguments[position++] : boundArgs[i]; + } + while (position < arguments.length) args.push(arguments[position++]); + return executeBound(func, bound, this, this, args); + }; + return bound; + }; + + // Bind a number of an object's methods to that object. Remaining arguments + // are the method names to be bound. Useful for ensuring that all callbacks + // defined on an object belong to it. + _.bindAll = function(obj) { + var i, length = arguments.length, key; + if (length <= 1) throw new Error('bindAll must be passed function names'); + for (i = 1; i < length; i++) { + key = arguments[i]; + obj[key] = _.bind(obj[key], obj); + } + return obj; + }; + + // Memoize an expensive function by storing its results. + _.memoize = function(func, hasher) { + var memoize = function(key) { + var cache = memoize.cache; + var address = '' + (hasher ? hasher.apply(this, arguments) : key); + if (!_.has(cache, address)) cache[address] = func.apply(this, arguments); + return cache[address]; + }; + memoize.cache = {}; + return memoize; + }; + + // Delays a function for the given number of milliseconds, and then calls + // it with the arguments supplied. + _.delay = function(func, wait) { + var args = slice.call(arguments, 2); + return setTimeout(function(){ + return func.apply(null, args); + }, wait); + }; + + // Defers a function, scheduling it to run after the current call stack has + // cleared. + _.defer = _.partial(_.delay, _, 1); + + // Returns a function, that, when invoked, will only be triggered at most once + // during a given window of time. Normally, the throttled function will run + // as much as it can, without ever going more than once per `wait` duration; + // but if you'd like to disable the execution on the leading edge, pass + // `{leading: false}`. To disable execution on the trailing edge, ditto. + _.throttle = function(func, wait, options) { + var context, args, result; + var timeout = null; + var previous = 0; + if (!options) options = {}; + var later = function() { + previous = options.leading === false ? 0 : _.now(); + timeout = null; + result = func.apply(context, args); + if (!timeout) context = args = null; + }; + return function() { + var now = _.now(); + if (!previous && options.leading === false) previous = now; + var remaining = wait - (now - previous); + context = this; + args = arguments; + if (remaining <= 0 || remaining > wait) { + if (timeout) { + clearTimeout(timeout); + timeout = null; + } + previous = now; + result = func.apply(context, args); + if (!timeout) context = args = null; + } else if (!timeout && options.trailing !== false) { + timeout = setTimeout(later, remaining); + } + return result; + }; + }; + + // Returns a function, that, as long as it continues to be invoked, will not + // be triggered. The function will be called after it stops being called for + // N milliseconds. If `immediate` is passed, trigger the function on the + // leading edge, instead of the trailing. 
+ _.debounce = function(func, wait, immediate) { + var timeout, args, context, timestamp, result; + + var later = function() { + var last = _.now() - timestamp; + + if (last < wait && last >= 0) { + timeout = setTimeout(later, wait - last); + } else { + timeout = null; + if (!immediate) { + result = func.apply(context, args); + if (!timeout) context = args = null; + } + } + }; + + return function() { + context = this; + args = arguments; + timestamp = _.now(); + var callNow = immediate && !timeout; + if (!timeout) timeout = setTimeout(later, wait); + if (callNow) { + result = func.apply(context, args); + context = args = null; + } + + return result; + }; + }; + + // Returns the first function passed as an argument to the second, + // allowing you to adjust arguments, run code before and after, and + // conditionally execute the original function. + _.wrap = function(func, wrapper) { + return _.partial(wrapper, func); + }; + + // Returns a negated version of the passed-in predicate. + _.negate = function(predicate) { + return function() { + return !predicate.apply(this, arguments); + }; + }; + + // Returns a function that is the composition of a list of functions, each + // consuming the return value of the function that follows. + _.compose = function() { + var args = arguments; + var start = args.length - 1; + return function() { + var i = start; + var result = args[start].apply(this, arguments); + while (i--) result = args[i].call(this, result); + return result; + }; + }; + + // Returns a function that will only be executed on and after the Nth call. + _.after = function(times, func) { + return function() { + if (--times < 1) { + return func.apply(this, arguments); + } + }; + }; + + // Returns a function that will only be executed up to (but not including) the Nth call. + _.before = function(times, func) { + var memo; + return function() { + if (--times > 0) { + memo = func.apply(this, arguments); + } + if (times <= 1) func = null; + return memo; + }; + }; + + // Returns a function that will be executed at most one time, no matter how + // often you call it. Useful for lazy initialization. + _.once = _.partial(_.before, 2); + + // Object Functions + // ---------------- + + // Keys in IE < 9 that won't be iterated by `for key in ...` and thus missed. + var hasEnumBug = !{toString: null}.propertyIsEnumerable('toString'); + var nonEnumerableProps = ['valueOf', 'isPrototypeOf', 'toString', + 'propertyIsEnumerable', 'hasOwnProperty', 'toLocaleString']; + + function collectNonEnumProps(obj, keys) { + var nonEnumIdx = nonEnumerableProps.length; + var constructor = obj.constructor; + var proto = (_.isFunction(constructor) && constructor.prototype) || ObjProto; + + // Constructor is a special case. + var prop = 'constructor'; + if (_.has(obj, prop) && !_.contains(keys, prop)) keys.push(prop); + + while (nonEnumIdx--) { + prop = nonEnumerableProps[nonEnumIdx]; + if (prop in obj && obj[prop] !== proto[prop] && !_.contains(keys, prop)) { + keys.push(prop); + } + } + } + + // Retrieve the names of an object's own properties. + // Delegates to **ECMAScript 5**'s native `Object.keys` + _.keys = function(obj) { + if (!_.isObject(obj)) return []; + if (nativeKeys) return nativeKeys(obj); + var keys = []; + for (var key in obj) if (_.has(obj, key)) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve all the property names of an object. 
+ _.allKeys = function(obj) { + if (!_.isObject(obj)) return []; + var keys = []; + for (var key in obj) keys.push(key); + // Ahem, IE < 9. + if (hasEnumBug) collectNonEnumProps(obj, keys); + return keys; + }; + + // Retrieve the values of an object's properties. + _.values = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var values = Array(length); + for (var i = 0; i < length; i++) { + values[i] = obj[keys[i]]; + } + return values; + }; + + // Returns the results of applying the iteratee to each element of the object + // In contrast to _.map it returns an object + _.mapObject = function(obj, iteratee, context) { + iteratee = cb(iteratee, context); + var keys = _.keys(obj), + length = keys.length, + results = {}, + currentKey; + for (var index = 0; index < length; index++) { + currentKey = keys[index]; + results[currentKey] = iteratee(obj[currentKey], currentKey, obj); + } + return results; + }; + + // Convert an object into a list of `[key, value]` pairs. + _.pairs = function(obj) { + var keys = _.keys(obj); + var length = keys.length; + var pairs = Array(length); + for (var i = 0; i < length; i++) { + pairs[i] = [keys[i], obj[keys[i]]]; + } + return pairs; + }; + + // Invert the keys and values of an object. The values must be serializable. + _.invert = function(obj) { + var result = {}; + var keys = _.keys(obj); + for (var i = 0, length = keys.length; i < length; i++) { + result[obj[keys[i]]] = keys[i]; + } + return result; + }; + + // Return a sorted list of the function names available on the object. + // Aliased as `methods` + _.functions = _.methods = function(obj) { + var names = []; + for (var key in obj) { + if (_.isFunction(obj[key])) names.push(key); + } + return names.sort(); + }; + + // Extend a given object with all the properties in passed-in object(s). + _.extend = createAssigner(_.allKeys); + + // Assigns a given object with all the own properties in the passed-in object(s) + // (https://developer.mozilla.org/docs/Web/JavaScript/Reference/Global_Objects/Object/assign) + _.extendOwn = _.assign = createAssigner(_.keys); + + // Returns the first key on an object that passes a predicate test + _.findKey = function(obj, predicate, context) { + predicate = cb(predicate, context); + var keys = _.keys(obj), key; + for (var i = 0, length = keys.length; i < length; i++) { + key = keys[i]; + if (predicate(obj[key], key, obj)) return key; + } + }; + + // Return a copy of the object only containing the whitelisted properties. + _.pick = function(object, oiteratee, context) { + var result = {}, obj = object, iteratee, keys; + if (obj == null) return result; + if (_.isFunction(oiteratee)) { + keys = _.allKeys(obj); + iteratee = optimizeCb(oiteratee, context); + } else { + keys = flatten(arguments, false, false, 1); + iteratee = function(value, key, obj) { return key in obj; }; + obj = Object(obj); + } + for (var i = 0, length = keys.length; i < length; i++) { + var key = keys[i]; + var value = obj[key]; + if (iteratee(value, key, obj)) result[key] = value; + } + return result; + }; + + // Return a copy of the object without the blacklisted properties. + _.omit = function(obj, iteratee, context) { + if (_.isFunction(iteratee)) { + iteratee = _.negate(iteratee); + } else { + var keys = _.map(flatten(arguments, false, false, 1), String); + iteratee = function(value, key) { + return !_.contains(keys, key); + }; + } + return _.pick(obj, iteratee, context); + }; + + // Fill in a given object with default properties. 
+ _.defaults = createAssigner(_.allKeys, true); + + // Creates an object that inherits from the given prototype object. + // If additional properties are provided then they will be added to the + // created object. + _.create = function(prototype, props) { + var result = baseCreate(prototype); + if (props) _.extendOwn(result, props); + return result; + }; + + // Create a (shallow-cloned) duplicate of an object. + _.clone = function(obj) { + if (!_.isObject(obj)) return obj; + return _.isArray(obj) ? obj.slice() : _.extend({}, obj); + }; + + // Invokes interceptor with the obj, and then returns obj. + // The primary purpose of this method is to "tap into" a method chain, in + // order to perform operations on intermediate results within the chain. + _.tap = function(obj, interceptor) { + interceptor(obj); + return obj; + }; + + // Returns whether an object has a given set of `key:value` pairs. + _.isMatch = function(object, attrs) { + var keys = _.keys(attrs), length = keys.length; + if (object == null) return !length; + var obj = Object(object); + for (var i = 0; i < length; i++) { + var key = keys[i]; + if (attrs[key] !== obj[key] || !(key in obj)) return false; + } + return true; + }; + + + // Internal recursive comparison function for `isEqual`. + var eq = function(a, b, aStack, bStack) { + // Identical objects are equal. `0 === -0`, but they aren't identical. + // See the [Harmony `egal` proposal](http://wiki.ecmascript.org/doku.php?id=harmony:egal). + if (a === b) return a !== 0 || 1 / a === 1 / b; + // A strict comparison is necessary because `null == undefined`. + if (a == null || b == null) return a === b; + // Unwrap any wrapped objects. + if (a instanceof _) a = a._wrapped; + if (b instanceof _) b = b._wrapped; + // Compare `[[Class]]` names. + var className = toString.call(a); + if (className !== toString.call(b)) return false; + switch (className) { + // Strings, numbers, regular expressions, dates, and booleans are compared by value. + case '[object RegExp]': + // RegExps are coerced to strings for comparison (Note: '' + /a/i === '/a/i') + case '[object String]': + // Primitives and their corresponding object wrappers are equivalent; thus, `"5"` is + // equivalent to `new String("5")`. + return '' + a === '' + b; + case '[object Number]': + // `NaN`s are equivalent, but non-reflexive. + // Object(NaN) is equivalent to NaN + if (+a !== +a) return +b !== +b; + // An `egal` comparison is performed for other numeric values. + return +a === 0 ? 1 / +a === 1 / b : +a === +b; + case '[object Date]': + case '[object Boolean]': + // Coerce dates and booleans to numeric primitive values. Dates are compared by their + // millisecond representations. Note that invalid dates with millisecond representations + // of `NaN` are not equivalent. + return +a === +b; + } + + var areArrays = className === '[object Array]'; + if (!areArrays) { + if (typeof a != 'object' || typeof b != 'object') return false; + + // Objects with different constructors are not equivalent, but `Object`s or `Array`s + // from different frames are. + var aCtor = a.constructor, bCtor = b.constructor; + if (aCtor !== bCtor && !(_.isFunction(aCtor) && aCtor instanceof aCtor && + _.isFunction(bCtor) && bCtor instanceof bCtor) + && ('constructor' in a && 'constructor' in b)) { + return false; + } + } + // Assume equality for cyclic structures. The algorithm for detecting cyclic + // structures is adapted from ES 5.1 section 15.12.3, abstract operation `JO`. + + // Initializing stack of traversed objects. 
+ // It's done here since we only need them for objects and arrays comparison. + aStack = aStack || []; + bStack = bStack || []; + var length = aStack.length; + while (length--) { + // Linear search. Performance is inversely proportional to the number of + // unique nested structures. + if (aStack[length] === a) return bStack[length] === b; + } + + // Add the first object to the stack of traversed objects. + aStack.push(a); + bStack.push(b); + + // Recursively compare objects and arrays. + if (areArrays) { + // Compare array lengths to determine if a deep comparison is necessary. + length = a.length; + if (length !== b.length) return false; + // Deep compare the contents, ignoring non-numeric properties. + while (length--) { + if (!eq(a[length], b[length], aStack, bStack)) return false; + } + } else { + // Deep compare objects. + var keys = _.keys(a), key; + length = keys.length; + // Ensure that both objects contain the same number of properties before comparing deep equality. + if (_.keys(b).length !== length) return false; + while (length--) { + // Deep compare each member + key = keys[length]; + if (!(_.has(b, key) && eq(a[key], b[key], aStack, bStack))) return false; + } + } + // Remove the first object from the stack of traversed objects. + aStack.pop(); + bStack.pop(); + return true; + }; + + // Perform a deep comparison to check if two objects are equal. + _.isEqual = function(a, b) { + return eq(a, b); + }; + + // Is a given array, string, or object empty? + // An "empty" object has no enumerable own-properties. + _.isEmpty = function(obj) { + if (obj == null) return true; + if (isArrayLike(obj) && (_.isArray(obj) || _.isString(obj) || _.isArguments(obj))) return obj.length === 0; + return _.keys(obj).length === 0; + }; + + // Is a given value a DOM element? + _.isElement = function(obj) { + return !!(obj && obj.nodeType === 1); + }; + + // Is a given value an array? + // Delegates to ECMA5's native Array.isArray + _.isArray = nativeIsArray || function(obj) { + return toString.call(obj) === '[object Array]'; + }; + + // Is a given variable an object? + _.isObject = function(obj) { + var type = typeof obj; + return type === 'function' || type === 'object' && !!obj; + }; + + // Add some isType methods: isArguments, isFunction, isString, isNumber, isDate, isRegExp, isError. + _.each(['Arguments', 'Function', 'String', 'Number', 'Date', 'RegExp', 'Error'], function(name) { + _['is' + name] = function(obj) { + return toString.call(obj) === '[object ' + name + ']'; + }; + }); + + // Define a fallback version of the method in browsers (ahem, IE < 9), where + // there isn't any inspectable "Arguments" type. + if (!_.isArguments(arguments)) { + _.isArguments = function(obj) { + return _.has(obj, 'callee'); + }; + } + + // Optimize `isFunction` if appropriate. Work around some typeof bugs in old v8, + // IE 11 (#1621), and in Safari 8 (#1929). + if ( true && typeof Int8Array != 'object') { + _.isFunction = function(obj) { + return typeof obj == 'function' || false; + }; + } + + // Is a given object a finite number? + _.isFinite = function(obj) { + return isFinite(obj) && !isNaN(parseFloat(obj)); + }; + + // Is the given value `NaN`? (NaN is the only number which does not equal itself). + _.isNaN = function(obj) { + return _.isNumber(obj) && obj !== +obj; + }; + + // Is a given value a boolean? + _.isBoolean = function(obj) { + return obj === true || obj === false || toString.call(obj) === '[object Boolean]'; + }; + + // Is a given value equal to null? 
+  _.isNull = function(obj) {
+    return obj === null;
+  };
+
+  // Is a given variable undefined?
+  _.isUndefined = function(obj) {
+    return obj === void 0;
+  };
+
+  // Shortcut function for checking if an object has a given property directly
+  // on itself (in other words, not on a prototype).
+  _.has = function(obj, key) {
+    return obj != null && hasOwnProperty.call(obj, key);
+  };
+
+  // Utility Functions
+  // -----------------
+
+  // Run Underscore.js in *noConflict* mode, returning the `_` variable to its
+  // previous owner. Returns a reference to the Underscore object.
+  _.noConflict = function() {
+    root._ = previousUnderscore;
+    return this;
+  };
+
+  // Keep the identity function around for default iteratees.
+  _.identity = function(value) {
+    return value;
+  };
+
+  // Predicate-generating functions. Often useful outside of Underscore.
+  _.constant = function(value) {
+    return function() {
+      return value;
+    };
+  };
+
+  _.noop = function(){};
+
+  _.property = property;
+
+  // Generates a function for a given object that returns a given property.
+  _.propertyOf = function(obj) {
+    return obj == null ? function(){} : function(key) {
+      return obj[key];
+    };
+  };
+
+  // Returns a predicate for checking whether an object has a given set of
+  // `key:value` pairs.
+  _.matcher = _.matches = function(attrs) {
+    attrs = _.extendOwn({}, attrs);
+    return function(obj) {
+      return _.isMatch(obj, attrs);
+    };
+  };
+
+  // Run a function **n** times.
+  _.times = function(n, iteratee, context) {
+    var accum = Array(Math.max(0, n));
+    iteratee = optimizeCb(iteratee, context, 1);
+    for (var i = 0; i < n; i++) accum[i] = iteratee(i);
+    return accum;
+  };
+
+  // Return a random integer between min and max (inclusive).
+  _.random = function(min, max) {
+    if (max == null) {
+      max = min;
+      min = 0;
+    }
+    return min + Math.floor(Math.random() * (max - min + 1));
+  };
+
+  // A (possibly faster) way to get the current timestamp as an integer.
+  _.now = Date.now || function() {
+    return new Date().getTime();
+  };
+
+  // List of HTML entities for escaping.
+  var escapeMap = {
+    '&': '&amp;',
+    '<': '&lt;',
+    '>': '&gt;',
+    '"': '&quot;',
+    "'": '&#x27;',
+    '`': '&#x60;'
+  };
+  var unescapeMap = _.invert(escapeMap);
+
+  // Functions for escaping and unescaping strings to/from HTML interpolation.
+  var createEscaper = function(map) {
+    var escaper = function(match) {
+      return map[match];
+    };
+    // Regexes for identifying a key that needs to be escaped
+    var source = '(?:' + _.keys(map).join('|') + ')';
+    var testRegexp = RegExp(source);
+    var replaceRegexp = RegExp(source, 'g');
+    return function(string) {
+      string = string == null ? '' : '' + string;
+      return testRegexp.test(string) ? string.replace(replaceRegexp, escaper) : string;
+    };
+  };
+  _.escape = createEscaper(escapeMap);
+  _.unescape = createEscaper(unescapeMap);
+
+  // If the value of the named `property` is a function then invoke it with the
+  // `object` as context; otherwise, return it.
+  _.result = function(object, property, fallback) {
+    var value = object == null ? void 0 : object[property];
+    if (value === void 0) {
+      value = fallback;
+    }
+    return _.isFunction(value) ? value.call(object) : value;
+  };
+
+  // Generate a unique integer id (unique within the entire client session).
+  // Useful for temporary DOM ids.
+  var idCounter = 0;
+  _.uniqueId = function(prefix) {
+    var id = ++idCounter + '';
+    return prefix ?
prefix + id : id; + }; + + // By default, Underscore uses ERB-style template delimiters, change the + // following template settings to use alternative delimiters. + _.templateSettings = { + evaluate : /<%([\s\S]+?)%>/g, + interpolate : /<%=([\s\S]+?)%>/g, + escape : /<%-([\s\S]+?)%>/g + }; + + // When customizing `templateSettings`, if you don't want to define an + // interpolation, evaluation or escaping regex, we need one that is + // guaranteed not to match. + var noMatch = /(.)^/; + + // Certain characters need to be escaped so that they can be put into a + // string literal. + var escapes = { + "'": "'", + '\\': '\\', + '\r': 'r', + '\n': 'n', + '\u2028': 'u2028', + '\u2029': 'u2029' + }; + + var escaper = /\\|'|\r|\n|\u2028|\u2029/g; + + var escapeChar = function(match) { + return '\\' + escapes[match]; + }; + + // JavaScript micro-templating, similar to John Resig's implementation. + // Underscore templating handles arbitrary delimiters, preserves whitespace, + // and correctly escapes quotes within interpolated code. + // NB: `oldSettings` only exists for backwards compatibility. + _.template = function(text, settings, oldSettings) { + if (!settings && oldSettings) settings = oldSettings; + settings = _.defaults({}, settings, _.templateSettings); + + // Combine delimiters into one regular expression via alternation. + var matcher = RegExp([ + (settings.escape || noMatch).source, + (settings.interpolate || noMatch).source, + (settings.evaluate || noMatch).source + ].join('|') + '|$', 'g'); + + // Compile the template source, escaping string literals appropriately. + var index = 0; + var source = "__p+='"; + text.replace(matcher, function(match, escape, interpolate, evaluate, offset) { + source += text.slice(index, offset).replace(escaper, escapeChar); + index = offset + match.length; + + if (escape) { + source += "'+\n((__t=(" + escape + "))==null?'':_.escape(__t))+\n'"; + } else if (interpolate) { + source += "'+\n((__t=(" + interpolate + "))==null?'':__t)+\n'"; + } else if (evaluate) { + source += "';\n" + evaluate + "\n__p+='"; + } + + // Adobe VMs need the match returned to produce the correct offest. + return match; + }); + source += "';\n"; + + // If a variable is not specified, place data values in local scope. + if (!settings.variable) source = 'with(obj||{}){\n' + source + '}\n'; + + source = "var __t,__p='',__j=Array.prototype.join," + + "print=function(){__p+=__j.call(arguments,'');};\n" + + source + 'return __p;\n'; + + try { + var render = new Function(settings.variable || 'obj', '_', source); + } catch (e) { + e.source = source; + throw e; + } + + var template = function(data) { + return render.call(this, data, _); + }; + + // Provide the compiled source as a convenience for precompilation. + var argument = settings.variable || 'obj'; + template.source = 'function(' + argument + '){\n' + source + '}'; + + return template; + }; + + // Add a "chain" function. Start chaining a wrapped Underscore object. + _.chain = function(obj) { + var instance = _(obj); + instance._chain = true; + return instance; + }; + + // OOP + // --------------- + // If Underscore is called as a function, it returns a wrapped object that + // can be used OO-style. This wrapper holds altered versions of all the + // underscore functions. Wrapped objects may be chained. + + // Helper function to continue chaining intermediate results. + var result = function(instance, obj) { + return instance._chain ? _(obj).chain() : obj; + }; + + // Add your own custom functions to the Underscore object. 
+ _.mixin = function(obj) { + _.each(_.functions(obj), function(name) { + var func = _[name] = obj[name]; + _.prototype[name] = function() { + var args = [this._wrapped]; + push.apply(args, arguments); + return result(this, func.apply(_, args)); + }; + }); + }; + + // Add all of the Underscore functions to the wrapper object. + _.mixin(_); + + // Add all mutator Array functions to the wrapper. + _.each(['pop', 'push', 'reverse', 'shift', 'sort', 'splice', 'unshift'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + var obj = this._wrapped; + method.apply(obj, arguments); + if ((name === 'shift' || name === 'splice') && obj.length === 0) delete obj[0]; + return result(this, obj); + }; + }); + + // Add all accessor Array functions to the wrapper. + _.each(['concat', 'join', 'slice'], function(name) { + var method = ArrayProto[name]; + _.prototype[name] = function() { + return result(this, method.apply(this._wrapped, arguments)); + }; + }); + + // Extracts the result from a wrapped and chained object. + _.prototype.value = function() { + return this._wrapped; + }; + + // Provide unwrapping proxy for some methods used in engine operations + // such as arithmetic and JSON stringification. + _.prototype.valueOf = _.prototype.toJSON = _.prototype.value; + + _.prototype.toString = function() { + return '' + this._wrapped; + }; + + // AMD registration happens at the end for compatibility with AMD loaders + // that may not enforce next-turn semantics on modules. Even though general + // practice for AMD registration is to be anonymous, underscore registers + // as a named module because, like jQuery, it is a base library that is + // popular enough to be bundled in a third party lib, but not be part of + // an AMD load request. Those cases could generate an error when an + // anonymous define() is called outside of a loader request. + if (typeof define === 'function' && define.amd) { + define('underscore', [], function() { + return _; + }); + } +}.call(this)); + + +/***/ }), + +/***/ 941: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +var basiccreds_1 = __webpack_require__(12); +exports.BasicCredentialHandler = basiccreds_1.BasicCredentialHandler; +var bearertoken_1 = __webpack_require__(571); +exports.BearerCredentialHandler = bearertoken_1.BearerCredentialHandler; +var ntlm_1 = __webpack_require__(525); +exports.NtlmCredentialHandler = ntlm_1.NtlmCredentialHandler; +var personalaccesstoken_1 = __webpack_require__(327); +exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; + + +/***/ }), + +/***/ 986: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const tr = __webpack_require__(9); +/** + * Exec a command. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param commandLine command to execute (can include additional args). Must be correctly escaped. + * @param args optional arguments for tool. Escaping is handled by the lib. + * @param options optional exec options. See ExecOptions + * @returns Promise exit code + */ +function exec(commandLine, args, options) { + return __awaiter(this, void 0, void 0, function* () { + const commandArgs = tr.argStringToArray(commandLine); + if (commandArgs.length === 0) { + throw new Error(`Parameter 'commandLine' cannot be null or empty.`); + } + // Path to tool to execute should be first arg + const toolPath = commandArgs[0]; + args = commandArgs.slice(1).concat(args || []); + const runner = new tr.ToolRunner(toolPath, args, options); + return runner.exec(); + }); +} +exports.exec = exec; +//# sourceMappingURL=exec.js.map + +/***/ }) + +/******/ }); \ No newline at end of file diff --git a/examples.md b/examples.md index 54c05bb..ca571d8 100644 --- a/examples.md +++ b/examples.md @@ -15,7 +15,7 @@ ## Node - npm ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: node_modules key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} @@ -26,7 +26,7 @@ ## Node - Yarn ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: ~/.cache/yarn key: ${{ runner.os }}-yarn-${{ hashFiles(format('{0}{1}', github.workspace, '/yarn.lock')) }} @@ -37,7 +37,7 @@ ## C# - Nuget Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: ~/.nuget/packages key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} @@ -48,7 +48,7 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa ## Java - Gradle ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: ~/.gradle/caches key: ${{ runner.os }}-gradle-${{ hashFiles('**/*.gradle') }} @@ -59,7 +59,7 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa ## Java - Maven ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: ~/.m2/repository key: ${{ runner.os }}-maven-${{ hashFiles('**/pom.xml') }} @@ -70,7 +70,7 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa ## Swift, Objective-C - Carthage ```yaml -uses: actions/cache@preview +uses: actions/cache@v1 with: path: Carthage key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }} @@ -81,7 +81,7 @@ uses: actions/cache@preview ## Swift, Objective-C - CocoaPods ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: Pods key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} @@ -92,7 +92,7 @@ uses: actions/cache@preview ## Ruby - Gem ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: vendor/bundle key: ${{ runner.os }}-gem-${{ hashFiles('**/Gemfile.lock') }} @@ -103,7 +103,7 @@ uses: actions/cache@preview ## Go - Modules ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: ~/go/pkg/mod key: ${{ runner.os }}-go-${{ 
hashFiles('**/go.sum') }} @@ -113,7 +113,7 @@ uses: actions/cache@preview ## Elixir - Mix ```yaml -- uses: actions/cache@preview +- uses: actions/cache@v1 with: path: deps key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} @@ -125,17 +125,17 @@ uses: actions/cache@preview ``` - name: Cache cargo registry - uses: actions/cache@preview + uses: actions/cache@v1 with: path: ~/.cargo/registry key: ${{ runner.os }}-cargo-registry-${{ hashFiles('**/Cargo.lock') }} - name: Cache cargo index - uses: actions/cache@preview + uses: actions/cache@v1 with: path: ~/.cargo/git key: ${{ runner.os }}-cargo-index-${{ hashFiles('**/Cargo.lock') }} - name: Cache cargo build - uses: actions/cache@preview + uses: actions/cache@v1 with: path: target key: ${{ runner.os }}-cargo-build-target-${{ hashFiles('**/Cargo.lock') }} diff --git a/package-lock.json b/package-lock.json index ed53148..59a5bb6 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "0.0.1", + "version": "1.0.0", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index f5ce058..84b7733 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "0.0.2", + "version": "1.0.0", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", From 0f810ad45a58337c90d1c5e34ab3556bd81f1723 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Tue, 5 Nov 2019 15:42:18 -0500 Subject: [PATCH 02/21] Release v1.0.1 --- README.md | 8 ++- examples.md | 131 +++++++++++++++++++++++----------------------- package-lock.json | 22 ++++---- package.json | 2 +- src/restore.ts | 7 ++- src/save.ts | 4 +- 6 files changed, 91 insertions(+), 83 deletions(-) diff --git a/README.md b/README.md index ccf4bc5..e65cea6 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,11 @@ This GitHub Action allows caching dependencies and build outputs to improve workflow execution time. -GitHub Actions status +GitHub Actions status + +## Documentation + +See ["Caching dependencies to speed up workflows"](https://help.github.com/github/automating-your-workflow-with-github-actions/caching-dependencies-to-speed-up-workflows). ## Usage @@ -59,7 +63,7 @@ See [Examples](examples.md) ## Cache Limits -Individual caches are limited to 200MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted. +Individual caches are limited to 400MB and a repository can have up to 2GB of caches. Once the 2GB limit is reached, older caches will be evicted based on when the cache was last accessed. Caches that are not accessed within the last week will also be evicted. 
## Skipping steps based on cache-hit diff --git a/examples.md b/examples.md index ca571d8..b844b41 100644 --- a/examples.md +++ b/examples.md @@ -1,41 +1,20 @@ # Examples -- [Node - npm](#node---npm) -- [Node - Yarn](#node---yarn) - [C# - Nuget](#c---nuget) +- [Elixir - Mix](#elixir---mix) +- [Go - Modules](#go---modules) - [Java - Gradle](#java---gradle) - [Java - Maven](#java---maven) +- [Node - npm](#node---npm) +- [Node - Yarn](#node---yarn) +- [Ruby - Gem](#ruby---gem) +- [Rust - Cargo](#rust---cargo) - [Swift, Objective-C - Carthage](#swift-objective-c---carthage) - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) -- [Ruby - Gem](#ruby---gem) -- [Go - Modules](#go---modules) -- [Elixir - Mix](#elixir---mix) -- [Rust - Cargo](#rust---cargo) - -## Node - npm - -```yaml -- uses: actions/cache@v1 - with: - path: node_modules - key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node- -``` - -## Node - Yarn - -```yaml -- uses: actions/cache@v1 - with: - path: ~/.cache/yarn - key: ${{ runner.os }}-yarn-${{ hashFiles(format('{0}{1}', github.workspace, '/yarn.lock')) }} - restore-keys: | - ${{ runner.os }}-yarn- -``` ## C# - Nuget Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): + ```yaml - uses: actions/cache@v1 with: @@ -45,6 +24,27 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa ${{ runner.os }}-nuget- ``` +## Elixir - Mix +```yaml +- uses: actions/cache@v1 + with: + path: deps + key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} + restore-keys: | + ${{ runner.os }}-mix- +``` + +## Go - Modules + +```yaml +- uses: actions/cache@v1 + with: + path: ~/go/pkg/mod + key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} + restore-keys: | + ${{ runner.os }}-go- +``` + ## Java - Gradle ```yaml @@ -67,26 +67,26 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa ${{ runner.os }}-maven- ``` -## Swift, Objective-C - Carthage - -```yaml -uses: actions/cache@v1 - with: - path: Carthage - key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }} - restore-keys: | - ${{ runner.os }}-carthage- -``` - -## Swift, Objective-C - CocoaPods +## Node - npm ```yaml - uses: actions/cache@v1 with: - path: Pods - key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} + path: node_modules + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} restore-keys: | - ${{ runner.os }}-pods- + ${{ runner.os }}-node- +``` + +## Node - Yarn + +```yaml +- uses: actions/cache@v1 + with: + path: ~/.cache/yarn + key: ${{ runner.os }}-yarn-${{ hashFiles(format('{0}{1}', github.workspace, '/yarn.lock')) }} + restore-keys: | + ${{ runner.os }}-yarn- ``` ## Ruby - Gem @@ -100,30 +100,9 @@ uses: actions/cache@v1 ${{ runner.os }}-gem- ``` -## Go - Modules - -```yaml -- uses: actions/cache@v1 - with: - path: ~/go/pkg/mod - key: ${{ runner.os }}-go-${{ hashFiles('**/go.sum') }} - restore-keys: | - ${{ runner.os }}-go- -``` - -## Elixir - Mix -```yaml -- uses: actions/cache@v1 - with: - path: deps - key: ${{ runner.os }}-mix-${{ hashFiles(format('{0}{1}', github.workspace, '/mix.lock')) }} - restore-keys: | - ${{ runner.os }}-mix- -``` - ## Rust - Cargo -``` +```yaml - name: Cache cargo registry uses: actions/cache@v1 with: @@ -140,3 +119,25 @@ uses: actions/cache@v1 path: target key: ${{ runner.os }}-cargo-build-target-${{ 
hashFiles('**/Cargo.lock') }} ``` + +## Swift, Objective-C - Carthage + +```yaml +- uses: actions/cache@v1 + with: + path: Carthage + key: ${{ runner.os }}-carthage-${{ hashFiles('**/Cartfile.resolved') }} + restore-keys: | + ${{ runner.os }}-carthage- +``` + +## Swift, Objective-C - CocoaPods + +```yaml +- uses: actions/cache@v1 + with: + path: Pods + key: ${{ runner.os }}-pods-${{ hashFiles('**/Podfile.lock') }} + restore-keys: | + ${{ runner.os }}-pods- +``` diff --git a/package-lock.json b/package-lock.json index 59a5bb6..a3dc4ea 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.0", + "version": "1.0.1", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -1105,9 +1105,9 @@ } }, "commander": { - "version": "2.20.1", - "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.1.tgz", - "integrity": "sha512-cCuLsMhJeWQ/ZpsFTbE765kvVfoeSddc4nU3up4fV+fDBcfUXnbITJ+JzhkdjzOqhURjZgujxaioam4RM9yGUg==", + "version": "2.20.3", + "resolved": "https://registry.npmjs.org/commander/-/commander-2.20.3.tgz", + "integrity": "sha512-GpVkmM8vF2vQUkj2LvZmD35JxeJOLCwJ9cUkugyk2nuhbv3+mJvpLYYt+0+USMxE+oj+ey/lJEnhZw75x/OMcQ==", "dev": true, "optional": true }, @@ -2318,9 +2318,9 @@ "dev": true }, "handlebars": { - "version": "4.4.2", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.4.2.tgz", - "integrity": "sha512-cIv17+GhL8pHHnRJzGu2wwcthL5sb8uDKBHvZ2Dtu5s1YNt0ljbzKbamnc+gr69y7bzwQiBdr5+hOpRd5pnOdg==", + "version": "4.5.1", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.1.tgz", + "integrity": "sha512-C29UoFzHe9yM61lOsIlCE5/mQVGrnIOrOq7maQl76L7tYPCgC1og0Ajt6uWnX4ZTxBPnjw+CUvawphwCfJgUnA==", "dev": true, "requires": { "neo-async": "^2.6.0", @@ -4981,13 +4981,13 @@ "dev": true }, "uglify-js": { - "version": "3.6.0", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.0.tgz", - "integrity": "sha512-W+jrUHJr3DXKhrsS7NUVxn3zqMOFn0hL/Ei6v0anCIMoKC93TjcflTagwIHLW7SfMFfiQuktQyFVCFHGUE0+yg==", + "version": "3.6.7", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.7.tgz", + "integrity": "sha512-4sXQDzmdnoXiO+xvmTzQsfIiwrjUCSA95rSP4SEd8tDb51W2TiDOlL76Hl+Kw0Ie42PSItCW8/t6pBNCF2R48A==", "dev": true, "optional": true, "requires": { - "commander": "~2.20.0", + "commander": "~2.20.3", "source-map": "~0.6.1" } }, diff --git a/package.json b/package.json index 84b7733..a235bd7 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.0", + "version": "1.0.1", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", diff --git a/src/restore.ts b/src/restore.ts index b115b08..060c8d4 100644 --- a/src/restore.ts +++ b/src/restore.ts @@ -20,7 +20,10 @@ async function run() { const primaryKey = core.getInput(Inputs.Key, { required: true }); core.saveState(State.CacheKey, primaryKey); - const restoreKeys = core.getInput(Inputs.RestoreKeys).split("\n"); + const restoreKeys = core + .getInput(Inputs.RestoreKeys) + .split("\n") + .filter(x => x !== ""); const keys = [primaryKey, ...restoreKeys]; core.debug("Resolved Keys:"); @@ -52,7 +55,7 @@ async function run() { const cacheEntry = await cacheHttpClient.getCacheEntry(keys); if (!cacheEntry) { core.info( - `Cache not found for input keys: ${JSON.stringify(keys)}.` + `Cache not found for input keys: ${keys.join(", ")}.` ); return; } diff --git a/src/save.ts b/src/save.ts index f688e70..69e44cf 100644 --- a/src/save.ts +++ b/src/save.ts @@ -54,12 
+54,12 @@ async function run() { core.debug(`Tar Path: ${tarPath}`); await exec(`"${tarPath}"`, args); - const fileSizeLimit = 200 * 1024 * 1024; // 200MB + const fileSizeLimit = 400 * 1024 * 1024; // 400MB const archiveFileSize = fs.statSync(archivePath).size; core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { core.warning( - `Cache size of ${archiveFileSize} bytes is over the 200MB limit, not saving cache.` + `Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.` ); return; } From 86dff562ab522d4b40945cd4cd51c5d4b4b40cbc Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Tue, 5 Nov 2019 15:43:33 -0500 Subject: [PATCH 03/21] v1.0.1 release binaries --- dist/restore/index.js | 7 +++++-- dist/save/index.js | 4 ++-- 2 files changed, 7 insertions(+), 4 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index 6074ef1..53f1ec6 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -2972,7 +2972,10 @@ function run() { core.debug(`Cache Path: ${cachePath}`); const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true }); core.saveState(constants_1.State.CacheKey, primaryKey); - const restoreKeys = core.getInput(constants_1.Inputs.RestoreKeys).split("\n"); + const restoreKeys = core + .getInput(constants_1.Inputs.RestoreKeys) + .split("\n") + .filter(x => x !== ""); const keys = [primaryKey, ...restoreKeys]; core.debug("Resolved Keys:"); core.debug(JSON.stringify(keys)); @@ -2994,7 +2997,7 @@ function run() { try { const cacheEntry = yield cacheHttpClient.getCacheEntry(keys); if (!cacheEntry) { - core.info(`Cache not found for input keys: ${JSON.stringify(keys)}.`); + core.info(`Cache not found for input keys: ${keys.join(", ")}.`); return; } let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); diff --git a/dist/save/index.js b/dist/save/index.js index 049a5c8..79c5e1c 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -2889,11 +2889,11 @@ function run() { const tarPath = yield io.which("tar", true); core.debug(`Tar Path: ${tarPath}`); yield exec_1.exec(`"${tarPath}"`, args); - const fileSizeLimit = 200 * 1024 * 1024; // 200MB + const fileSizeLimit = 400 * 1024 * 1024; // 400MB const archiveFileSize = fs.statSync(archivePath).size; core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { - core.warning(`Cache size of ${archiveFileSize} bytes is over the 200MB limit, not saving cache.`); + core.warning(`Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.`); return; } const stream = fs.createReadStream(archivePath); From 6491e51b664cab00918c9cf51a5b9705d94ff877 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Fri, 15 Nov 2019 10:29:58 -0500 Subject: [PATCH 04/21] Merge master into releases/v1 --- .eslintrc.json | 16 + .github/workflows/workflow.yml | 26 +- README.md | 25 +- __tests__/__fixtures__/helloWorld.txt | 1 + __tests__/actionUtils.test.ts | 226 ++++++ __tests__/main.test.ts | 22 - __tests__/restore.test.ts | 450 +++++++++++ __tests__/save.test.ts | 329 ++++++++ examples.md | 157 +++- jest.config.js | 39 +- package-lock.json | 1023 +++++++++++++++++++++++++ package.json | 10 + src/cacheHttpClient.ts | 90 +-- src/constants.ts | 24 +- src/restore.ts | 52 +- src/save.ts | 37 +- src/utils/actionUtils.ts | 41 +- src/utils/testUtils.ts | 24 +- 18 files changed, 2421 insertions(+), 171 deletions(-) create mode 100644 .eslintrc.json create mode 100644 __tests__/__fixtures__/helloWorld.txt create mode 100644 
__tests__/actionUtils.test.ts delete mode 100644 __tests__/main.test.ts create mode 100644 __tests__/restore.test.ts create mode 100644 __tests__/save.test.ts diff --git a/.eslintrc.json b/.eslintrc.json new file mode 100644 index 0000000..ccaf1a6 --- /dev/null +++ b/.eslintrc.json @@ -0,0 +1,16 @@ +{ + "env": { "node": true, "jest": true }, + "parser": "@typescript-eslint/parser", + "parserOptions": { "ecmaVersion": 2020, "sourceType": "module" }, + "extends": [ + "eslint:recommended", + "plugin:@typescript-eslint/eslint-recommended", + "plugin:@typescript-eslint/recommended", + "plugin:import/errors", + "plugin:import/warnings", + "plugin:import/typescript", + "plugin:prettier/recommended", + "prettier/@typescript-eslint" + ], + "plugins": ["@typescript-eslint", "jest"] +} diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index 59bb21f..b31fdc1 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -1,6 +1,11 @@ name: Tests + on: pull_request: + branches: + - master + paths-ignore: + - '**.md' push: branches: - master @@ -10,22 +15,39 @@ on: jobs: test: name: Test on ${{ matrix.os }} + strategy: matrix: os: [ubuntu-latest, windows-latest, macOS-latest] + runs-on: ${{ matrix.os }} - + steps: - uses: actions/checkout@v1 - + - uses: actions/setup-node@v1 with: node-version: '12.x' + - name: Get npm cache directory + id: npm-cache + run: | + echo "::set-output name=dir::$(npm config get cache)" + + - uses: actions/cache@v1 + with: + path: ${{ steps.npm-cache.outputs.dir }} + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- + - run: npm ci - name: Prettier Format Check run: npm run format-check + - name: ESLint Check + run: npm run lint + - name: Build & Test run: npm run test diff --git a/README.md b/README.md index e65cea6..210c51d 100644 --- a/README.md +++ b/README.md @@ -28,7 +28,7 @@ Create a workflow `.yml` file in your repositories `.github/workflows` directory ### Example workflow ```yaml -name: Example Caching with npm +name: Caching Primes on: push @@ -39,22 +39,19 @@ jobs: steps: - uses: actions/checkout@v1 - - name: Cache node modules + - name: Cache Primes + id: cache-primes uses: actions/cache@v1 with: - path: node_modules - key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} - restore-keys: | - ${{ runner.os }}-node- + path: prime-numbers + key: ${{ runner.os }}-primes - - name: Install Dependencies - run: npm install + - name: Generate Prime Numbers + if: steps.cache-primes.outputs.cache-hit != 'true' + run: /generate-primes.sh -d prime-numbers - - name: Build - run: npm run build - - - name: Test - run: npm run test + - name: Use Prime Numbers + run: /primes.sh -d prime-numbers ``` ## Ecosystem Examples @@ -78,7 +75,7 @@ steps: id: cache with: path: path/to/dependencies - key: ${{ runner.os }}-${{ hashFiles('**/lockfiles')}} + key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' diff --git a/__tests__/__fixtures__/helloWorld.txt b/__tests__/__fixtures__/helloWorld.txt new file mode 100644 index 0000000..95d09f2 --- /dev/null +++ b/__tests__/__fixtures__/helloWorld.txt @@ -0,0 +1 @@ +hello world \ No newline at end of file diff --git a/__tests__/actionUtils.test.ts b/__tests__/actionUtils.test.ts new file mode 100644 index 0000000..4688b5d --- /dev/null +++ b/__tests__/actionUtils.test.ts @@ -0,0 +1,226 @@ +import * as core from "@actions/core"; +import * as os from 
"os"; +import * as path from "path"; + +import { Events, Outputs, State } from "../src/constants"; +import { ArtifactCacheEntry } from "../src/contracts"; +import * as actionUtils from "../src/utils/actionUtils"; + +jest.mock("@actions/core"); +jest.mock("os"); + +afterEach(() => { + delete process.env[Events.Key]; +}); + +test("getArchiveFileSize returns file size", () => { + const filePath = path.join(__dirname, "__fixtures__", "helloWorld.txt"); + + const size = actionUtils.getArchiveFileSize(filePath); + + expect(size).toBe(11); +}); + +test("isExactKeyMatch with undefined cache entry returns false", () => { + const key = "linux-rust"; + const cacheEntry = undefined; + + expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); +}); + +test("isExactKeyMatch with empty cache entry returns false", () => { + const key = "linux-rust"; + const cacheEntry: ArtifactCacheEntry = {}; + + expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); +}); + +test("isExactKeyMatch with different keys returns false", () => { + const key = "linux-rust"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "linux-" + }; + + expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); +}); + +test("isExactKeyMatch with different key accents returns false", () => { + const key = "linux-áccent"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "linux-accent" + }; + + expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(false); +}); + +test("isExactKeyMatch with same key returns true", () => { + const key = "linux-rust"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "linux-rust" + }; + + expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true); +}); + +test("isExactKeyMatch with same key and different casing returns true", () => { + const key = "linux-rust"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "LINUX-RUST" + }; + + expect(actionUtils.isExactKeyMatch(key, cacheEntry)).toBe(true); +}); + +test("setOutputAndState with undefined entry to set cache-hit output", () => { + const key = "linux-rust"; + const cacheEntry = undefined; + + const setOutputMock = jest.spyOn(core, "setOutput"); + const saveStateMock = jest.spyOn(core, "saveState"); + + actionUtils.setOutputAndState(key, cacheEntry); + + expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "false"); + expect(setOutputMock).toHaveBeenCalledTimes(1); + + expect(saveStateMock).toHaveBeenCalledTimes(0); +}); + +test("setOutputAndState with exact match to set cache-hit output and state", () => { + const key = "linux-rust"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "linux-rust" + }; + + const setOutputMock = jest.spyOn(core, "setOutput"); + const saveStateMock = jest.spyOn(core, "saveState"); + + actionUtils.setOutputAndState(key, cacheEntry); + + expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, "true"); + expect(setOutputMock).toHaveBeenCalledTimes(1); + + expect(saveStateMock).toHaveBeenCalledWith( + State.CacheResult, + JSON.stringify(cacheEntry) + ); + expect(saveStateMock).toHaveBeenCalledTimes(1); +}); + +test("setOutputAndState with no exact match to set cache-hit output and state", () => { + const key = "linux-rust"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "linux-rust-bb828da54c148048dd17899ba9fda624811cfb43" + }; + + const setOutputMock = jest.spyOn(core, "setOutput"); + const saveStateMock = jest.spyOn(core, "saveState"); + + actionUtils.setOutputAndState(key, cacheEntry); + + expect(setOutputMock).toHaveBeenCalledWith(Outputs.CacheHit, 
"false"); + expect(setOutputMock).toHaveBeenCalledTimes(1); + + expect(saveStateMock).toHaveBeenCalledWith( + State.CacheResult, + JSON.stringify(cacheEntry) + ); + expect(saveStateMock).toHaveBeenCalledTimes(1); +}); + +test("getCacheState with no state returns undefined", () => { + const getStateMock = jest.spyOn(core, "getState"); + getStateMock.mockImplementation(() => { + return ""; + }); + + const state = actionUtils.getCacheState(); + + expect(state).toBe(undefined); + + expect(getStateMock).toHaveBeenCalledWith(State.CacheResult); + expect(getStateMock).toHaveBeenCalledTimes(1); +}); + +test("getCacheState with valid state", () => { + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43", + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: "www.actionscache.test/download" + }; + const getStateMock = jest.spyOn(core, "getState"); + getStateMock.mockImplementation(() => { + return JSON.stringify(cacheEntry); + }); + + const state = actionUtils.getCacheState(); + + expect(state).toEqual(cacheEntry); + + expect(getStateMock).toHaveBeenCalledWith(State.CacheResult); + expect(getStateMock).toHaveBeenCalledTimes(1); +}); + +test("isValidEvent returns false for unknown event", () => { + const event = "foo"; + process.env[Events.Key] = event; + + const isValidEvent = actionUtils.isValidEvent(); + + expect(isValidEvent).toBe(false); +}); + +test("resolvePath with no ~ in path", () => { + const filePath = ".cache/yarn"; + + const resolvedPath = actionUtils.resolvePath(filePath); + + const expectedPath = path.resolve(filePath); + expect(resolvedPath).toBe(expectedPath); +}); + +test("resolvePath with ~ in path", () => { + const filePath = "~/.cache/yarn"; + + const homedir = jest.requireActual("os").homedir(); + const homedirMock = jest.spyOn(os, "homedir"); + homedirMock.mockImplementation(() => { + return homedir; + }); + + const resolvedPath = actionUtils.resolvePath(filePath); + + const expectedPath = path.join(homedir, ".cache/yarn"); + expect(resolvedPath).toBe(expectedPath); +}); + +test("resolvePath with home not found", () => { + const filePath = "~/.cache/yarn"; + const homedirMock = jest.spyOn(os, "homedir"); + homedirMock.mockImplementation(() => { + return ""; + }); + + expect(() => actionUtils.resolvePath(filePath)).toThrow( + "Unable to resolve `~` to HOME" + ); +}); + +test("isValidEvent returns true for push event", () => { + const event = Events.Push; + process.env[Events.Key] = event; + + const isValidEvent = actionUtils.isValidEvent(); + + expect(isValidEvent).toBe(true); +}); + +test("isValidEvent returns true for pull request event", () => { + const event = Events.PullRequest; + process.env[Events.Key] = event; + + const isValidEvent = actionUtils.isValidEvent(); + + expect(isValidEvent).toBe(true); +}); diff --git a/__tests__/main.test.ts b/__tests__/main.test.ts deleted file mode 100644 index 074a5e7..0000000 --- a/__tests__/main.test.ts +++ /dev/null @@ -1,22 +0,0 @@ -import * as core from "@actions/core"; - -import { Inputs } from "../src/constants"; -import run from "../src/restore"; -import * as testUtils from "../src/utils/testUtils"; - -test("restore with no path", async () => { - const failedMock = jest.spyOn(core, "setFailed"); - await run(); - expect(failedMock).toHaveBeenCalledWith( - "Input required and not supplied: path" - ); -}); - -test("restore with no key", async () => { - testUtils.setInput(Inputs.Path, "node_modules"); - const failedMock = jest.spyOn(core, 
"setFailed"); - await run(); - expect(failedMock).toHaveBeenCalledWith( - "Input required and not supplied: key" - ); -}); diff --git a/__tests__/restore.test.ts b/__tests__/restore.test.ts new file mode 100644 index 0000000..1919e30 --- /dev/null +++ b/__tests__/restore.test.ts @@ -0,0 +1,450 @@ +import * as core from "@actions/core"; +import * as exec from "@actions/exec"; +import * as io from "@actions/io"; +import * as path from "path"; +import * as cacheHttpClient from "../src/cacheHttpClient"; +import { Events, Inputs } from "../src/constants"; +import { ArtifactCacheEntry } from "../src/contracts"; +import run from "../src/restore"; +import * as actionUtils from "../src/utils/actionUtils"; +import * as testUtils from "../src/utils/testUtils"; + +jest.mock("@actions/exec"); +jest.mock("@actions/io"); +jest.mock("../src/utils/actionUtils"); +jest.mock("../src/cacheHttpClient"); + +beforeAll(() => { + jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { + return path.resolve(filePath); + }); + + jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation( + (key, cacheResult) => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.isExactKeyMatch(key, cacheResult); + } + ); + + jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.isValidEvent(); + }); + + jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.getSupportedEvents(); + }); + + jest.spyOn(io, "which").mockImplementation(tool => { + return Promise.resolve(tool); + }); +}); + +beforeEach(() => { + process.env[Events.Key] = Events.Push; +}); + +afterEach(() => { + testUtils.clearInputs(); + delete process.env[Events.Key]; +}); + +test("restore with invalid event", async () => { + const failedMock = jest.spyOn(core, "setFailed"); + const invalidEvent = "commit_comment"; + process.env[Events.Key] = invalidEvent; + await run(); + expect(failedMock).toHaveBeenCalledWith( + `Event Validation Error: The event type ${invalidEvent} is not supported. 
Only push, pull_request events are supported at this time.` + ); +}); + +test("restore with no path should fail", async () => { + const failedMock = jest.spyOn(core, "setFailed"); + await run(); + expect(failedMock).toHaveBeenCalledWith( + "Input required and not supplied: path" + ); +}); + +test("restore with no key", async () => { + testUtils.setInput(Inputs.Path, "node_modules"); + const failedMock = jest.spyOn(core, "setFailed"); + await run(); + expect(failedMock).toHaveBeenCalledWith( + "Input required and not supplied: key" + ); +}); + +test("restore with too many keys should fail", async () => { + const key = "node-test"; + const restoreKeys = [...Array(20).keys()].map(x => x.toString()); + testUtils.setInputs({ + path: "node_modules", + key, + restoreKeys + }); + const failedMock = jest.spyOn(core, "setFailed"); + await run(); + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: Keys are limited to a maximum of 10.` + ); +}); + +test("restore with large key should fail", async () => { + const key = "foo".repeat(512); // Over the 512 character limit + testUtils.setInputs({ + path: "node_modules", + key + }); + const failedMock = jest.spyOn(core, "setFailed"); + await run(); + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: ${key} cannot be larger than 512 characters.` + ); +}); + +test("restore with invalid key should fail", async () => { + const key = "comma,comma"; + testUtils.setInputs({ + path: "node_modules", + key + }); + const failedMock = jest.spyOn(core, "setFailed"); + await run(); + expect(failedMock).toHaveBeenCalledWith( + `Key Validation Error: ${key} cannot contain commas.` + ); +}); + +test("restore with no cache found", async () => { + const key = "node-test"; + testUtils.setInputs({ + path: "node_modules", + key + }); + + const infoMock = jest.spyOn(core, "info"); + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + + const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); + clientMock.mockImplementation(() => { + return Promise.resolve(null); + }); + + await run(); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(warningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); + + expect(infoMock).toHaveBeenCalledWith( + `Cache not found for input keys: ${key}.` + ); +}); + +test("restore with server error should fail", async () => { + const key = "node-test"; + testUtils.setInputs({ + path: "node_modules", + key + }); + + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + + const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); + clientMock.mockImplementation(() => { + throw new Error("HTTP Error Occurred"); + }); + + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + + await run(); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + + expect(warningMock).toHaveBeenCalledTimes(1); + expect(warningMock).toHaveBeenCalledWith("HTTP Error Occurred"); + + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); + + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("restore with restore keys and no cache found", async () => { + const key = "node-test"; + const restoreKey = "node-"; + testUtils.setInputs({ + path: "node_modules", + key, + restoreKeys: [restoreKey] + 
}); + + const infoMock = jest.spyOn(core, "info"); + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + + const clientMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); + clientMock.mockImplementation(() => { + return Promise.resolve(null); + }); + + await run(); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(warningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); + + expect(infoMock).toHaveBeenCalledWith( + `Cache not found for input keys: ${key}, ${restoreKey}.` + ); +}); + +test("restore with cache found", async () => { + const key = "node-test"; + const cachePath = path.resolve("node_modules"); + testUtils.setInputs({ + path: "node_modules", + key + }); + + const infoMock = jest.spyOn(core, "info"); + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: key, + scope: "refs/heads/master", + archiveLocation: "www.actionscache.test/download" + }; + const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); + getCacheMock.mockImplementation(() => { + return Promise.resolve(cacheEntry); + }); + const tempPath = "/foo/bar"; + + const createTempDirectoryMock = jest.spyOn( + actionUtils, + "createTempDirectory" + ); + createTempDirectoryMock.mockImplementation(() => { + return Promise.resolve(tempPath); + }); + + const archivePath = path.join(tempPath, "cache.tgz"); + const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); + const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); + + const fileSize = 142; + const getArchiveFileSizeMock = jest + .spyOn(actionUtils, "getArchiveFileSize") + .mockReturnValue(fileSize); + + const mkdirMock = jest.spyOn(io, "mkdirP"); + const execMock = jest.spyOn(exec, "exec"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + + await run(); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(getCacheMock).toHaveBeenCalledWith([key]); + expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); + expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); + expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); + expect(mkdirMock).toHaveBeenCalledWith(cachePath); + + const IS_WINDOWS = process.platform === "win32"; + const args = IS_WINDOWS + ? 
[ + "-xz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/") + ] + : ["-xz", "-f", archivePath, "-C", cachePath]; + + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); + + expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); + expect(warningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("restore with a pull request event and cache found", async () => { + const key = "node-test"; + const cachePath = path.resolve("node_modules"); + testUtils.setInputs({ + path: "node_modules", + key + }); + + process.env[Events.Key] = Events.PullRequest; + + const infoMock = jest.spyOn(core, "info"); + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: key, + scope: "refs/heads/master", + archiveLocation: "www.actionscache.test/download" + }; + const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); + getCacheMock.mockImplementation(() => { + return Promise.resolve(cacheEntry); + }); + const tempPath = "/foo/bar"; + + const createTempDirectoryMock = jest.spyOn( + actionUtils, + "createTempDirectory" + ); + createTempDirectoryMock.mockImplementation(() => { + return Promise.resolve(tempPath); + }); + + const archivePath = path.join(tempPath, "cache.tgz"); + const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); + const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); + + const fileSize = 62915000; + const getArchiveFileSizeMock = jest + .spyOn(actionUtils, "getArchiveFileSize") + .mockReturnValue(fileSize); + + const mkdirMock = jest.spyOn(io, "mkdirP"); + const execMock = jest.spyOn(exec, "exec"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + + await run(); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(getCacheMock).toHaveBeenCalledWith([key]); + expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); + expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); + expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); + expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); + expect(mkdirMock).toHaveBeenCalledWith(cachePath); + + const IS_WINDOWS = process.platform === "win32"; + const args = IS_WINDOWS + ? 
[ + "-xz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/") + ] + : ["-xz", "-f", archivePath, "-C", cachePath]; + + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); + + expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); + expect(warningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("restore with cache found for restore key", async () => { + const key = "node-test"; + const restoreKey = "node-"; + const cachePath = path.resolve("node_modules"); + testUtils.setInputs({ + path: "node_modules", + key, + restoreKeys: [restoreKey] + }); + + const infoMock = jest.spyOn(core, "info"); + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + const stateMock = jest.spyOn(core, "saveState"); + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: restoreKey, + scope: "refs/heads/master", + archiveLocation: "www.actionscache.test/download" + }; + const getCacheMock = jest.spyOn(cacheHttpClient, "getCacheEntry"); + getCacheMock.mockImplementation(() => { + return Promise.resolve(cacheEntry); + }); + const tempPath = "/foo/bar"; + + const createTempDirectoryMock = jest.spyOn( + actionUtils, + "createTempDirectory" + ); + createTempDirectoryMock.mockImplementation(() => { + return Promise.resolve(tempPath); + }); + + const archivePath = path.join(tempPath, "cache.tgz"); + const setCacheStateMock = jest.spyOn(actionUtils, "setCacheState"); + const downloadCacheMock = jest.spyOn(cacheHttpClient, "downloadCache"); + + const fileSize = 142; + const getArchiveFileSizeMock = jest + .spyOn(actionUtils, "getArchiveFileSize") + .mockReturnValue(fileSize); + + const mkdirMock = jest.spyOn(io, "mkdirP"); + const execMock = jest.spyOn(exec, "exec"); + const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); + + await run(); + + expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); + expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]); + expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); + expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); + expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); + expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); + expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); + expect(mkdirMock).toHaveBeenCalledWith(cachePath); + + const IS_WINDOWS = process.platform === "win32"; + const args = IS_WINDOWS + ? 
[ + "-xz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/") + ] + : ["-xz", "-f", archivePath, "-C", cachePath]; + + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + + expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); + expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); + + expect(infoMock).toHaveBeenCalledWith( + `Cache restored from key: ${restoreKey}` + ); + expect(warningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); +}); diff --git a/__tests__/save.test.ts b/__tests__/save.test.ts new file mode 100644 index 0000000..67b13d2 --- /dev/null +++ b/__tests__/save.test.ts @@ -0,0 +1,329 @@ +import * as core from "@actions/core"; +import * as exec from "@actions/exec"; +import * as io from "@actions/io"; +import * as path from "path"; +import * as cacheHttpClient from "../src/cacheHttpClient"; +import { Inputs } from "../src/constants"; +import { ArtifactCacheEntry } from "../src/contracts"; +import run from "../src/save"; +import * as actionUtils from "../src/utils/actionUtils"; +import * as testUtils from "../src/utils/testUtils"; + +jest.mock("@actions/core"); +jest.mock("@actions/exec"); +jest.mock("@actions/io"); +jest.mock("../src/utils/actionUtils"); +jest.mock("../src/cacheHttpClient"); + +beforeAll(() => { + jest.spyOn(core, "getInput").mockImplementation((name, options) => { + return jest.requireActual("@actions/core").getInput(name, options); + }); + + jest.spyOn(actionUtils, "getCacheState").mockImplementation(() => { + return jest.requireActual("../src/utils/actionUtils").getCacheState(); + }); + + jest.spyOn(actionUtils, "isExactKeyMatch").mockImplementation( + (key, cacheResult) => { + return jest + .requireActual("../src/utils/actionUtils") + .isExactKeyMatch(key, cacheResult); + } + ); + + jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { + return path.resolve(filePath); + }); + + jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => { + return Promise.resolve("/foo/bar"); + }); + + jest.spyOn(io, "which").mockImplementation(tool => { + return Promise.resolve(tool); + }); +}); + +afterEach(() => { + testUtils.clearInputs(); +}); + +test("save with no primary key in state outputs warning", async () => { + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43", + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: "www.actionscache.test/download" + }; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return JSON.stringify(cacheEntry); + }) + // Cache Key State + .mockImplementationOnce(() => { + return ""; + }); + + await run(); + + expect(warningMock).toHaveBeenCalledWith( + `Error retrieving key from state.` + ); + expect(warningMock).toHaveBeenCalledTimes(1); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with exact match returns early", async () => { + const infoMock = jest.spyOn(core, "info"); + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: primaryKey, + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: 
"www.actionscache.test/download" + }; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return JSON.stringify(cacheEntry); + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const execMock = jest.spyOn(exec, "exec"); + + await run(); + + expect(infoMock).toHaveBeenCalledWith( + `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` + ); + + expect(execMock).toHaveBeenCalledTimes(0); + + expect(warningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with missing input outputs warning", async () => { + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "Linux-node-", + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: "www.actionscache.test/download" + }; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return JSON.stringify(cacheEntry); + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + await run(); + + expect(warningMock).toHaveBeenCalledWith( + "Input required and not supplied: path" + ); + expect(warningMock).toHaveBeenCalledTimes(1); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with large cache outputs warning", async () => { + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "Linux-node-", + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: "www.actionscache.test/download" + }; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return JSON.stringify(cacheEntry); + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + const cachePath = path.resolve(inputPath); + testUtils.setInput(Inputs.Path, inputPath); + + const execMock = jest.spyOn(exec, "exec"); + + const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit + jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { + return cacheSize; + }); + + await run(); + + const archivePath = path.join("/foo/bar", "cache.tgz"); + + const IS_WINDOWS = process.platform === "win32"; + const args = IS_WINDOWS + ? [ + "-cz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/"), + "." + ] + : ["-cz", "-f", archivePath, "-C", cachePath, "."]; + + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + + expect(warningMock).toHaveBeenCalledTimes(1); + expect(warningMock).toHaveBeenCalledWith( + "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache." 
+ ); + + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with server error outputs warning", async () => { + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "Linux-node-", + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: "www.actionscache.test/download" + }; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return JSON.stringify(cacheEntry); + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + const cachePath = path.resolve(inputPath); + testUtils.setInput(Inputs.Path, inputPath); + + const execMock = jest.spyOn(exec, "exec"); + + const saveCacheMock = jest + .spyOn(cacheHttpClient, "saveCache") + .mockImplementationOnce(() => { + throw new Error("HTTP Error Occurred"); + }); + + await run(); + + const archivePath = path.join("/foo/bar", "cache.tgz"); + + const IS_WINDOWS = process.platform === "win32"; + const args = IS_WINDOWS + ? [ + "-cz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/"), + "." + ] + : ["-cz", "-f", archivePath, "-C", cachePath, "."]; + + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + + expect(saveCacheMock).toHaveBeenCalledTimes(1); + expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); + + expect(warningMock).toHaveBeenCalledTimes(1); + expect(warningMock).toHaveBeenCalledWith("HTTP Error Occurred"); + + expect(failedMock).toHaveBeenCalledTimes(0); +}); + +test("save with valid inputs uploads a cache", async () => { + const warningMock = jest.spyOn(core, "warning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "Linux-node-", + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: "www.actionscache.test/download" + }; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return JSON.stringify(cacheEntry); + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + const cachePath = path.resolve(inputPath); + testUtils.setInput(Inputs.Path, inputPath); + + const execMock = jest.spyOn(exec, "exec"); + + const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); + + await run(); + + const archivePath = path.join("/foo/bar", "cache.tgz"); + + const IS_WINDOWS = process.platform === "win32"; + const args = IS_WINDOWS + ? [ + "-cz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/"), + "." 
+ ] + : ["-cz", "-f", archivePath, "-C", cachePath, "."]; + + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + + expect(saveCacheMock).toHaveBeenCalledTimes(1); + expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); + + expect(warningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); +}); diff --git a/examples.md b/examples.md index b844b41..7d66b62 100644 --- a/examples.md +++ b/examples.md @@ -1,16 +1,25 @@ # Examples -- [C# - Nuget](#c---nuget) -- [Elixir - Mix](#elixir---mix) -- [Go - Modules](#go---modules) -- [Java - Gradle](#java---gradle) -- [Java - Maven](#java---maven) -- [Node - npm](#node---npm) -- [Node - Yarn](#node---yarn) -- [Ruby - Gem](#ruby---gem) -- [Rust - Cargo](#rust---cargo) -- [Swift, Objective-C - Carthage](#swift-objective-c---carthage) -- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) +- [Examples](#examples) + - [C# - Nuget](#c---nuget) + - [Elixir - Mix](#elixir---mix) + - [Go - Modules](#go---modules) + - [Java - Gradle](#java---gradle) + - [Java - Maven](#java---maven) + - [Node - npm](#node---npm) + - [macOS and Ubuntu](#macos-and-ubuntu) + - [Windows](#windows) + - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config) + - [Node - Yarn](#node---yarn) + - [PHP - Composer](#php---composer) + - [Python - pip](#python---pip) + - [Simple example](#simple-example) + - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow) + - [Using a script to get cache location](#using-a-script-to-get-cache-location) + - [Ruby - Gem](#ruby---gem) + - [Rust - Cargo](#rust---cargo) + - [Swift, Objective-C - Carthage](#swift-objective-c---carthage) + - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) ## C# - Nuget Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): @@ -69,24 +78,142 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa ## Node - npm +For npm, cache files are stored in `~/.npm` on Posix, or `%AppData%/npm-cache` on Windows. See https://docs.npmjs.com/cli/cache#cache + +>Note: It is not recommended to cache `node_modules`, as it can break across Node versions and won't work with `npm ci` + +### macOS and Ubuntu + ```yaml - uses: actions/cache@v1 with: - path: node_modules + path: ~/.npm + key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- +``` + +### Windows + +```yaml +- uses: actions/cache@v1 + with: + path: ~\AppData\Roaming\npm-cache + key: ${{ runner.os }}-node-${{ hashFiles('**\package-lock.json') }} + restore-keys: | + ${{ runner.os }}-node- +``` + +### Using multiple systems and `npm config` + +```yaml +- name: Get npm cache directory + id: npm-cache + run: | + echo "::set-output name=dir::$(npm config get cache)" +- uses: actions/cache@v1 + with: + path: ${{ steps.npm-cache.outputs.dir }} key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} restore-keys: | ${{ runner.os }}-node- ``` ## Node - Yarn +The yarn cache directory will depend on your operating system and version of `yarn`. See https://yarnpkg.com/lang/en/docs/cli/cache/ for more info. 
+```yaml +- name: Get yarn cache + id: yarn-cache + run: echo "::set-output name=dir::$(yarn cache dir)" + +- uses: actions/cache@v1 + with: + path: ${{ steps.yarn-cache.outputs.dir }} + key: ${{ runner.os }}-yarn-${{ hashFiles('**/yarn.lock') }} + restore-keys: | + ${{ runner.os }}-yarn- +``` + +## PHP - Composer + +```yaml +- name: Get Composer Cache Directory + id: composer-cache + run: | + echo "::set-output name=dir::$(composer config cache-files-dir)" +- uses: actions/cache@v1 + with: + path: ${{ steps.composer-cache.outputs.dir }} + key: ${{ runner.os }}-composer-${{ hashFiles('**/composer.lock') }} + restore-keys: | + ${{ runner.os }}-composer- +``` + +## Python - pip + +For pip, the cache directory will vary by OS. See https://pip.pypa.io/en/stable/reference/pip_install/#caching + +Locations: + - Ubuntu: `~/.cache/pip` + - Windows: `~\AppData\Local\pip\Cache` + - macOS: `~/Library/Caches/pip` + +### Simple example ```yaml - uses: actions/cache@v1 with: - path: ~/.cache/yarn - key: ${{ runner.os }}-yarn-${{ hashFiles(format('{0}{1}', github.workspace, '/yarn.lock')) }} + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} restore-keys: | - ${{ runner.os }}-yarn- + ${{ runner.os }}-pip- +``` + +Replace `~/.cache/pip` with the correct `path` if not using Ubuntu. + +### Multiple OS's in a workflow + +```yaml +- uses: actions/cache@v1 + if: startsWith(runner.os, 'Linux') + with: + path: ~/.cache/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + +- uses: actions/cache@v1 + if: startsWith(runner.os, 'macOS') + with: + path: ~/Library/Caches/pip + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- + +- uses: actions/cache@v1 + if: startsWith(runner.os, 'Windows') + with: + path: ~\AppData\Local\pip\Cache + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- +``` + +### Using a script to get cache location + +> Note: This uses an internal pip API and may not always work +```yaml +- name: Get pip cache + id: pip-cache + run: | + python -c "from pip._internal.locations import USER_CACHE_DIR; print('::set-output name=dir::' + USER_CACHE_DIR)" + +- uses: actions/cache@v1 + with: + path: ${{ steps.pip-cache.outputs.dir }} + key: ${{ runner.os }}-pip-${{ hashFiles('**/requirements.txt') }} + restore-keys: | + ${{ runner.os }}-pip- ``` ## Ruby - Gem diff --git a/jest.config.js b/jest.config.js index 42e7e56..59548f6 100644 --- a/jest.config.js +++ b/jest.config.js @@ -1,20 +1,23 @@ -module.exports = { - clearMocks: true, - moduleFileExtensions: ['js', 'ts'], - testEnvironment: 'node', - testMatch: ['**/*.test.ts'], - testRunner: 'jest-circus/runner', - transform: { - '^.+\\.ts$': 'ts-jest' - }, - verbose: true -} +require("nock").disableNetConnect(); -const processStdoutWrite = process.stdout.write.bind(process.stdout) +module.exports = { + clearMocks: true, + moduleFileExtensions: ["js", "ts"], + testEnvironment: "node", + testMatch: ["**/*.test.ts"], + testRunner: "jest-circus/runner", + transform: { + "^.+\\.ts$": "ts-jest" + }, + verbose: true +}; + +const processStdoutWrite = process.stdout.write.bind(process.stdout); +// eslint-disable-next-line @typescript-eslint/explicit-function-return-type process.stdout.write = (str, encoding, cb) => { - // Core library will directly call process.stdout.write for commands - // We don't want :: commands to be executed by the runner during 
tests - if (!str.match(/^::/)) { - return processStdoutWrite(str, encoding, cb); - } -} \ No newline at end of file + // Core library will directly call process.stdout.write for commands + // We don't want :: commands to be executed by the runner during tests + if (!str.match(/^::/)) { + return processStdoutWrite(str, encoding, cb); + } +}; diff --git a/package-lock.json b/package-lock.json index a3dc4ea..605ecc4 100644 --- a/package-lock.json +++ b/package-lock.json @@ -477,6 +477,12 @@ "@babel/types": "^7.3.0" } }, + "@types/eslint-visitor-keys": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/@types/eslint-visitor-keys/-/eslint-visitor-keys-1.0.0.tgz", + "integrity": "sha512-OCutwjDZ4aFS6PB1UZ988C4YgwlBHJd6wCeQqaLdmadZ/7e+w79+hbMUFC1QXDNCmdyoRfAFdm0RypzwR+Qpag==", + "dev": true + }, "@types/istanbul-lib-coverage": { "version": "2.0.1", "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.1.tgz", @@ -517,6 +523,21 @@ "integrity": "sha512-yALhelO3i0hqZwhjtcr6dYyaLoCHbAMshwtj6cGxTvHZAKXHsYGdff6E8EPw3xLKY0ELUTQ69Q1rQiJENnccMA==", "dev": true }, + "@types/json-schema": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/@types/json-schema/-/json-schema-7.0.3.tgz", + "integrity": "sha512-Il2DtDVRGDcqjDtE+rF8iqg1CArehSK84HZJCT7AMITlyXRBpuPhqGLDQMowraqqu1coEaimg4ZOqggt6L6L+A==", + "dev": true + }, + "@types/nock": { + "version": "11.1.0", + "resolved": "https://registry.npmjs.org/@types/nock/-/nock-11.1.0.tgz", + "integrity": "sha512-jI/ewavBQ7X5178262JQR0ewicPAcJhXS/iFaNJl0VHLfyosZ/kwSrsa6VNQNSO8i9d8SqdRgOtZSOKJ/+iNMw==", + "dev": true, + "requires": { + "nock": "*" + } + }, "@types/node": { "version": "12.6.9", "resolved": "https://registry.npmjs.org/@types/node/-/node-12.6.9.tgz", @@ -544,6 +565,73 @@ "integrity": "sha512-SOhuU4wNBxhhTHxYaiG5NY4HBhDIDnJF60GU+2LqHAdKKer86//e4yg69aENCtQ04n0ovz+tq2YPME5t5yp4pw==", "dev": true }, + "@typescript-eslint/eslint-plugin": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/eslint-plugin/-/eslint-plugin-2.7.0.tgz", + "integrity": "sha512-H5G7yi0b0FgmqaEUpzyBlVh0d9lq4cWG2ap0RKa6BkF3rpBb6IrAoubt1NWh9R2kRs/f0k6XwRDiDz3X/FqXhQ==", + "dev": true, + "requires": { + "@typescript-eslint/experimental-utils": "2.7.0", + "eslint-utils": "^1.4.2", + "functional-red-black-tree": "^1.0.1", + "regexpp": "^2.0.1", + "tsutils": "^3.17.1" + } + }, + "@typescript-eslint/experimental-utils": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/experimental-utils/-/experimental-utils-2.7.0.tgz", + "integrity": "sha512-9/L/OJh2a5G2ltgBWJpHRfGnt61AgDeH6rsdg59BH0naQseSwR7abwHq3D5/op0KYD/zFT4LS5gGvWcMmegTEg==", + "dev": true, + "requires": { + "@types/json-schema": "^7.0.3", + "@typescript-eslint/typescript-estree": "2.7.0", + "eslint-scope": "^5.0.0" + } + }, + "@typescript-eslint/parser": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/parser/-/parser-2.7.0.tgz", + "integrity": "sha512-ctC0g0ZvYclxMh/xI+tyqP0EC2fAo6KicN9Wm2EIao+8OppLfxji7KAGJosQHSGBj3TcqUrA96AjgXuKa5ob2g==", + "dev": true, + "requires": { + "@types/eslint-visitor-keys": "^1.0.0", + "@typescript-eslint/experimental-utils": "2.7.0", + "@typescript-eslint/typescript-estree": "2.7.0", + "eslint-visitor-keys": "^1.1.0" + } + }, + "@typescript-eslint/typescript-estree": { + "version": "2.7.0", + "resolved": "https://registry.npmjs.org/@typescript-eslint/typescript-estree/-/typescript-estree-2.7.0.tgz", + "integrity": 
"sha512-vVCE/DY72N4RiJ/2f10PTyYekX2OLaltuSIBqeHYI44GQ940VCYioInIb8jKMrK9u855OEJdFC+HmWAZTnC+Ag==", + "dev": true, + "requires": { + "debug": "^4.1.1", + "glob": "^7.1.4", + "is-glob": "^4.0.1", + "lodash.unescape": "4.0.1", + "semver": "^6.3.0", + "tsutils": "^3.17.1" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, "@zeit/ncc": { "version": "0.20.5", "resolved": "https://registry.npmjs.org/@zeit/ncc/-/ncc-0.20.5.tgz", @@ -580,6 +668,12 @@ } } }, + "acorn-jsx": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/acorn-jsx/-/acorn-jsx-5.1.0.tgz", + "integrity": "sha512-tMUqwBWfLFbJbizRmEcWSLw6HnFzfdJs2sOJEOwwtVPMoH/0Ay+E703oZz78VSXZiiDcZrQ5XKjPIUQixhmgVw==", + "dev": true + }, "acorn-walk": { "version": "6.2.0", "resolved": "https://registry.npmjs.org/acorn-walk/-/acorn-walk-6.2.0.tgz", @@ -629,6 +723,15 @@ "normalize-path": "^2.1.1" } }, + "argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dev": true, + "requires": { + "sprintf-js": "~1.0.2" + } + }, "arr-diff": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/arr-diff/-/arr-diff-4.0.0.tgz", @@ -653,6 +756,16 @@ "integrity": "sha1-jCpe8kcv2ep0KwTHenUJO6J1fJM=", "dev": true }, + "array-includes": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/array-includes/-/array-includes-3.0.3.tgz", + "integrity": "sha1-GEtI9i2S10UrsxsyMWXH+L0CJm0=", + "dev": true, + "requires": { + "define-properties": "^1.1.2", + "es-abstract": "^1.7.0" + } + }, "array-unique": { "version": "0.3.2", "resolved": "https://registry.npmjs.org/array-unique/-/array-unique-0.3.2.tgz", @@ -674,6 +787,12 @@ "integrity": "sha1-8S4PPF13sLHN2RRpQuTpbB5N1SU=", "dev": true }, + "assertion-error": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/assertion-error/-/assertion-error-1.1.0.tgz", + "integrity": "sha512-jgsaNduz+ndvGyFt3uSuWqvy4lCnIJiovtouQN5JZHOKCS2QuhEdbcQHFhVksz2N2U9hXJo8odG7ETyWlEeuDw==", + "dev": true + }, "assign-symbols": { "version": "1.0.0", "resolved": "https://registry.npmjs.org/assign-symbols/-/assign-symbols-1.0.0.tgz", @@ -963,6 +1082,12 @@ } } }, + "callsites": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz", + "integrity": "sha512-P8BjAsXvZS+VIDUI11hHCQEv74YT67YUi5JJFNWIqL235sBmjX4+qx9Muvls5ivyNENctx46xQLQ3aTuE7ssaQ==", + "dev": true + }, "camelcase": { "version": "5.3.1", "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", @@ -984,6 +1109,20 @@ "integrity": "sha1-G2gcIf+EAzyCZUMJBolCDRhxUdw=", "dev": true }, + "chai": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/chai/-/chai-4.2.0.tgz", + "integrity": "sha512-XQU3bhBukrOsQCuwZndwGcCVQHyZi53fQ6Ys1Fym7E4olpIqqZZhhoFJoaKVvV17lWQoXYwgWN2nF5crA8J2jw==", + "dev": true, + "requires": { + "assertion-error": "^1.1.0", + "check-error": "^1.0.2", + "deep-eql": "^3.0.1", + "get-func-name": "^2.0.0", + "pathval": 
"^1.1.0", + "type-detect": "^4.0.5" + } + }, "chalk": { "version": "2.4.2", "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", @@ -995,6 +1134,18 @@ "supports-color": "^5.3.0" } }, + "chardet": { + "version": "0.7.0", + "resolved": "https://registry.npmjs.org/chardet/-/chardet-0.7.0.tgz", + "integrity": "sha512-mT8iDcrh03qDGRRmoA2hmBJnxpllMR+0/0qlzjqZES6NdiWDcZkCNAk4rPFZ9Q85r27unkiNNg8ZOiwZXBHwcA==", + "dev": true + }, + "check-error": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/check-error/-/check-error-1.0.2.tgz", + "integrity": "sha1-V00xLt2Iu13YkS6Sht1sCu1KrII=", + "dev": true + }, "ci-info": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-2.0.0.tgz", @@ -1030,6 +1181,21 @@ } } }, + "cli-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/cli-cursor/-/cli-cursor-3.1.0.tgz", + "integrity": "sha512-I/zHAwsKf9FqGoXM4WWRACob9+SNukZTd94DWF57E4toouRulbCxcUh6RKUEOQlYTHJnzkPMySvPNaaSLNfLZw==", + "dev": true, + "requires": { + "restore-cursor": "^3.1.0" + } + }, + "cli-width": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cli-width/-/cli-width-2.2.0.tgz", + "integrity": "sha1-/xnt6Kml5XkyQUewwR8PvLq+1jk=", + "dev": true + }, "cliui": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/cliui/-/cliui-4.1.0.tgz", @@ -1123,6 +1289,12 @@ "integrity": "sha1-2Klr13/Wjfd5OnMDajug1UBdR3s=", "dev": true }, + "contains-path": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/contains-path/-/contains-path-0.1.0.tgz", + "integrity": "sha1-/ozxhP9mcLa67wGp1IYaXL7EEgo=", + "dev": true + }, "convert-source-map": { "version": "1.6.0", "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-1.6.0.tgz", @@ -1234,6 +1406,15 @@ "integrity": "sha1-6zkTMzRYd1y4TNGh+uBiEGu4dUU=", "dev": true }, + "deep-eql": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/deep-eql/-/deep-eql-3.0.1.tgz", + "integrity": "sha512-+QeIQyN5ZuO+3Uk5DYh6/1eKO0m0YmJFGNmFHGACpf1ClL1nmlV/p4gNgbl2pJGxgXb4faqo6UE+M5ACEMyVcw==", + "dev": true, + "requires": { + "type-detect": "^4.0.0" + } + }, "deep-is": { "version": "0.1.3", "resolved": "https://registry.npmjs.org/deep-is/-/deep-is-0.1.3.tgz", @@ -1314,6 +1495,15 @@ "integrity": "sha512-xLqpez+Zj9GKSnPWS0WZw1igGocZ+uua8+y+5dDNTT934N3QuY1sp2LkHzwiaYQGz60hMq0pjAshdeXm5VUOEw==", "dev": true }, + "doctrine": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-3.0.0.tgz", + "integrity": "sha512-yS+Q5i3hBf7GBkd4KG8a7eBNNWNGLTaEwwYWUijIYM7zrlYDM0BFXHjjPWlWZ1Rg7UaddZeIDmi9jF3HmqiQ2w==", + "dev": true, + "requires": { + "esutils": "^2.0.2" + } + }, "domexception": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/domexception/-/domexception-1.0.1.tgz", @@ -1333,6 +1523,12 @@ "safer-buffer": "^2.1.0" } }, + "emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, "end-of-stream": { "version": "1.4.1", "resolved": "https://registry.npmjs.org/end-of-stream/-/end-of-stream-1.4.1.tgz", @@ -1403,6 +1599,267 @@ } } }, + "eslint": { + "version": "6.6.0", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-6.6.0.tgz", + "integrity": "sha512-PpEBq7b6qY/qrOmpYQ/jTMDYfuQMELR4g4WI1M/NaSDDD/bdcMb+dj4Hgks7p41kW2caXsPsEZAEAyAgjVVC0g==", + "dev": true, + "requires": { + "@babel/code-frame": "^7.0.0", + 
"ajv": "^6.10.0", + "chalk": "^2.1.0", + "cross-spawn": "^6.0.5", + "debug": "^4.0.1", + "doctrine": "^3.0.0", + "eslint-scope": "^5.0.0", + "eslint-utils": "^1.4.3", + "eslint-visitor-keys": "^1.1.0", + "espree": "^6.1.2", + "esquery": "^1.0.1", + "esutils": "^2.0.2", + "file-entry-cache": "^5.0.1", + "functional-red-black-tree": "^1.0.1", + "glob-parent": "^5.0.0", + "globals": "^11.7.0", + "ignore": "^4.0.6", + "import-fresh": "^3.0.0", + "imurmurhash": "^0.1.4", + "inquirer": "^7.0.0", + "is-glob": "^4.0.0", + "js-yaml": "^3.13.1", + "json-stable-stringify-without-jsonify": "^1.0.1", + "levn": "^0.3.0", + "lodash": "^4.17.14", + "minimatch": "^3.0.4", + "mkdirp": "^0.5.1", + "natural-compare": "^1.4.0", + "optionator": "^0.8.2", + "progress": "^2.0.0", + "regexpp": "^2.0.1", + "semver": "^6.1.2", + "strip-ansi": "^5.2.0", + "strip-json-comments": "^3.0.1", + "table": "^5.2.3", + "text-table": "^0.2.0", + "v8-compile-cache": "^2.0.3" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, + "eslint-config-prettier": { + "version": "6.5.0", + "resolved": "https://registry.npmjs.org/eslint-config-prettier/-/eslint-config-prettier-6.5.0.tgz", + "integrity": "sha512-cjXp8SbO9VFGW/Z7mbTydqS9to8Z58E5aYhj3e1+Hx7lS9s6gL5ILKNpCqZAFOVYRcSkWPFYljHrEh8QFEK5EQ==", + "dev": true, + "requires": { + "get-stdin": "^6.0.0" + } + }, + "eslint-import-resolver-node": { + "version": "0.3.2", + "resolved": "https://registry.npmjs.org/eslint-import-resolver-node/-/eslint-import-resolver-node-0.3.2.tgz", + "integrity": "sha512-sfmTqJfPSizWu4aymbPr4Iidp5yKm8yDkHp+Ir3YiTHiiDfxh69mOUsmiqW6RZ9zRXFaF64GtYmN7e+8GHBv6Q==", + "dev": true, + "requires": { + "debug": "^2.6.9", + "resolve": "^1.5.0" + } + }, + "eslint-module-utils": { + "version": "2.4.1", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.4.1.tgz", + "integrity": "sha512-H6DOj+ejw7Tesdgbfs4jeS4YMFrT8uI8xwd1gtQqXssaR0EQ26L+2O/w6wkYFy2MymON0fTwHmXBvvfLNZVZEw==", + "dev": true, + "requires": { + "debug": "^2.6.8", + "pkg-dir": "^2.0.0" + } + }, + "eslint-plugin-import": { + "version": "2.18.2", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.18.2.tgz", + "integrity": "sha512-5ohpsHAiUBRNaBWAF08izwUGlbrJoJJ+W9/TBwsGoR1MnlgfwMIKrFeSjWbt6moabiXW9xNvtFz+97KHRfI4HQ==", + "dev": true, + "requires": { + "array-includes": "^3.0.3", + "contains-path": "^0.1.0", + "debug": "^2.6.9", + "doctrine": "1.5.0", + "eslint-import-resolver-node": "^0.3.2", + "eslint-module-utils": "^2.4.0", + "has": "^1.0.3", + "minimatch": "^3.0.4", + "object.values": "^1.1.0", + "read-pkg-up": "^2.0.0", + "resolve": "^1.11.0" + }, + "dependencies": { + "doctrine": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/doctrine/-/doctrine-1.5.0.tgz", + "integrity": "sha1-N53Ocw9hZvds76TmcHoVmwLFpvo=", + "dev": true, + "requires": { + "esutils": "^2.0.2", + "isarray": "^1.0.0" + } + }, + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": 
"sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + }, + "read-pkg-up": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-2.0.0.tgz", + "integrity": "sha1-a3KoBImE4MQeeVEP1en6mbO1Sb4=", + "dev": true, + "requires": { + "find-up": "^2.0.0", + "read-pkg": "^2.0.0" + } + } + } + }, + "eslint-plugin-jest": { + "version": "23.0.3", + "resolved": "https://registry.npmjs.org/eslint-plugin-jest/-/eslint-plugin-jest-23.0.3.tgz", + "integrity": "sha512-9cNxr66zeOyz1S9AkQL4/ouilR6QHpYj8vKOQZ60fu9hAt5PJWS4KqWqfr1aqN5NFEZSPjFOla2Azn+KTWiGwg==", + "dev": true, + "requires": { + "@typescript-eslint/experimental-utils": "^2.5.0" + } + }, + "eslint-plugin-prettier": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-prettier/-/eslint-plugin-prettier-3.1.1.tgz", + "integrity": "sha512-A+TZuHZ0KU0cnn56/9mfR7/KjUJ9QNVXUhwvRFSR7PGPe0zQR6PTkmyqg1AtUUEOzTqeRsUwyKFh0oVZKVCrtA==", + "dev": true, + "requires": { + "prettier-linter-helpers": "^1.0.0" + } + }, + "eslint-scope": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/eslint-scope/-/eslint-scope-5.0.0.tgz", + "integrity": "sha512-oYrhJW7S0bxAFDvWqzvMPRm6pcgcnWc4QnofCAqRTRfQC0JcwenzGglTtsLyIuuWFfkqDG9vz67cnttSd53djw==", + "dev": true, + "requires": { + "esrecurse": "^4.1.0", + "estraverse": "^4.1.1" + } + }, + "eslint-utils": { + "version": "1.4.3", + "resolved": "https://registry.npmjs.org/eslint-utils/-/eslint-utils-1.4.3.tgz", + "integrity": "sha512-fbBN5W2xdY45KulGXmLHZ3c3FHfVYmKg0IrAKGOkT/464PQsx2UeIzfz1RmEci+KLm1bBaAzZAh8+/E+XAeZ8Q==", + "dev": true, + "requires": { + "eslint-visitor-keys": "^1.1.0" + } + }, + "eslint-visitor-keys": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/eslint-visitor-keys/-/eslint-visitor-keys-1.1.0.tgz", + "integrity": "sha512-8y9YjtM1JBJU/A9Kc+SbaOV4y29sSWckBwMHa+FGtVj5gN/sbnKDf6xJUl+8g7FAij9LVaP8C24DUiH/f/2Z9A==", + "dev": true + }, + "espree": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/espree/-/espree-6.1.2.tgz", + "integrity": "sha512-2iUPuuPP+yW1PZaMSDM9eyVf8D5P0Hi8h83YtZ5bPc/zHYjII5khoixIUTMO794NOY8F/ThF1Bo8ncZILarUTA==", + "dev": true, + "requires": { + "acorn": "^7.1.0", + "acorn-jsx": "^5.1.0", + "eslint-visitor-keys": "^1.1.0" + }, + "dependencies": { + "acorn": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/acorn/-/acorn-7.1.0.tgz", + "integrity": "sha512-kL5CuoXA/dgxlBbVrflsflzQ3PAas7RYZB52NOm/6839iVYJgKMJ3cQJD+t2i5+qFa8h3MDpEOJiS64E8JLnSQ==", + "dev": true + } + } + }, + "esprima": { + 
"version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "dev": true + }, + "esquery": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/esquery/-/esquery-1.0.1.tgz", + "integrity": "sha512-SmiyZ5zIWH9VM+SRUReLS5Q8a7GxtRdxEBVZpm98rJM7Sb+A9DVCndXfkeFUd3byderg+EbDkfnevfCwynWaNA==", + "dev": true, + "requires": { + "estraverse": "^4.0.0" + } + }, + "esrecurse": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/esrecurse/-/esrecurse-4.2.1.tgz", + "integrity": "sha512-64RBB++fIOAXPw3P9cy89qfMlvZEXZkqqJkjqqXIvzP5ezRZjW+lPWjw35UX/3EhUPFYbg5ER4JYgDw4007/DQ==", + "dev": true, + "requires": { + "estraverse": "^4.1.0" + } + }, "estraverse": { "version": "4.2.0", "resolved": "https://registry.npmjs.org/estraverse/-/estraverse-4.2.0.tgz", @@ -1533,6 +1990,17 @@ } } }, + "external-editor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/external-editor/-/external-editor-3.1.0.tgz", + "integrity": "sha512-hMQ4CX1p1izmuLYyZqLMO/qGNw10wSv9QDCPfzXfyFrOaCSSoRfqE1Kf1s5an66J5JZC62NewG+mK49jOCtQew==", + "dev": true, + "requires": { + "chardet": "^0.7.0", + "iconv-lite": "^0.4.24", + "tmp": "^0.0.33" + } + }, "extglob": { "version": "2.0.4", "resolved": "https://registry.npmjs.org/extglob/-/extglob-2.0.4.tgz", @@ -1610,6 +2078,12 @@ "integrity": "sha1-ewUhjd+WZ79/Nwv3/bLLFf3Qqkk=", "dev": true }, + "fast-diff": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/fast-diff/-/fast-diff-1.2.0.tgz", + "integrity": "sha512-xJuoT5+L99XlZ8twedaRf6Ax2TgQVxvgZOYoPKqZufmJib0tL2tegPBOZb1pVNgIhlqDlA0eO0c3wBvQcmzx4w==", + "dev": true + }, "fast-json-stable-stringify": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.0.0.tgz", @@ -1631,6 +2105,24 @@ "bser": "^2.0.0" } }, + "figures": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/figures/-/figures-3.1.0.tgz", + "integrity": "sha512-ravh8VRXqHuMvZt/d8GblBeqDMkdJMBdv/2KntFH+ra5MXkO7nxNKpzQ3n6QD/2da1kH0aWmNISdvhM7gl2gVg==", + "dev": true, + "requires": { + "escape-string-regexp": "^1.0.5" + } + }, + "file-entry-cache": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/file-entry-cache/-/file-entry-cache-5.0.1.tgz", + "integrity": "sha512-bCg29ictuBaKUwwArK4ouCaqDgLZcysCFLmM/Yn/FDoqndh/9vNuQfXRDvTuXKLxfD/JtZQGKFT8MGcJBK644g==", + "dev": true, + "requires": { + "flat-cache": "^2.0.1" + } + }, "fill-range": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-4.0.0.tgz", @@ -1663,6 +2155,23 @@ "locate-path": "^3.0.0" } }, + "flat-cache": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flat-cache/-/flat-cache-2.0.1.tgz", + "integrity": "sha512-LoQe6yDuUMDzQAEH8sgmh4Md6oZnc/7PjtwjNFSzveXqSHt6ka9fPBuso7IGf9Rz4uqnSnWiFH2B/zj24a5ReA==", + "dev": true, + "requires": { + "flatted": "^2.0.0", + "rimraf": "2.6.3", + "write": "1.0.3" + } + }, + "flatted": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/flatted/-/flatted-2.0.1.tgz", + "integrity": "sha512-a1hQMktqW9Nmqr5aktAux3JMNqaucxGcjtjWnZLHX7yyPCmlSV3M54nGYbqT8K+0GhF3NBgmJCc3ma+WOgX8Jg==", + "dev": true + }, "for-in": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/for-in/-/for-in-1.0.2.tgz", @@ -2255,12 +2764,30 @@ "integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A==", "dev": true }, 
+ "functional-red-black-tree": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/functional-red-black-tree/-/functional-red-black-tree-1.0.1.tgz", + "integrity": "sha1-GwqzvVU7Kg1jmdKcDj6gslIHgyc=", + "dev": true + }, "get-caller-file": { "version": "1.0.3", "resolved": "https://registry.npmjs.org/get-caller-file/-/get-caller-file-1.0.3.tgz", "integrity": "sha512-3t6rVToeoZfYSGd8YoLFR2DJkiQrIiUrGcjvFX2mDw3bn6k2OtwHN0TNCLbBO+w8qTvimhDkv+LSscbJY1vE6w==", "dev": true }, + "get-func-name": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/get-func-name/-/get-func-name-2.0.0.tgz", + "integrity": "sha1-6td0q+5y4gQJQzoGY2YCPdaIekE=", + "dev": true + }, + "get-stdin": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-6.0.0.tgz", + "integrity": "sha512-jp4tHawyV7+fkkSKyvjuLZswblUtz+SQKzSWnBbii16BuZksJlU1wuBYXY75r+duh/llF1ur6oNwi+2ZzjKZ7g==", + "dev": true + }, "get-stream": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/get-stream/-/get-stream-4.1.0.tgz", @@ -2299,6 +2826,15 @@ "path-is-absolute": "^1.0.0" } }, + "glob-parent": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.0.tgz", + "integrity": "sha512-qjtRgnIVmOfnKUE3NJAQEdk+lKrxfw8t5ke7SXtfMTHcjsBfOfWXCQfdb30zfDoZQ2IRSIiidmjtbHZPZ++Ihw==", + "dev": true, + "requires": { + "is-glob": "^4.0.1" + } + }, "globals": { "version": "11.12.0", "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", @@ -2441,6 +2977,30 @@ "safer-buffer": ">= 2.1.2 < 3" } }, + "ignore": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-4.0.6.tgz", + "integrity": "sha512-cyFDKrqc/YdcWFniJhzI42+AzS+gNwmUzOSFcRCQYwySuBBBy/KjuxWLZ/FHEH6Moq1NizMOBWyTcv8O4OZIMg==", + "dev": true + }, + "import-fresh": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/import-fresh/-/import-fresh-3.1.0.tgz", + "integrity": "sha512-PpuksHKGt8rXfWEr9m9EHIpgyyaltBy8+eF6GJM0QCAxMgxCfucMF3mjecK2QsJr0amJW7gTqh5/wht0z2UhEQ==", + "dev": true, + "requires": { + "parent-module": "^1.0.0", + "resolve-from": "^4.0.0" + }, + "dependencies": { + "resolve-from": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-4.0.0.tgz", + "integrity": "sha512-pb/MYmXstAkysRFx8piNI1tGFNQIFA3vkE3Gq4EuA1dF6gHp/+vgZqsCGJapvy8N3Q+4o7FwvquPJcnZ7RYy4g==", + "dev": true + } + } + }, "import-local": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/import-local/-/import-local-2.0.0.tgz", @@ -2484,6 +3044,72 @@ "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==", "dev": true }, + "inquirer": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/inquirer/-/inquirer-7.0.0.tgz", + "integrity": "sha512-rSdC7zelHdRQFkWnhsMu2+2SO41mpv2oF2zy4tMhmiLWkcKbOAs87fWAJhVXttKVwhdZvymvnuM95EyEXg2/tQ==", + "dev": true, + "requires": { + "ansi-escapes": "^4.2.1", + "chalk": "^2.4.2", + "cli-cursor": "^3.1.0", + "cli-width": "^2.0.0", + "external-editor": "^3.0.3", + "figures": "^3.0.0", + "lodash": "^4.17.15", + "mute-stream": "0.0.8", + "run-async": "^2.2.0", + "rxjs": "^6.4.0", + "string-width": "^4.1.0", + "strip-ansi": "^5.1.0", + "through": "^2.3.6" + }, + "dependencies": { + "ansi-escapes": { + "version": "4.2.1", + "resolved": "https://registry.npmjs.org/ansi-escapes/-/ansi-escapes-4.2.1.tgz", + "integrity": "sha512-Cg3ymMAdN10wOk/VYfLV7KCQyv7EDirJ64500sU7n9UlmioEtDuU5Gd+hj73hXSU/ex7tHJSssmyftDdkMLO8Q==", + "dev": true, + 
"requires": { + "type-fest": "^0.5.2" + } + }, + "ansi-regex": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.0.tgz", + "integrity": "sha512-bY6fj56OUQ0hU1KjFNDQuJFezqKdrAyFdIevADiqrWHwSlbmBNMHp5ak2f40Pm8JTFyM2mqxkG6ngkHO11f/lg==", + "dev": true + }, + "is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true + }, + "string-width": { + "version": "4.2.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.0.tgz", + "integrity": "sha512-zUz5JD+tgqtuDjMhwIg5uFVV3dtqZ9yQJlZVfq4I01/K5Paj5UHj7VyrQOJvzawSVlKpObApbfD0Ed6yJc+1eg==", + "dev": true, + "requires": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.0" + }, + "dependencies": { + "strip-ansi": { + "version": "6.0.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.0.tgz", + "integrity": "sha512-AuvKTrTfQNYNIctbR1K/YGTR1756GycPsg7b9bdV9Duqur4gv6aKqHXah67Z8ImS7WEz5QVcOtlfW2rZEugt6w==", + "dev": true, + "requires": { + "ansi-regex": "^5.0.0" + } + } + } + } + } + }, "invariant": { "version": "2.2.4", "resolved": "https://registry.npmjs.org/invariant/-/invariant-2.2.4.tgz", @@ -2597,6 +3223,12 @@ "integrity": "sha1-YrEQ4omkcUGOPsNqYX1HLjAd/Ik=", "dev": true }, + "is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha1-qIwCU1eR8C7TfHahueqXc8gz+MI=", + "dev": true + }, "is-fullwidth-code-point": { "version": "2.0.0", "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-2.0.0.tgz", @@ -2609,6 +3241,15 @@ "integrity": "sha512-cTIB4yPYL/Grw0EaSzASzg6bBy9gqCofvWN8okThAYIxKJZC+udlRAmGbM0XLeniEJSs8uEgHPGuHSe1XsOLSQ==", "dev": true }, + "is-glob": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.1.tgz", + "integrity": "sha512-5G0tKtBTFImOqDnLB2hG6Bp2qcKEFduo4tZu9MT/H6NQv/ghhy30o55ufafxJ/LdH79LLs2Kfrn85TLKyA7BUg==", + "dev": true, + "requires": { + "is-extglob": "^2.1.1" + } + }, "is-number": { "version": "3.0.0", "resolved": "https://registry.npmjs.org/is-number/-/is-number-3.0.0.tgz", @@ -2629,6 +3270,12 @@ } } }, + "is-promise": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/is-promise/-/is-promise-2.1.0.tgz", + "integrity": "sha1-eaKp7OfwlugPNtKy87wWwf9L8/o=", + "dev": true + }, "is-regex": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/is-regex/-/is-regex-1.0.4.tgz", @@ -3255,6 +3902,16 @@ "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==", "dev": true }, + "js-yaml": { + "version": "3.13.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.13.1.tgz", + "integrity": "sha512-YfbcO7jXDdyj0DGxYVSlSeQNHbD7XPWvrVWeVUujrQEoZzWJIRrCPoyk6kL6IAjAG2IolMK4T0hNUe0HOUs5Jw==", + "dev": true, + "requires": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + } + }, "jsbn": { "version": "0.1.1", "resolved": "https://registry.npmjs.org/jsbn/-/jsbn-0.1.1.tgz", @@ -3319,6 +3976,12 @@ "integrity": "sha512-xbbCH5dCYU5T8LcEhhuh7HJ88HXuW3qsI3Y0zOZFKfZEHcpWiHU/Jxzk629Brsab/mMiHQti9wMP+845RPe3Vg==", "dev": true }, + "json-stable-stringify-without-jsonify": { + "version": "1.0.1", + "resolved": 
"https://registry.npmjs.org/json-stable-stringify-without-jsonify/-/json-stable-stringify-without-jsonify-1.0.1.tgz", + "integrity": "sha1-nbe1lJatPzz+8wp1FC0tkwrXJlE=", + "dev": true + }, "json-stringify-safe": { "version": "5.0.1", "resolved": "https://registry.npmjs.org/json-stringify-safe/-/json-stringify-safe-5.0.1.tgz", @@ -3423,6 +4086,12 @@ "integrity": "sha1-7dFMgk4sycHgsKG0K7UhBRakJDg=", "dev": true }, + "lodash.unescape": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/lodash.unescape/-/lodash.unescape-4.0.1.tgz", + "integrity": "sha1-vyJJiGzlFM2hEvrpIYzcBlIR/Jw=", + "dev": true + }, "loose-envify": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/loose-envify/-/loose-envify-1.4.0.tgz", @@ -3631,6 +4300,12 @@ "integrity": "sha1-VgiurfwAvmwpAd9fmGF4jeDVl8g=", "dev": true }, + "mute-stream": { + "version": "0.0.8", + "resolved": "https://registry.npmjs.org/mute-stream/-/mute-stream-0.0.8.tgz", + "integrity": "sha512-nnbWWOkoWyUsTjKrhgD0dcz22mdkSnpYqbEjIm2nhwhuxlSkpywJmBo8h0ZqJdkp73mb90SssHkN4rsRaBAfAA==", + "dev": true + }, "nan": { "version": "2.14.0", "resolved": "https://registry.npmjs.org/nan/-/nan-2.14.0.tgz", @@ -3675,6 +4350,37 @@ "integrity": "sha512-1nh45deeb5olNY7eX82BkPO7SSxR5SSYJiPTrTdFUVYwAl8CKMA5N9PjTYkHiRjisVcxcQ1HXdLhx2qxxJzLNQ==", "dev": true }, + "nock": { + "version": "11.7.0", + "resolved": "https://registry.npmjs.org/nock/-/nock-11.7.0.tgz", + "integrity": "sha512-7c1jhHew74C33OBeRYyQENT+YXQiejpwIrEjinh6dRurBae+Ei4QjeUaPlkptIF0ZacEiVCnw8dWaxqepkiihg==", + "dev": true, + "requires": { + "chai": "^4.1.2", + "debug": "^4.1.0", + "json-stringify-safe": "^5.0.1", + "lodash": "^4.17.13", + "mkdirp": "^0.5.0", + "propagate": "^2.0.0" + }, + "dependencies": { + "debug": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.1.1.tgz", + "integrity": "sha512-pYAIzeRo8J6KPEaJ0VWOh5Pzkbw/RetuzehGM7QRRX5he4fPHx2rdKMB256ehJCkX+XRQm16eZLqLNS8RSZXZw==", + "dev": true, + "requires": { + "ms": "^2.1.1" + } + }, + "ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + } + } + }, "node-int64": { "version": "0.4.0", "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", @@ -3845,6 +4551,18 @@ } } }, + "object.values": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/object.values/-/object.values-1.1.0.tgz", + "integrity": "sha512-8mf0nKLAoFX6VlNVdhGj31SVYpaNFtUnuoOXWyFEstsWRgU837AK+JYM0iAxwkSzGRbwn8cbFmgbyxj1j4VbXg==", + "dev": true, + "requires": { + "define-properties": "^1.1.3", + "es-abstract": "^1.12.0", + "function-bind": "^1.1.1", + "has": "^1.0.3" + } + }, "once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", @@ -3854,6 +4572,15 @@ "wrappy": "1" } }, + "onetime": { + "version": "5.1.0", + "resolved": "https://registry.npmjs.org/onetime/-/onetime-5.1.0.tgz", + "integrity": "sha512-5NcSkPHhwTVFIQN+TUqXoS5+dlElHXdpAWu9I0HP20YOtIi+aZ0Ct82jdlILDxjLEAWwvm+qj1m6aEtsDVmm6Q==", + "dev": true, + "requires": { + "mimic-fn": "^2.1.0" + } + }, "optimist": { "version": "0.6.1", "resolved": "https://registry.npmjs.org/optimist/-/optimist-0.6.1.tgz", @@ -3905,6 +4632,12 @@ "mem": "^4.0.0" } }, + "os-tmpdir": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/os-tmpdir/-/os-tmpdir-1.0.2.tgz", + "integrity": "sha1-u+Z0BseaqFxc/sdm/lc0VV36EnQ=", + "dev": true + }, "p-defer": { 
"version": "1.0.0", "resolved": "https://registry.npmjs.org/p-defer/-/p-defer-1.0.0.tgz", @@ -3962,6 +4695,15 @@ "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", "dev": true }, + "parent-module": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/parent-module/-/parent-module-1.0.1.tgz", + "integrity": "sha512-GQ2EWRpQV8/o+Aw8YqtfZZPfNRWZYkbidE9k5rpl/hC3vtHHBfGm2Ifi6qWV+coDGkrUKZAxE3Lot5kcsRlh+g==", + "dev": true, + "requires": { + "callsites": "^3.0.0" + } + }, "parse-json": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-4.0.0.tgz", @@ -4017,6 +4759,12 @@ "pify": "^3.0.0" } }, + "pathval": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/pathval/-/pathval-1.1.0.tgz", + "integrity": "sha1-uULm1L3mUwBe9rcTYd74cn0GReA=", + "dev": true + }, "performance-now": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/performance-now/-/performance-now-2.1.0.tgz", @@ -4038,6 +4786,60 @@ "node-modules-regexp": "^1.0.0" } }, + "pkg-dir": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/pkg-dir/-/pkg-dir-2.0.0.tgz", + "integrity": "sha1-9tXREJ4Z1j7fQo4L1X4Sd3YVM0s=", + "dev": true, + "requires": { + "find-up": "^2.1.0" + }, + "dependencies": { + "find-up": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-2.1.0.tgz", + "integrity": "sha1-RdG35QbHF93UgndaK3eSCjwMV6c=", + "dev": true, + "requires": { + "locate-path": "^2.0.0" + } + }, + "locate-path": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-2.0.0.tgz", + "integrity": "sha1-K1aLJl7slExtnA3pw9u7ygNUzY4=", + "dev": true, + "requires": { + "p-locate": "^2.0.0", + "path-exists": "^3.0.0" + } + }, + "p-limit": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-1.3.0.tgz", + "integrity": "sha512-vvcXsLAJ9Dr5rQOPk7toZQZJApBl2K4J6dANSsEuh6QI41JYcsS/qhTGa9ErIUUgK3WNQoJYvylxvjqmiqEA9Q==", + "dev": true, + "requires": { + "p-try": "^1.0.0" + } + }, + "p-locate": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-2.0.0.tgz", + "integrity": "sha1-IKAQOyIqcMj9OcwuWAaA893l7EM=", + "dev": true, + "requires": { + "p-limit": "^1.1.0" + } + }, + "p-try": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-1.0.0.tgz", + "integrity": "sha1-y8ec26+P1CKOE/Yh8rGiN8GyB7M=", + "dev": true + } + } + }, "pn": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/pn/-/pn-1.1.0.tgz", @@ -4062,6 +4864,15 @@ "integrity": "sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw==", "dev": true }, + "prettier-linter-helpers": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/prettier-linter-helpers/-/prettier-linter-helpers-1.0.0.tgz", + "integrity": "sha512-GbK2cP9nraSSUF9N2XwUwqfzlAFlMNYYl+ShE/V+H8a9uNl/oUqB1w2EL54Jh0OlyRSd8RfWYJ3coVS4TROP2w==", + "dev": true, + "requires": { + "fast-diff": "^1.1.2" + } + }, "pretty-format": { "version": "24.8.0", "resolved": "https://registry.npmjs.org/pretty-format/-/pretty-format-24.8.0.tgz", @@ -4080,6 +4891,12 @@ "integrity": "sha512-3ouUOpQhtgrbOa17J7+uxOTpITYWaGP7/AhoR3+A+/1e9skrzelGi/dXzEYyvbxubEF6Wn2ypscTKiKJFFn1ag==", "dev": true }, + "progress": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/progress/-/progress-2.0.3.tgz", + "integrity": 
"sha512-7PiHtLll5LdnKIMw100I+8xJXR5gW2QwWYkT6iJva0bXitZKa/XMrSbdmg3r2Xnaidz9Qumd0VPaMrZlF9V9sA==", + "dev": true + }, "prompts": { "version": "2.1.0", "resolved": "https://registry.npmjs.org/prompts/-/prompts-2.1.0.tgz", @@ -4090,6 +4907,12 @@ "sisteransi": "^1.0.0" } }, + "propagate": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/propagate/-/propagate-2.0.1.tgz", + "integrity": "sha512-vGrhOavPSTz4QVNuBNdcNXePNdNMaO1xj9yBeH1ScQPjk/rhg9sSlCXPhMkFuaNNW/syTvYqsnbIJxMBfRbbag==", + "dev": true + }, "psl": { "version": "1.3.0", "resolved": "https://registry.npmjs.org/psl/-/psl-1.3.0.tgz", @@ -4124,6 +4947,55 @@ "integrity": "sha512-aUk3bHfZ2bRSVFFbbeVS4i+lNPZr3/WM5jT2J5omUVV1zzcs1nAaf3l51ctA5FFvCRbhrH0bdAsRRQddFJZPtA==", "dev": true }, + "read-pkg": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/read-pkg/-/read-pkg-2.0.0.tgz", + "integrity": "sha1-jvHAYjxqbbDcZxPEv6xGMysjaPg=", + "dev": true, + "requires": { + "load-json-file": "^2.0.0", + "normalize-package-data": "^2.3.2", + "path-type": "^2.0.0" + }, + "dependencies": { + "load-json-file": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/load-json-file/-/load-json-file-2.0.0.tgz", + "integrity": "sha1-eUfkIUmvgNaWy/eXvKq8/h/inKg=", + "dev": true, + "requires": { + "graceful-fs": "^4.1.2", + "parse-json": "^2.2.0", + "pify": "^2.0.0", + "strip-bom": "^3.0.0" + } + }, + "parse-json": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/parse-json/-/parse-json-2.2.0.tgz", + "integrity": "sha1-9ID0BDTvgHQfhGkJn43qGPVaTck=", + "dev": true, + "requires": { + "error-ex": "^1.2.0" + } + }, + "path-type": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/path-type/-/path-type-2.0.0.tgz", + "integrity": "sha1-8BLMuEFbcJb8LaoQVMPXI4lZTHM=", + "dev": true, + "requires": { + "pify": "^2.0.0" + } + }, + "pify": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/pify/-/pify-2.3.0.tgz", + "integrity": "sha1-7RQaasBDqEnqWISY59yosVMw6Qw=", + "dev": true + } + } + }, "read-pkg-up": { "version": "4.0.0", "resolved": "https://registry.npmjs.org/read-pkg-up/-/read-pkg-up-4.0.0.tgz", @@ -4181,6 +5053,12 @@ "safe-regex": "^1.1.0" } }, + "regexpp": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/regexpp/-/regexpp-2.0.1.tgz", + "integrity": "sha512-lv0M6+TkDVniA3aD1Eg0DVpfU/booSu7Eev3TDO/mZKHBfVjgCGTV4t4buppESEYDtkArYFOxTJWv6S5C+iaNw==", + "dev": true + }, "remove-trailing-separator": { "version": "1.1.0", "resolved": "https://registry.npmjs.org/remove-trailing-separator/-/remove-trailing-separator-1.1.0.tgz", @@ -4307,6 +5185,16 @@ "integrity": "sha1-LGN/53yJOv0qZj/iGqkIAGjiBSo=", "dev": true }, + "restore-cursor": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/restore-cursor/-/restore-cursor-3.1.0.tgz", + "integrity": "sha512-l+sSefzHpj5qimhFSE5a8nufZYAM3sBSVMAPtYkmC+4EH2anSGaEMXSD0izRQbu9nfyQ9y5JrVmp7E8oZrUjvA==", + "dev": true, + "requires": { + "onetime": "^5.1.0", + "signal-exit": "^3.0.2" + } + }, "ret": { "version": "0.1.15", "resolved": "https://registry.npmjs.org/ret/-/ret-0.1.15.tgz", @@ -4328,6 +5216,24 @@ "integrity": "sha512-nfMOlASu9OnRJo1mbEk2cz0D56a1MBNrJ7orjRZQG10XDyuvwksKbuXNp6qa+kbn839HwjwhBzhFmdsaEAfauA==", "dev": true }, + "run-async": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/run-async/-/run-async-2.3.0.tgz", + "integrity": "sha1-A3GrSuC91yDUFm19/aZP96RFpsA=", + "dev": true, + "requires": { + "is-promise": "^2.1.0" + } + }, + "rxjs": { + "version": "6.5.3", + "resolved": 
"https://registry.npmjs.org/rxjs/-/rxjs-6.5.3.tgz", + "integrity": "sha512-wuYsAYYFdWTAnAaPoKGNhfpWwKZbJW+HgAJ+mImp+Epl7BG8oNWBCTyRM8gba9k4lk8BgWdoYm21Mo/RYhhbgA==", + "dev": true, + "requires": { + "tslib": "^1.9.0" + } + }, "safe-buffer": { "version": "5.1.2", "resolved": "https://registry.npmjs.org/safe-buffer/-/safe-buffer-5.1.2.tgz", @@ -4455,6 +5361,17 @@ "integrity": "sha512-ZcYcZcT69nSLAR2oLN2JwNmLkJEKGooFMCdvOkFrToUt/WfcRWqhIg4P4KwY4dmLbuyXIx4o4YmPsvMRJYJd/w==", "dev": true }, + "slice-ansi": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/slice-ansi/-/slice-ansi-2.1.0.tgz", + "integrity": "sha512-Qu+VC3EwYLldKa1fCxuuvULvSJOKEgk9pi8dZeCVK7TqBfUNTH4sFkk4joj8afVSfAYgJoSOetjx9QWOJ5mYoQ==", + "dev": true, + "requires": { + "ansi-styles": "^3.2.0", + "astral-regex": "^1.0.0", + "is-fullwidth-code-point": "^2.0.0" + } + }, "snapdragon": { "version": "0.8.2", "resolved": "https://registry.npmjs.org/snapdragon/-/snapdragon-0.8.2.tgz", @@ -4650,6 +5567,12 @@ "extend-shallow": "^3.0.0" } }, + "sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha1-BOaSb2YolTVPPdAVIDYzuFcpfiw=", + "dev": true + }, "sshpk": { "version": "1.16.1", "resolved": "https://registry.npmjs.org/sshpk/-/sshpk-1.16.1.tgz", @@ -4784,6 +5707,12 @@ "integrity": "sha1-u0P/VZim6wXYm1n80SnJgzE2Br8=", "dev": true }, + "strip-json-comments": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.0.1.tgz", + "integrity": "sha512-VTyMAUfdm047mwKl+u79WIdrZxtFtn+nBxHeb844XBQ9uMNTuTHdx2hc5RiAJYqwTj3wc/xe5HLSdJSkJ+WfZw==", + "dev": true + }, "supports-color": { "version": "5.5.0", "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", @@ -4799,6 +5728,37 @@ "integrity": "sha512-9QNk5KwDF+Bvz+PyObkmSYjI5ksVUYtjW7AU22r2NKcfLJcXp96hkDWU3+XndOsUb+AQ9QhfzfCT2O+CNWT5Tw==", "dev": true }, + "table": { + "version": "5.4.6", + "resolved": "https://registry.npmjs.org/table/-/table-5.4.6.tgz", + "integrity": "sha512-wmEc8m4fjnob4gt5riFRtTu/6+4rSe12TpAELNSqHMfF3IqnA+CH37USM6/YR3qRZv7e56kAEAtd6nKZaxe0Ug==", + "dev": true, + "requires": { + "ajv": "^6.10.2", + "lodash": "^4.17.14", + "slice-ansi": "^2.1.0", + "string-width": "^3.0.0" + }, + "dependencies": { + "emoji-regex": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-7.0.3.tgz", + "integrity": "sha512-CwBLREIQ7LvYFB0WyRvwhq5N5qPhc6PMjD6bYggFlI5YyDgl+0vxq5VHbMOFqLg7hfWzmu8T5Z1QofhmTIhItA==", + "dev": true + }, + "string-width": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-3.1.0.tgz", + "integrity": "sha512-vafcv6KjVZKSgz06oM/H6GDBrAtz8vdhQakGjFIvNrHA6y3HCF1CInLy+QLq8dTJPQ1b+KDUqDFctkdRW44e1w==", + "dev": true, + "requires": { + "emoji-regex": "^7.0.1", + "is-fullwidth-code-point": "^2.0.0", + "strip-ansi": "^5.1.0" + } + } + } + }, "test-exclude": { "version": "5.2.3", "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-5.2.3.tgz", @@ -4811,12 +5771,33 @@ "require-main-filename": "^2.0.0" } }, + "text-table": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/text-table/-/text-table-0.2.0.tgz", + "integrity": "sha1-f17oI66AUgfACvLfSoTsP8+lcLQ=", + "dev": true + }, "throat": { "version": "4.1.0", "resolved": "https://registry.npmjs.org/throat/-/throat-4.1.0.tgz", "integrity": "sha1-iQN8vJLFarGJJua6TLsgDhVnKmo=", "dev": true }, + "through": { + "version": "2.3.8", + "resolved": 
"https://registry.npmjs.org/through/-/through-2.3.8.tgz", + "integrity": "sha1-DdTJ/6q8NXlgsbckEV1+Doai4fU=", + "dev": true + }, + "tmp": { + "version": "0.0.33", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.0.33.tgz", + "integrity": "sha512-jRCJlojKnZ3addtTOjdIqoRuPEKBvNXcGYqzO6zWZX8KfKEpnGY5jfggJQ3EjKuu8D4bJRr0y+cYJFmYbImXGw==", + "dev": true, + "requires": { + "os-tmpdir": "~1.0.2" + } + }, "tmpl": { "version": "1.0.4", "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.4.tgz", @@ -4936,6 +5917,21 @@ } } }, + "tslib": { + "version": "1.10.0", + "resolved": "https://registry.npmjs.org/tslib/-/tslib-1.10.0.tgz", + "integrity": "sha512-qOebF53frne81cf0S9B41ByenJ3/IuH8yJKngAX35CmiZySA0khhkovshKK+jGCaMnVomla7gVlIcc3EvKPbTQ==", + "dev": true + }, + "tsutils": { + "version": "3.17.1", + "resolved": "https://registry.npmjs.org/tsutils/-/tsutils-3.17.1.tgz", + "integrity": "sha512-kzeQ5B8H3w60nFY2g8cJIuH7JDpsALXySGtwGJ0p2LSjLgay3NdIpqq5SoOBe46bKDW2iq25irHCr8wjomUS2g==", + "dev": true, + "requires": { + "tslib": "^1.8.1" + } + }, "tunnel": { "version": "0.0.4", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.4.tgz", @@ -4965,6 +5961,18 @@ "prelude-ls": "~1.1.2" } }, + "type-detect": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/type-detect/-/type-detect-4.0.8.tgz", + "integrity": "sha512-0fr/mIH1dlO+x7TlcMy+bIDqKPsw/70tVyeHW787goQjhmqaZe10uwLujubK9q9Lg6Fiho1KUKDYz0Z7k7g5/g==", + "dev": true + }, + "type-fest": { + "version": "0.5.2", + "resolved": "https://registry.npmjs.org/type-fest/-/type-fest-0.5.2.tgz", + "integrity": "sha512-DWkS49EQKVX//Tbupb9TFa19c7+MK1XmzkrZUR8TAktmE/DizXoaoJV6TZ/tSIPXipqNiRI6CyAe7x69Jb6RSw==", + "dev": true + }, "typed-rest-client": { "version": "1.5.0", "resolved": "https://registry.npmjs.org/typed-rest-client/-/typed-rest-client-1.5.0.tgz", @@ -5096,6 +6104,12 @@ "resolved": "https://registry.npmjs.org/uuid/-/uuid-3.3.3.tgz", "integrity": "sha512-pW0No1RGHgzlpHJO1nsVrHKpOEIxkGg1xB+v0ZmdNH5OAeAwzAVrCnI2/6Mtx+Uys6iaylxa+D3g4j63IKKjSQ==" }, + "v8-compile-cache": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/v8-compile-cache/-/v8-compile-cache-2.1.0.tgz", + "integrity": "sha512-usZBT3PW+LOjM25wbqIlZwPeJV+3OSz3M1k1Ws8snlW39dZyYL9lOGC5FgPVHfk0jKmjiDV8Z0mIbVQPiwFs7g==", + "dev": true + }, "validate-npm-package-license": { "version": "3.0.4", "resolved": "https://registry.npmjs.org/validate-npm-package-license/-/validate-npm-package-license-3.0.4.tgz", @@ -5241,6 +6255,15 @@ "integrity": "sha1-tSQ9jz7BqjXxNkYFvA0QNuMKtp8=", "dev": true }, + "write": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/write/-/write-1.0.3.tgz", + "integrity": "sha512-/lg70HAjtkUgWPVZhZcm+T4hkL8Zbtp1nFNOn3lRrxnlv50SRBv7cR7RqR+GMsd3hUXy9hWBo4CHTbFTcOYwig==", + "dev": true, + "requires": { + "mkdirp": "^0.5.1" + } + }, "write-file-atomic": { "version": "2.4.1", "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-2.4.1.tgz", diff --git a/package.json b/package.json index a235bd7..5bece1c 100644 --- a/package.json +++ b/package.json @@ -7,6 +7,7 @@ "scripts": { "build": "tsc", "test": "tsc --noEmit && jest --coverage", + "lint": "eslint **/*.ts --cache", "format": "prettier --write **/*.ts", "format-check": "prettier --check **/*.ts", "release": "ncc build -o dist/restore src/restore.ts && ncc build -o dist/save src/save.ts && git add -f dist/" @@ -31,11 +32,20 @@ }, "devDependencies": { "@types/jest": "^24.0.13", + "@types/nock": "^11.1.0", "@types/node": "^12.0.4", "@types/uuid": "^3.4.5", + 
"@typescript-eslint/eslint-plugin": "^2.7.0", + "@typescript-eslint/parser": "^2.7.0", "@zeit/ncc": "^0.20.5", + "eslint": "^6.6.0", + "eslint-config-prettier": "^6.5.0", + "eslint-plugin-import": "^2.18.2", + "eslint-plugin-jest": "^23.0.3", + "eslint-plugin-prettier": "^3.1.1", "jest": "^24.8.0", "jest-circus": "^24.7.1", + "nock": "^11.7.0", "prettier": "1.18.2", "ts-jest": "^24.0.2", "typescript": "^3.6.4" diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 30b5009..e448157 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -1,13 +1,40 @@ import * as core from "@actions/core"; import * as fs from "fs"; - import { BearerCredentialHandler } from "typed-rest-client/Handlers"; import { HttpClient } from "typed-rest-client/HttpClient"; import { IHttpClientResponse } from "typed-rest-client/Interfaces"; -import { RestClient, IRequestOptions } from "typed-rest-client/RestClient"; - +import { IRequestOptions, RestClient } from "typed-rest-client/RestClient"; import { ArtifactCacheEntry } from "./contracts"; +function getCacheUrl(): string { + // Ideally we just use ACTIONS_CACHE_URL + const cacheUrl: string = ( + process.env["ACTIONS_CACHE_URL"] || + process.env["ACTIONS_RUNTIME_URL"] || + "" + ).replace("pipelines", "artifactcache"); + if (!cacheUrl) { + throw new Error( + "Cache Service Url not found, unable to restore cache." + ); + } + + core.debug(`Cache Url: ${cacheUrl}`); + return cacheUrl; +} + +function createAcceptHeader(type: string, apiVersion: string): string { + return `${type};api-version=${apiVersion}`; +} + +function getRequestOptions(): IRequestOptions { + const requestOptions: IRequestOptions = { + acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + }; + + return requestOptions; +} + export async function getCacheEntry( keys: string[] ): Promise { @@ -43,16 +70,6 @@ export async function getCacheEntry( return cacheResult; } -export async function downloadCache( - cacheEntry: ArtifactCacheEntry, - archivePath: string -): Promise { - const stream = fs.createWriteStream(archivePath); - const httpClient = new HttpClient("actions/cache"); - const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!); - await pipeResponseToStream(downloadResponse, stream); -} - async function pipeResponseToStream( response: IHttpClientResponse, stream: NodeJS.WritableStream @@ -64,7 +81,23 @@ async function pipeResponseToStream( }); } -export async function saveCache(stream: NodeJS.ReadableStream, key: string) { +export async function downloadCache( + cacheEntry: ArtifactCacheEntry, + archivePath: string +): Promise { + const stream = fs.createWriteStream(archivePath); + const httpClient = new HttpClient("actions/cache"); + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!); + await pipeResponseToStream(downloadResponse, stream); +} + +export async function saveCache( + key: string, + archivePath: string +): Promise { + const stream = fs.createReadStream(archivePath); + const cacheUrl = getCacheUrl(); const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; const bearerCredentialHandler = new BearerCredentialHandler(token); @@ -93,32 +126,3 @@ export async function saveCache(stream: NodeJS.ReadableStream, key: string) { core.info("Cache saved successfully"); } - -function getRequestOptions(): IRequestOptions { - const requestOptions: IRequestOptions = { - acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") - }; - - 
return requestOptions; -} - -function createAcceptHeader(type: string, apiVersion: string): string { - return `${type};api-version=${apiVersion}`; -} - -function getCacheUrl(): string { - // Ideally we just use ACTIONS_CACHE_URL - let cacheUrl: string = ( - process.env["ACTIONS_CACHE_URL"] || - process.env["ACTIONS_RUNTIME_URL"] || - "" - ).replace("pipelines", "artifactcache"); - if (!cacheUrl) { - throw new Error( - "Cache Service Url not found, unable to restore cache." - ); - } - - core.debug(`Cache Url: ${cacheUrl}`); - return cacheUrl; -} diff --git a/src/constants.ts b/src/constants.ts index 80f6de9..5f26e8c 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -1,14 +1,20 @@ -export namespace Inputs { - export const Key = "key"; - export const Path = "path"; - export const RestoreKeys = "restore-keys"; +export enum Inputs { + Key = "key", + Path = "path", + RestoreKeys = "restore-keys" } -export namespace Outputs { - export const CacheHit = "cache-hit"; +export enum Outputs { + CacheHit = "cache-hit" } -export namespace State { - export const CacheKey = "CACHE_KEY"; - export const CacheResult = "CACHE_RESULT"; +export enum State { + CacheKey = "CACHE_KEY", + CacheResult = "CACHE_RESULT" +} + +export enum Events { + Key = "GITHUB_EVENT_NAME", + Push = "push", + PullRequest = "pull_request" } diff --git a/src/restore.ts b/src/restore.ts index 060c8d4..87a2684 100644 --- a/src/restore.ts +++ b/src/restore.ts @@ -1,18 +1,25 @@ import * as core from "@actions/core"; import { exec } from "@actions/exec"; import * as io from "@actions/io"; - -import * as fs from "fs"; import * as path from "path"; - import * as cacheHttpClient from "./cacheHttpClient"; -import { Inputs, State } from "./constants"; +import { Events, Inputs, State } from "./constants"; import * as utils from "./utils/actionUtils"; -async function run() { +async function run(): Promise { try { // Validate inputs, this can cause task failure - let cachePath = utils.resolvePath( + if (!utils.isValidEvent()) { + core.setFailed( + `Event Validation Error: The event type ${ + process.env[Events.Key] + } is not supported. Only ${utils + .getSupportedEvents() + .join(", ")} events are supported at this time.` + ); + } + + const cachePath = utils.resolvePath( core.getInput(Inputs.Path, { required: true }) ); core.debug(`Cache Path: ${cachePath}`); @@ -60,7 +67,7 @@ async function run() { return; } - let archivePath = path.join( + const archivePath = path.join( await utils.createTempDirectory(), "cache.tgz" ); @@ -72,26 +79,33 @@ async function run() { // Download the cache from the cache entry await cacheHttpClient.downloadCache(cacheEntry, archivePath); - io.mkdirP(cachePath); + const archiveFileSize = utils.getArchiveFileSize(archivePath); + core.info( + `Cache Size: ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B)` + ); + + // Create directory to extract tar into + await io.mkdirP(cachePath); // http://man7.org/linux/man-pages/man1/tar.1.html // tar [-options] [files or directories which to add into archive] - const args = ["-xz"]; - const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - args.push("--force-local"); - archivePath = archivePath.replace(/\\/g, "/"); - cachePath = cachePath.replace(/\\/g, "/"); - } - args.push(...["-f", archivePath, "-C", cachePath]); + const args = IS_WINDOWS + ? 
[ + "-xz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/") + ] + : ["-xz", "-f", archivePath, "-C", cachePath]; const tarPath = await io.which("tar", true); core.debug(`Tar Path: ${tarPath}`); - const archiveFileSize = fs.statSync(archivePath).size; - core.debug(`File Size: ${archiveFileSize}`); - await exec(`"${tarPath}"`, args); const isExactKeyMatch = utils.isExactKeyMatch( diff --git a/src/save.ts b/src/save.ts index 69e44cf..d660064 100644 --- a/src/save.ts +++ b/src/save.ts @@ -1,15 +1,12 @@ import * as core from "@actions/core"; import { exec } from "@actions/exec"; - import * as io from "@actions/io"; -import * as fs from "fs"; import * as path from "path"; - import * as cacheHttpClient from "./cacheHttpClient"; import { Inputs, State } from "./constants"; import * as utils from "./utils/actionUtils"; -async function run() { +async function run(): Promise { try { const state = utils.getCacheState(); @@ -27,12 +24,12 @@ async function run() { return; } - let cachePath = utils.resolvePath( + const cachePath = utils.resolvePath( core.getInput(Inputs.Path, { required: true }) ); core.debug(`Cache Path: ${cachePath}`); - let archivePath = path.join( + const archivePath = path.join( await utils.createTempDirectory(), "cache.tgz" ); @@ -40,32 +37,36 @@ async function run() { // http://man7.org/linux/man-pages/man1/tar.1.html // tar [-options] [files or directories which to add into archive] - const args = ["-cz"]; const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - args.push("--force-local"); - archivePath = archivePath.replace(/\\/g, "/"); - cachePath = cachePath.replace(/\\/g, "/"); - } - - args.push(...["-f", archivePath, "-C", cachePath, "."]); + const args = IS_WINDOWS + ? [ + "-cz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/"), + "." 
+ ] + : ["-cz", "-f", archivePath, "-C", cachePath, "."]; const tarPath = await io.which("tar", true); core.debug(`Tar Path: ${tarPath}`); await exec(`"${tarPath}"`, args); const fileSizeLimit = 400 * 1024 * 1024; // 400MB - const archiveFileSize = fs.statSync(archivePath).size; + const archiveFileSize = utils.getArchiveFileSize(archivePath); core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { core.warning( - `Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.` + `Cache size of ~${Math.round( + archiveFileSize / (1024 * 1024) + )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.` ); return; } - const stream = fs.createReadStream(archivePath); - await cacheHttpClient.saveCache(stream, primaryKey); + await cacheHttpClient.saveCache(primaryKey, archivePath); } catch (error) { core.warning(error.message); } diff --git a/src/utils/actionUtils.ts b/src/utils/actionUtils.ts index d4d7638..ba5b2ea 100644 --- a/src/utils/actionUtils.ts +++ b/src/utils/actionUtils.ts @@ -1,10 +1,11 @@ import * as core from "@actions/core"; import * as io from "@actions/io"; +import * as fs from "fs"; import * as os from "os"; import * as path from "path"; import * as uuidV4 from "uuid/v4"; -import { Outputs, State } from "../constants"; +import { Events, Outputs, State } from "../constants"; import { ArtifactCacheEntry } from "../contracts"; // From https://github.com/actions/toolkit/blob/master/packages/tool-cache/src/tool-cache.ts#L23 @@ -32,6 +33,10 @@ export async function createTempDirectory(): Promise { return dest; } +export function getArchiveFileSize(path: string): number { + return fs.statSync(path).size; +} + export function isExactKeyMatch( key: string, cacheResult?: ArtifactCacheEntry @@ -45,10 +50,18 @@ export function isExactKeyMatch( ); } +export function setCacheState(state: ArtifactCacheEntry): void { + core.saveState(State.CacheResult, JSON.stringify(state)); +} + +export function setCacheHitOutput(isCacheHit: boolean): void { + core.setOutput(Outputs.CacheHit, isCacheHit.toString()); +} + export function setOutputAndState( key: string, cacheResult?: ArtifactCacheEntry -) { +): void { setCacheHitOutput(isExactKeyMatch(key, cacheResult)); // Store the cache result if it exists cacheResult && setCacheState(cacheResult); @@ -57,15 +70,11 @@ export function setOutputAndState( export function getCacheState(): ArtifactCacheEntry | undefined { const stateData = core.getState(State.CacheResult); core.debug(`State: ${stateData}`); - return (stateData && JSON.parse(stateData)) as ArtifactCacheEntry; -} + if (stateData) { + return JSON.parse(stateData) as ArtifactCacheEntry; + } -export function setCacheState(state: ArtifactCacheEntry) { - core.saveState(State.CacheResult, JSON.stringify(state)); -} - -export function setCacheHitOutput(isCacheHit: boolean) { - core.setOutput(Outputs.CacheHit, isCacheHit.toString()); + return undefined; } export function resolvePath(filePath: string): string { @@ -79,3 +88,15 @@ export function resolvePath(filePath: string): string { return path.resolve(filePath); } + +export function getSupportedEvents(): string[] { + return [Events.Push, Events.PullRequest]; +} + +// Currently the cache token is only authorized for push and pull_request events +// All other events will fail when reading and saving the cache +// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context +export function 
isValidEvent(): boolean { + const githubEvent = process.env[Events.Key] || ""; + return getSupportedEvents().includes(githubEvent); +} diff --git a/src/utils/testUtils.ts b/src/utils/testUtils.ts index 67121c7..b1d20d0 100644 --- a/src/utils/testUtils.ts +++ b/src/utils/testUtils.ts @@ -1,7 +1,29 @@ +import { Inputs } from "../constants"; + +// See: https://github.com/actions/toolkit/blob/master/packages/core/src/core.ts#L67 function getInputName(name: string): string { return `INPUT_${name.replace(/ /g, "_").toUpperCase()}`; } -export function setInput(name: string, value: string) { +export function setInput(name: string, value: string): void { process.env[getInputName(name)] = value; } + +interface CacheInput { + path: string; + key: string; + restoreKeys?: string[]; +} + +export function setInputs(input: CacheInput): void { + setInput(Inputs.Path, input.path); + setInput(Inputs.Key, input.key); + input.restoreKeys && + setInput(Inputs.RestoreKeys, input.restoreKeys.join("\n")); +} + +export function clearInputs(): void { + delete process.env[getInputName(Inputs.Path)]; + delete process.env[getInputName(Inputs.Key)]; + delete process.env[getInputName(Inputs.RestoreKeys)]; +} From 44543250bd1b1d68b70b6e9949c2a11f5b11d990 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Fri, 15 Nov 2019 10:31:02 -0500 Subject: [PATCH 05/21] Release 1.0.2 --- dist/restore/index.js | 152 ++++++++++++++++++++++++++---------------- dist/save/index.js | 148 ++++++++++++++++++++++++---------------- package-lock.json | 2 +- package.json | 2 +- 4 files changed, 185 insertions(+), 119 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index 53f1ec6..cbfb8f9 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1496,6 +1496,26 @@ const fs = __importStar(__webpack_require__(747)); const Handlers_1 = __webpack_require__(941); const HttpClient_1 = __webpack_require__(874); const RestClient_1 = __webpack_require__(105); +function getCacheUrl() { + // Ideally we just use ACTIONS_CACHE_URL + const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || + process.env["ACTIONS_RUNTIME_URL"] || + "").replace("pipelines", "artifactcache"); + if (!cacheUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); + } + core.debug(`Cache Url: ${cacheUrl}`); + return cacheUrl; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + }; + return requestOptions; +} function getCacheEntry(keys) { return __awaiter(this, void 0, void 0, function* () { const cacheUrl = getCacheUrl(); @@ -1522,15 +1542,6 @@ function getCacheEntry(keys) { }); } exports.getCacheEntry = getCacheEntry; -function downloadCache(cacheEntry, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - const stream = fs.createWriteStream(archivePath); - const httpClient = new HttpClient_1.HttpClient("actions/cache"); - const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); - yield pipeResponseToStream(downloadResponse, stream); - }); -} -exports.downloadCache = downloadCache; function pipeResponseToStream(response, stream) { return __awaiter(this, void 0, void 0, function* () { return new Promise(resolve => { @@ -1540,8 +1551,19 @@ function pipeResponseToStream(response, stream) { }); }); } -function saveCache(stream, key) { +function downloadCache(cacheEntry, archivePath) { return __awaiter(this, void 0, void 
0, function* () { + const stream = fs.createWriteStream(archivePath); + const httpClient = new HttpClient_1.HttpClient("actions/cache"); + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); + yield pipeResponseToStream(downloadResponse, stream); + }); +} +exports.downloadCache = downloadCache; +function saveCache(key, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const stream = fs.createReadStream(archivePath); const cacheUrl = getCacheUrl(); const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); @@ -1562,26 +1584,6 @@ function saveCache(stream, key) { }); } exports.saveCache = saveCache; -function getRequestOptions() { - const requestOptions = { - acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") - }; - return requestOptions; -} -function createAcceptHeader(type, apiVersion) { - return `${type};api-version=${apiVersion}`; -} -function getCacheUrl() { - // Ideally we just use ACTIONS_CACHE_URL - let cacheUrl = (process.env["ACTIONS_CACHE_URL"] || - process.env["ACTIONS_RUNTIME_URL"] || - "").replace("pipelines", "artifactcache"); - if (!cacheUrl) { - throw new Error("Cache Service Url not found, unable to restore cache."); - } - core.debug(`Cache Url: ${cacheUrl}`); - return cacheUrl; -} /***/ }), @@ -2139,6 +2141,7 @@ var __importStar = (this && this.__importStar) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); const io = __importStar(__webpack_require__(1)); +const fs = __importStar(__webpack_require__(747)); const os = __importStar(__webpack_require__(87)); const path = __importStar(__webpack_require__(622)); const uuidV4 = __importStar(__webpack_require__(826)); @@ -2170,6 +2173,10 @@ function createTempDirectory() { }); } exports.createTempDirectory = createTempDirectory; +function getArchiveFileSize(path) { + return fs.statSync(path).size; +} +exports.getArchiveFileSize = getArchiveFileSize; function isExactKeyMatch(key, cacheResult) { return !!(cacheResult && cacheResult.cacheKey && @@ -2178,6 +2185,14 @@ function isExactKeyMatch(key, cacheResult) { }) === 0); } exports.isExactKeyMatch = isExactKeyMatch; +function setCacheState(state) { + core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); +} +exports.setCacheState = setCacheState; +function setCacheHitOutput(isCacheHit) { + core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); +} +exports.setCacheHitOutput = setCacheHitOutput; function setOutputAndState(key, cacheResult) { setCacheHitOutput(isExactKeyMatch(key, cacheResult)); // Store the cache result if it exists @@ -2187,17 +2202,12 @@ exports.setOutputAndState = setOutputAndState; function getCacheState() { const stateData = core.getState(constants_1.State.CacheResult); core.debug(`State: ${stateData}`); - return (stateData && JSON.parse(stateData)); + if (stateData) { + return JSON.parse(stateData); + } + return undefined; } exports.getCacheState = getCacheState; -function setCacheState(state) { - core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); -} -exports.setCacheState = setCacheState; -function setCacheHitOutput(isCacheHit) { - core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); -} -exports.setCacheHitOutput = setCacheHitOutput; function resolvePath(filePath) { if (filePath[0] === "~") { const home 
= os.homedir(); @@ -2209,6 +2219,18 @@ function resolvePath(filePath) { return path.resolve(filePath); } exports.resolvePath = resolvePath; +function getSupportedEvents() { + return [constants_1.Events.Push, constants_1.Events.PullRequest]; +} +exports.getSupportedEvents = getSupportedEvents; +// Currently the cache token is only authorized for push and pull_request events +// All other events will fail when reading and saving the cache +// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context +function isValidEvent() { + const githubEvent = process.env[constants_1.Events.Key] || ""; + return getSupportedEvents().includes(githubEvent); +} +exports.isValidEvent = isValidEvent; /***/ }), @@ -2836,19 +2858,25 @@ function isUnixExecutable(stats) { Object.defineProperty(exports, "__esModule", { value: true }); var Inputs; (function (Inputs) { - Inputs.Key = "key"; - Inputs.Path = "path"; - Inputs.RestoreKeys = "restore-keys"; + Inputs["Key"] = "key"; + Inputs["Path"] = "path"; + Inputs["RestoreKeys"] = "restore-keys"; })(Inputs = exports.Inputs || (exports.Inputs = {})); var Outputs; (function (Outputs) { - Outputs.CacheHit = "cache-hit"; + Outputs["CacheHit"] = "cache-hit"; })(Outputs = exports.Outputs || (exports.Outputs = {})); var State; (function (State) { - State.CacheKey = "CACHE_KEY"; - State.CacheResult = "CACHE_RESULT"; + State["CacheKey"] = "CACHE_KEY"; + State["CacheResult"] = "CACHE_RESULT"; })(State = exports.State || (exports.State = {})); +var Events; +(function (Events) { + Events["Key"] = "GITHUB_EVENT_NAME"; + Events["Push"] = "push"; + Events["PullRequest"] = "pull_request"; +})(Events = exports.Events || (exports.Events = {})); /***/ }), @@ -2959,7 +2987,6 @@ Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); -const fs = __importStar(__webpack_require__(747)); const path = __importStar(__webpack_require__(622)); const cacheHttpClient = __importStar(__webpack_require__(154)); const constants_1 = __webpack_require__(694); @@ -2968,7 +2995,12 @@ function run() { return __awaiter(this, void 0, void 0, function* () { try { // Validate inputs, this can cause task failure - let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); + if (!utils.isValidEvent()) { + core.setFailed(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. 
Only ${utils + .getSupportedEvents() + .join(", ")} events are supported at this time.`); + } + const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); core.debug(`Cache Path: ${cachePath}`); const primaryKey = core.getInput(constants_1.Inputs.Key, { required: true }); core.saveState(constants_1.State.CacheKey, primaryKey); @@ -3000,27 +3032,31 @@ function run() { core.info(`Cache not found for input keys: ${keys.join(", ")}.`); return; } - let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); + const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); core.debug(`Archive Path: ${archivePath}`); // Store the cache result utils.setCacheState(cacheEntry); // Download the cache from the cache entry yield cacheHttpClient.downloadCache(cacheEntry, archivePath); - io.mkdirP(cachePath); + const archiveFileSize = utils.getArchiveFileSize(archivePath); + core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); + // Create directory to extract tar into + yield io.mkdirP(cachePath); // http://man7.org/linux/man-pages/man1/tar.1.html // tar [-options] [files or directories which to add into archive] - const args = ["-xz"]; const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - args.push("--force-local"); - archivePath = archivePath.replace(/\\/g, "/"); - cachePath = cachePath.replace(/\\/g, "/"); - } - args.push(...["-f", archivePath, "-C", cachePath]); + const args = IS_WINDOWS + ? [ + "-xz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/") + ] + : ["-xz", "-f", archivePath, "-C", cachePath]; const tarPath = yield io.which("tar", true); core.debug(`Tar Path: ${tarPath}`); - const archiveFileSize = fs.statSync(archivePath).size; - core.debug(`File Size: ${archiveFileSize}`); yield exec_1.exec(`"${tarPath}"`, args); const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry); utils.setCacheHitOutput(isExactKeyMatch); diff --git a/dist/save/index.js b/dist/save/index.js index 79c5e1c..c57ba76 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1496,6 +1496,26 @@ const fs = __importStar(__webpack_require__(747)); const Handlers_1 = __webpack_require__(941); const HttpClient_1 = __webpack_require__(874); const RestClient_1 = __webpack_require__(105); +function getCacheUrl() { + // Ideally we just use ACTIONS_CACHE_URL + const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || + process.env["ACTIONS_RUNTIME_URL"] || + "").replace("pipelines", "artifactcache"); + if (!cacheUrl) { + throw new Error("Cache Service Url not found, unable to restore cache."); + } + core.debug(`Cache Url: ${cacheUrl}`); + return cacheUrl; +} +function createAcceptHeader(type, apiVersion) { + return `${type};api-version=${apiVersion}`; +} +function getRequestOptions() { + const requestOptions = { + acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + }; + return requestOptions; +} function getCacheEntry(keys) { return __awaiter(this, void 0, void 0, function* () { const cacheUrl = getCacheUrl(); @@ -1522,15 +1542,6 @@ function getCacheEntry(keys) { }); } exports.getCacheEntry = getCacheEntry; -function downloadCache(cacheEntry, archivePath) { - return __awaiter(this, void 0, void 0, function* () { - const stream = fs.createWriteStream(archivePath); - const httpClient = new HttpClient_1.HttpClient("actions/cache"); - const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); - yield 
pipeResponseToStream(downloadResponse, stream); - }); -} -exports.downloadCache = downloadCache; function pipeResponseToStream(response, stream) { return __awaiter(this, void 0, void 0, function* () { return new Promise(resolve => { @@ -1540,8 +1551,19 @@ function pipeResponseToStream(response, stream) { }); }); } -function saveCache(stream, key) { +function downloadCache(cacheEntry, archivePath) { return __awaiter(this, void 0, void 0, function* () { + const stream = fs.createWriteStream(archivePath); + const httpClient = new HttpClient_1.HttpClient("actions/cache"); + // eslint-disable-next-line @typescript-eslint/no-non-null-assertion + const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); + yield pipeResponseToStream(downloadResponse, stream); + }); +} +exports.downloadCache = downloadCache; +function saveCache(key, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const stream = fs.createReadStream(archivePath); const cacheUrl = getCacheUrl(); const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); @@ -1562,26 +1584,6 @@ function saveCache(stream, key) { }); } exports.saveCache = saveCache; -function getRequestOptions() { - const requestOptions = { - acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") - }; - return requestOptions; -} -function createAcceptHeader(type, apiVersion) { - return `${type};api-version=${apiVersion}`; -} -function getCacheUrl() { - // Ideally we just use ACTIONS_CACHE_URL - let cacheUrl = (process.env["ACTIONS_CACHE_URL"] || - process.env["ACTIONS_RUNTIME_URL"] || - "").replace("pipelines", "artifactcache"); - if (!cacheUrl) { - throw new Error("Cache Service Url not found, unable to restore cache."); - } - core.debug(`Cache Url: ${cacheUrl}`); - return cacheUrl; -} /***/ }), @@ -2139,6 +2141,7 @@ var __importStar = (this && this.__importStar) || function (mod) { Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); const io = __importStar(__webpack_require__(1)); +const fs = __importStar(__webpack_require__(747)); const os = __importStar(__webpack_require__(87)); const path = __importStar(__webpack_require__(622)); const uuidV4 = __importStar(__webpack_require__(826)); @@ -2170,6 +2173,10 @@ function createTempDirectory() { }); } exports.createTempDirectory = createTempDirectory; +function getArchiveFileSize(path) { + return fs.statSync(path).size; +} +exports.getArchiveFileSize = getArchiveFileSize; function isExactKeyMatch(key, cacheResult) { return !!(cacheResult && cacheResult.cacheKey && @@ -2178,6 +2185,14 @@ function isExactKeyMatch(key, cacheResult) { }) === 0); } exports.isExactKeyMatch = isExactKeyMatch; +function setCacheState(state) { + core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); +} +exports.setCacheState = setCacheState; +function setCacheHitOutput(isCacheHit) { + core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); +} +exports.setCacheHitOutput = setCacheHitOutput; function setOutputAndState(key, cacheResult) { setCacheHitOutput(isExactKeyMatch(key, cacheResult)); // Store the cache result if it exists @@ -2187,17 +2202,12 @@ exports.setOutputAndState = setOutputAndState; function getCacheState() { const stateData = core.getState(constants_1.State.CacheResult); core.debug(`State: ${stateData}`); - return (stateData && JSON.parse(stateData)); + if (stateData) { + return JSON.parse(stateData); + } + 
return undefined; } exports.getCacheState = getCacheState; -function setCacheState(state) { - core.saveState(constants_1.State.CacheResult, JSON.stringify(state)); -} -exports.setCacheState = setCacheState; -function setCacheHitOutput(isCacheHit) { - core.setOutput(constants_1.Outputs.CacheHit, isCacheHit.toString()); -} -exports.setCacheHitOutput = setCacheHitOutput; function resolvePath(filePath) { if (filePath[0] === "~") { const home = os.homedir(); @@ -2209,6 +2219,18 @@ function resolvePath(filePath) { return path.resolve(filePath); } exports.resolvePath = resolvePath; +function getSupportedEvents() { + return [constants_1.Events.Push, constants_1.Events.PullRequest]; +} +exports.getSupportedEvents = getSupportedEvents; +// Currently the cache token is only authorized for push and pull_request events +// All other events will fail when reading and saving the cache +// See GitHub Context https://help.github.com/actions/automating-your-workflow-with-github-actions/contexts-and-expression-syntax-for-github-actions#github-context +function isValidEvent() { + const githubEvent = process.env[constants_1.Events.Key] || ""; + return getSupportedEvents().includes(githubEvent); +} +exports.isValidEvent = isValidEvent; /***/ }), @@ -2853,7 +2875,6 @@ Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); -const fs = __importStar(__webpack_require__(747)); const path = __importStar(__webpack_require__(622)); const cacheHttpClient = __importStar(__webpack_require__(154)); const constants_1 = __webpack_require__(694); @@ -2872,32 +2893,35 @@ function run() { core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); return; } - let cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); + const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); core.debug(`Cache Path: ${cachePath}`); - let archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); + const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); core.debug(`Archive Path: ${archivePath}`); // http://man7.org/linux/man-pages/man1/tar.1.html // tar [-options] [files or directories which to add into archive] - const args = ["-cz"]; const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - args.push("--force-local"); - archivePath = archivePath.replace(/\\/g, "/"); - cachePath = cachePath.replace(/\\/g, "/"); - } - args.push(...["-f", archivePath, "-C", cachePath, "."]); + const args = IS_WINDOWS + ? [ + "-cz", + "--force-local", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + cachePath.replace(/\\/g, "/"), + "." 
+ ] + : ["-cz", "-f", archivePath, "-C", cachePath, "."]; const tarPath = yield io.which("tar", true); core.debug(`Tar Path: ${tarPath}`); yield exec_1.exec(`"${tarPath}"`, args); const fileSizeLimit = 400 * 1024 * 1024; // 400MB - const archiveFileSize = fs.statSync(archivePath).size; + const archiveFileSize = utils.getArchiveFileSize(archivePath); core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { - core.warning(`Cache size of ${archiveFileSize} bytes is over the 400MB limit, not saving cache.`); + core.warning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`); return; } - const stream = fs.createReadStream(archivePath); - yield cacheHttpClient.saveCache(stream, primaryKey); + yield cacheHttpClient.saveCache(primaryKey, archivePath); } catch (error) { core.warning(error.message); @@ -2918,19 +2942,25 @@ exports.default = run; Object.defineProperty(exports, "__esModule", { value: true }); var Inputs; (function (Inputs) { - Inputs.Key = "key"; - Inputs.Path = "path"; - Inputs.RestoreKeys = "restore-keys"; + Inputs["Key"] = "key"; + Inputs["Path"] = "path"; + Inputs["RestoreKeys"] = "restore-keys"; })(Inputs = exports.Inputs || (exports.Inputs = {})); var Outputs; (function (Outputs) { - Outputs.CacheHit = "cache-hit"; + Outputs["CacheHit"] = "cache-hit"; })(Outputs = exports.Outputs || (exports.Outputs = {})); var State; (function (State) { - State.CacheKey = "CACHE_KEY"; - State.CacheResult = "CACHE_RESULT"; + State["CacheKey"] = "CACHE_KEY"; + State["CacheResult"] = "CACHE_RESULT"; })(State = exports.State || (exports.State = {})); +var Events; +(function (Events) { + Events["Key"] = "GITHUB_EVENT_NAME"; + Events["Push"] = "push"; + Events["PullRequest"] = "pull_request"; +})(Events = exports.Events || (exports.Events = {})); /***/ }), diff --git a/package-lock.json b/package-lock.json index 605ecc4..9821cb1 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.1", + "version": "1.0.2", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index 5bece1c..dd09d47 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.1", + "version": "1.0.2", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", From cffae9552bb9f84b9812c1ee9ea2e3c0a70a797e Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Thu, 21 Nov 2019 14:57:29 -0500 Subject: [PATCH 06/21] Release v1.0.3 --- README.md | 2 +- __tests__/actionUtils.test.ts | 10 ++++++ __tests__/restore.test.ts | 22 +++++--------- __tests__/save.test.ts | 57 ++++++++++++++++++++++++----------- dist/restore/index.js | 15 ++++++--- dist/save/index.js | 22 +++++++++++--- package-lock.json | 2 +- package.json | 2 +- src/cacheHttpClient.ts | 5 +-- src/restore.ts | 5 +-- src/save.ts | 19 +++++++++--- src/utils/actionUtils.ts | 5 +++ 12 files changed, 114 insertions(+), 52 deletions(-) diff --git a/README.md b/README.md index 210c51d..1278fae 100644 --- a/README.md +++ b/README.md @@ -2,7 +2,7 @@ This GitHub Action allows caching dependencies and build outputs to improve workflow execution time. 
-GitHub Actions status +GitHub Actions status ## Documentation diff --git a/__tests__/actionUtils.test.ts b/__tests__/actionUtils.test.ts index 4688b5d..f46d65d 100644 --- a/__tests__/actionUtils.test.ts +++ b/__tests__/actionUtils.test.ts @@ -162,6 +162,16 @@ test("getCacheState with valid state", () => { expect(getStateMock).toHaveBeenCalledTimes(1); }); +test("logWarning logs a message with a warning prefix", () => { + const message = "A warning occurred."; + + const infoMock = jest.spyOn(core, "info"); + + actionUtils.logWarning(message); + + expect(infoMock).toHaveBeenCalledWith(`[warning]${message}`); +}); + test("isValidEvent returns false for unknown event", () => { const event = "foo"; process.env[Events.Key] = event; diff --git a/__tests__/restore.test.ts b/__tests__/restore.test.ts index 1919e30..78b00ec 100644 --- a/__tests__/restore.test.ts +++ b/__tests__/restore.test.ts @@ -50,14 +50,16 @@ afterEach(() => { delete process.env[Events.Key]; }); -test("restore with invalid event", async () => { +test("restore with invalid event outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const failedMock = jest.spyOn(core, "setFailed"); const invalidEvent = "commit_comment"; process.env[Events.Key] = invalidEvent; await run(); - expect(failedMock).toHaveBeenCalledWith( + expect(logWarningMock).toHaveBeenCalledWith( `Event Validation Error: The event type ${invalidEvent} is not supported. Only push, pull_request events are supported at this time.` ); + expect(failedMock).toHaveBeenCalledTimes(0); }); test("restore with no path should fail", async () => { @@ -126,7 +128,6 @@ test("restore with no cache found", async () => { }); const infoMock = jest.spyOn(core, "info"); - const warningMock = jest.spyOn(core, "warning"); const failedMock = jest.spyOn(core, "setFailed"); const stateMock = jest.spyOn(core, "saveState"); @@ -138,7 +139,6 @@ test("restore with no cache found", async () => { await run(); expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); - expect(warningMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); expect(infoMock).toHaveBeenCalledWith( @@ -153,7 +153,7 @@ test("restore with server error should fail", async () => { key }); - const warningMock = jest.spyOn(core, "warning"); + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const failedMock = jest.spyOn(core, "setFailed"); const stateMock = jest.spyOn(core, "saveState"); @@ -168,8 +168,8 @@ test("restore with server error should fail", async () => { expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); - expect(warningMock).toHaveBeenCalledTimes(1); - expect(warningMock).toHaveBeenCalledWith("HTTP Error Occurred"); + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); @@ -187,7 +187,6 @@ test("restore with restore keys and no cache found", async () => { }); const infoMock = jest.spyOn(core, "info"); - const warningMock = jest.spyOn(core, "warning"); const failedMock = jest.spyOn(core, "setFailed"); const stateMock = jest.spyOn(core, "saveState"); @@ -199,7 +198,6 @@ test("restore with restore keys and no cache found", async () => { await run(); expect(stateMock).toHaveBeenCalledWith("CACHE_KEY", key); - expect(warningMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); expect(infoMock).toHaveBeenCalledWith( @@ -216,7 +214,6 @@ 
test("restore with cache found", async () => { }); const infoMock = jest.spyOn(core, "info"); - const warningMock = jest.spyOn(core, "warning"); const failedMock = jest.spyOn(core, "setFailed"); const stateMock = jest.spyOn(core, "saveState"); @@ -281,7 +278,6 @@ test("restore with cache found", async () => { expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); - expect(warningMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); }); @@ -296,7 +292,6 @@ test("restore with a pull request event and cache found", async () => { process.env[Events.Key] = Events.PullRequest; const infoMock = jest.spyOn(core, "info"); - const warningMock = jest.spyOn(core, "warning"); const failedMock = jest.spyOn(core, "setFailed"); const stateMock = jest.spyOn(core, "saveState"); @@ -362,7 +357,6 @@ test("restore with a pull request event and cache found", async () => { expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); expect(infoMock).toHaveBeenCalledWith(`Cache restored from key: ${key}`); - expect(warningMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); }); @@ -377,7 +371,6 @@ test("restore with cache found for restore key", async () => { }); const infoMock = jest.spyOn(core, "info"); - const warningMock = jest.spyOn(core, "warning"); const failedMock = jest.spyOn(core, "setFailed"); const stateMock = jest.spyOn(core, "saveState"); @@ -445,6 +438,5 @@ test("restore with cache found for restore key", async () => { expect(infoMock).toHaveBeenCalledWith( `Cache restored from key: ${restoreKey}` ); - expect(warningMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); }); diff --git a/__tests__/save.test.ts b/__tests__/save.test.ts index 67b13d2..89657c4 100644 --- a/__tests__/save.test.ts +++ b/__tests__/save.test.ts @@ -3,7 +3,7 @@ import * as exec from "@actions/exec"; import * as io from "@actions/io"; import * as path from "path"; import * as cacheHttpClient from "../src/cacheHttpClient"; -import { Inputs } from "../src/constants"; +import { Events, Inputs } from "../src/constants"; import { ArtifactCacheEntry } from "../src/contracts"; import run from "../src/save"; import * as actionUtils from "../src/utils/actionUtils"; @@ -32,6 +32,16 @@ beforeAll(() => { } ); + jest.spyOn(actionUtils, "isValidEvent").mockImplementation(() => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.isValidEvent(); + }); + + jest.spyOn(actionUtils, "getSupportedEvents").mockImplementation(() => { + const actualUtils = jest.requireActual("../src/utils/actionUtils"); + return actualUtils.getSupportedEvents(); + }); + jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { return path.resolve(filePath); }); @@ -45,12 +55,29 @@ beforeAll(() => { }); }); +beforeEach(() => { + process.env[Events.Key] = Events.Push; +}); + afterEach(() => { testUtils.clearInputs(); + delete process.env[Events.Key]; +}); + +test("save with invalid event outputs warning", async () => { + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + const invalidEvent = "commit_comment"; + process.env[Events.Key] = invalidEvent; + await run(); + expect(logWarningMock).toHaveBeenCalledWith( + `Event Validation Error: The event type ${invalidEvent} is not supported. 
Only push, pull_request events are supported at this time.` + ); + expect(failedMock).toHaveBeenCalledTimes(0); }); test("save with no primary key in state outputs warning", async () => { - const warningMock = jest.spyOn(core, "warning"); + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const failedMock = jest.spyOn(core, "setFailed"); const cacheEntry: ArtifactCacheEntry = { @@ -72,16 +99,15 @@ test("save with no primary key in state outputs warning", async () => { await run(); - expect(warningMock).toHaveBeenCalledWith( + expect(logWarningMock).toHaveBeenCalledWith( `Error retrieving key from state.` ); - expect(warningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledTimes(1); expect(failedMock).toHaveBeenCalledTimes(0); }); test("save with exact match returns early", async () => { const infoMock = jest.spyOn(core, "info"); - const warningMock = jest.spyOn(core, "warning"); const failedMock = jest.spyOn(core, "setFailed"); const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; @@ -112,12 +138,11 @@ test("save with exact match returns early", async () => { expect(execMock).toHaveBeenCalledTimes(0); - expect(warningMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); }); test("save with missing input outputs warning", async () => { - const warningMock = jest.spyOn(core, "warning"); + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const failedMock = jest.spyOn(core, "setFailed"); const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; @@ -140,15 +165,15 @@ test("save with missing input outputs warning", async () => { await run(); - expect(warningMock).toHaveBeenCalledWith( + expect(logWarningMock).toHaveBeenCalledWith( "Input required and not supplied: path" ); - expect(warningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledTimes(1); expect(failedMock).toHaveBeenCalledTimes(0); }); test("save with large cache outputs warning", async () => { - const warningMock = jest.spyOn(core, "warning"); + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const failedMock = jest.spyOn(core, "setFailed"); const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; @@ -200,8 +225,8 @@ test("save with large cache outputs warning", async () => { expect(execMock).toHaveBeenCalledTimes(1); expect(execMock).toHaveBeenCalledWith(`"tar"`, args); - expect(warningMock).toHaveBeenCalledTimes(1); - expect(warningMock).toHaveBeenCalledWith( + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledWith( "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache." 
); @@ -209,7 +234,7 @@ test("save with large cache outputs warning", async () => { }); test("save with server error outputs warning", async () => { - const warningMock = jest.spyOn(core, "warning"); + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const failedMock = jest.spyOn(core, "setFailed"); const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; @@ -265,14 +290,13 @@ test("save with server error outputs warning", async () => { expect(saveCacheMock).toHaveBeenCalledTimes(1); expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); - expect(warningMock).toHaveBeenCalledTimes(1); - expect(warningMock).toHaveBeenCalledWith("HTTP Error Occurred"); + expect(logWarningMock).toHaveBeenCalledTimes(1); + expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); expect(failedMock).toHaveBeenCalledTimes(0); }); test("save with valid inputs uploads a cache", async () => { - const warningMock = jest.spyOn(core, "warning"); const failedMock = jest.spyOn(core, "setFailed"); const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; @@ -324,6 +348,5 @@ test("save with valid inputs uploads a cache", async () => { expect(saveCacheMock).toHaveBeenCalledTimes(1); expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); - expect(warningMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); }); diff --git a/dist/restore/index.js b/dist/restore/index.js index cbfb8f9..f449334 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1533,11 +1533,12 @@ function getCacheEntry(keys) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; - core.debug(`Cache Result:`); - core.debug(JSON.stringify(cacheResult)); if (!cacheResult || !cacheResult.archiveLocation) { throw new Error("Cache not found."); } + core.setSecret(cacheResult.archiveLocation); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -2208,6 +2209,11 @@ function getCacheState() { return undefined; } exports.getCacheState = getCacheState; +function logWarning(message) { + const warningPrefix = "[warning]"; + core.info(`${warningPrefix}${message}`); +} +exports.logWarning = logWarning; function resolvePath(filePath) { if (filePath[0] === "~") { const home = os.homedir(); @@ -2996,9 +3002,10 @@ function run() { try { // Validate inputs, this can cause task failure if (!utils.isValidEvent()) { - core.setFailed(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils + utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. 
Only ${utils .getSupportedEvents() .join(", ")} events are supported at this time.`); + return; } const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); core.debug(`Cache Path: ${cachePath}`); @@ -3063,7 +3070,7 @@ function run() { core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`); } catch (error) { - core.warning(error.message); + utils.logWarning(error.message); utils.setCacheHitOutput(false); } } diff --git a/dist/save/index.js b/dist/save/index.js index c57ba76..534e0cf 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1533,11 +1533,12 @@ function getCacheEntry(keys) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; - core.debug(`Cache Result:`); - core.debug(JSON.stringify(cacheResult)); if (!cacheResult || !cacheResult.archiveLocation) { throw new Error("Cache not found."); } + core.setSecret(cacheResult.archiveLocation); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); return cacheResult; }); } @@ -2208,6 +2209,11 @@ function getCacheState() { return undefined; } exports.getCacheState = getCacheState; +function logWarning(message) { + const warningPrefix = "[warning]"; + core.info(`${warningPrefix}${message}`); +} +exports.logWarning = logWarning; function resolvePath(filePath) { if (filePath[0] === "~") { const home = os.homedir(); @@ -2882,11 +2888,17 @@ const utils = __importStar(__webpack_require__(443)); function run() { return __awaiter(this, void 0, void 0, function* () { try { + if (!utils.isValidEvent()) { + utils.logWarning(`Event Validation Error: The event type ${process.env[constants_1.Events.Key]} is not supported. Only ${utils + .getSupportedEvents() + .join(", ")} events are supported at this time.`); + return; + } const state = utils.getCacheState(); // Inputs are re-evaluted before the post action, so we want the original key used for restore const primaryKey = core.getState(constants_1.State.CacheKey); if (!primaryKey) { - core.warning(`Error retrieving key from state.`); + utils.logWarning(`Error retrieving key from state.`); return; } if (utils.isExactKeyMatch(primaryKey, state)) { @@ -2918,13 +2930,13 @@ function run() { const archiveFileSize = utils.getArchiveFileSize(archivePath); core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { - core.warning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`); + utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`); return; } yield cacheHttpClient.saveCache(primaryKey, archivePath); } catch (error) { - core.warning(error.message); + utils.logWarning(error.message); } }); } diff --git a/package-lock.json b/package-lock.json index 9821cb1..2e8413e 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.2", + "version": "1.0.3", "lockfileVersion": 1, "requires": true, "dependencies": { diff --git a/package.json b/package.json index dd09d47..42fbdbe 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.2", + "version": "1.0.3", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index e448157..8a2014f 100644 --- a/src/cacheHttpClient.ts +++ 
b/src/cacheHttpClient.ts @@ -61,11 +61,12 @@ export async function getCacheEntry( throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; - core.debug(`Cache Result:`); - core.debug(JSON.stringify(cacheResult)); if (!cacheResult || !cacheResult.archiveLocation) { throw new Error("Cache not found."); } + core.setSecret(cacheResult.archiveLocation); + core.debug(`Cache Result:`); + core.debug(JSON.stringify(cacheResult)); return cacheResult; } diff --git a/src/restore.ts b/src/restore.ts index 87a2684..15570cd 100644 --- a/src/restore.ts +++ b/src/restore.ts @@ -10,13 +10,14 @@ async function run(): Promise { try { // Validate inputs, this can cause task failure if (!utils.isValidEvent()) { - core.setFailed( + utils.logWarning( `Event Validation Error: The event type ${ process.env[Events.Key] } is not supported. Only ${utils .getSupportedEvents() .join(", ")} events are supported at this time.` ); + return; } const cachePath = utils.resolvePath( @@ -118,7 +119,7 @@ async function run(): Promise { `Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}` ); } catch (error) { - core.warning(error.message); + utils.logWarning(error.message); utils.setCacheHitOutput(false); } } catch (error) { diff --git a/src/save.ts b/src/save.ts index d660064..21f32d3 100644 --- a/src/save.ts +++ b/src/save.ts @@ -3,17 +3,28 @@ import { exec } from "@actions/exec"; import * as io from "@actions/io"; import * as path from "path"; import * as cacheHttpClient from "./cacheHttpClient"; -import { Inputs, State } from "./constants"; +import { Events, Inputs, State } from "./constants"; import * as utils from "./utils/actionUtils"; async function run(): Promise { try { + if (!utils.isValidEvent()) { + utils.logWarning( + `Event Validation Error: The event type ${ + process.env[Events.Key] + } is not supported. 
Only ${utils + .getSupportedEvents() + .join(", ")} events are supported at this time.` + ); + return; + } + const state = utils.getCacheState(); // Inputs are re-evaluted before the post action, so we want the original key used for restore const primaryKey = core.getState(State.CacheKey); if (!primaryKey) { - core.warning(`Error retrieving key from state.`); + utils.logWarning(`Error retrieving key from state.`); return; } @@ -58,7 +69,7 @@ async function run(): Promise { const archiveFileSize = utils.getArchiveFileSize(archivePath); core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { - core.warning( + utils.logWarning( `Cache size of ~${Math.round( archiveFileSize / (1024 * 1024) )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.` @@ -68,7 +79,7 @@ async function run(): Promise { await cacheHttpClient.saveCache(primaryKey, archivePath); } catch (error) { - core.warning(error.message); + utils.logWarning(error.message); } } diff --git a/src/utils/actionUtils.ts b/src/utils/actionUtils.ts index ba5b2ea..f6369fb 100644 --- a/src/utils/actionUtils.ts +++ b/src/utils/actionUtils.ts @@ -77,6 +77,11 @@ export function getCacheState(): ArtifactCacheEntry | undefined { return undefined; } +export function logWarning(message: string): void { + const warningPrefix = "[warning]"; + core.info(`${warningPrefix}${message}`); +} + export function resolvePath(filePath: string): string { if (filePath[0] === "~") { const home = os.homedir(); From 4c4974aff182a0ee174fefeba291c7741b0dce89 Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Mon, 6 Jan 2020 13:36:33 -0500 Subject: [PATCH 07/21] Release v1.1 --- README.md | 27 +++- __tests__/restore.test.ts | 87 ++++-------- __tests__/save.test.ts | 152 ++++++++++++--------- __tests__/tar.test.ts | 58 ++++++++ dist/restore/index.js | 265 ++++++++++++++++++++++++++++-------- dist/save/index.js | 273 ++++++++++++++++++++++++++++++-------- examples.md | 58 +++++--- package-lock.json | 26 ++-- package.json | 6 +- src/cacheHttpClient.ts | 250 +++++++++++++++++++++++++++------- src/contracts.d.ts | 13 ++ src/restore.ts | 32 +---- src/save.ts | 38 +++--- src/tar.ts | 47 +++++++ 14 files changed, 959 insertions(+), 373 deletions(-) create mode 100644 __tests__/tar.test.ts create mode 100644 src/tar.ts diff --git a/README.md b/README.md index 1278fae..02cf6fe 100644 --- a/README.md +++ b/README.md @@ -35,7 +35,7 @@ on: push jobs: build: runs-on: ubuntu-latest - + steps: - uses: actions/checkout@v1 @@ -49,14 +49,31 @@ jobs: - name: Generate Prime Numbers if: steps.cache-primes.outputs.cache-hit != 'true' run: /generate-primes.sh -d prime-numbers - + - name: Use Prime Numbers run: /primes.sh -d prime-numbers ``` -## Ecosystem Examples +## Implementation Examples + +Every programming language and framework has its own way of caching. 
+ +See [Examples](examples.md) for a list of `actions/cache` implementations for use with: + +- [C# - Nuget](./examples.md#c---nuget) +- [Elixir - Mix](./examples.md#elixir---mix) +- [Go - Modules](./examples.md#go---modules) +- [Java - Gradle](./examples.md#java---gradle) +- [Java - Maven](./examples.md#java---maven) +- [Node - npm](./examples.md#node---npm) +- [Node - Yarn](./examples.md#node---yarn) +- [PHP - Composer](./examples.md#php---composer) +- [Python - pip](./examples.md#python---pip) +- [Ruby - Gem](./examples.md#ruby---gem) +- [Rust - Cargo](./examples.md#rust---cargo) +- [Swift, Objective-C - Carthage](./examples.md#swift-objective-c---carthage) +- [Swift, Objective-C - CocoaPods](./examples.md#swift-objective-c---cocoapods) -See [Examples](examples.md) ## Cache Limits @@ -76,7 +93,7 @@ steps: with: path: path/to/dependencies key: ${{ runner.os }}-${{ hashFiles('**/lockfiles') }} - + - name: Install Dependencies if: steps.cache.outputs.cache-hit != 'true' run: /install.sh diff --git a/__tests__/restore.test.ts b/__tests__/restore.test.ts index 78b00ec..c96a2d6 100644 --- a/__tests__/restore.test.ts +++ b/__tests__/restore.test.ts @@ -1,18 +1,16 @@ import * as core from "@actions/core"; -import * as exec from "@actions/exec"; -import * as io from "@actions/io"; import * as path from "path"; import * as cacheHttpClient from "../src/cacheHttpClient"; import { Events, Inputs } from "../src/constants"; import { ArtifactCacheEntry } from "../src/contracts"; import run from "../src/restore"; +import * as tar from "../src/tar"; import * as actionUtils from "../src/utils/actionUtils"; import * as testUtils from "../src/utils/testUtils"; -jest.mock("@actions/exec"); -jest.mock("@actions/io"); -jest.mock("../src/utils/actionUtils"); jest.mock("../src/cacheHttpClient"); +jest.mock("../src/tar"); +jest.mock("../src/utils/actionUtils"); beforeAll(() => { jest.spyOn(actionUtils, "resolvePath").mockImplementation(filePath => { @@ -35,10 +33,6 @@ beforeAll(() => { const actualUtils = jest.requireActual("../src/utils/actionUtils"); return actualUtils.getSupportedEvents(); }); - - jest.spyOn(io, "which").mockImplementation(tool => { - return Promise.resolve(tool); - }); }); beforeEach(() => { @@ -245,8 +239,7 @@ test("restore with cache found", async () => { .spyOn(actionUtils, "getArchiveFileSize") .mockReturnValue(fileSize); - const mkdirMock = jest.spyOn(io, "mkdirP"); - const execMock = jest.spyOn(exec, "exec"); + const extractTarMock = jest.spyOn(tar, "extractTar"); const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); await run(); @@ -255,24 +248,14 @@ test("restore with cache found", async () => { expect(getCacheMock).toHaveBeenCalledWith([key]); expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); - expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath + ); expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); - expect(mkdirMock).toHaveBeenCalledWith(cachePath); - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? 
[ - "-xz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/") - ] - : ["-xz", "-f", archivePath, "-C", cachePath]; - - expect(execMock).toHaveBeenCalledTimes(1); - expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + expect(extractTarMock).toHaveBeenCalledTimes(1); + expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); @@ -323,8 +306,7 @@ test("restore with a pull request event and cache found", async () => { .spyOn(actionUtils, "getArchiveFileSize") .mockReturnValue(fileSize); - const mkdirMock = jest.spyOn(io, "mkdirP"); - const execMock = jest.spyOn(exec, "exec"); + const extractTarMock = jest.spyOn(tar, "extractTar"); const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); await run(); @@ -333,25 +315,15 @@ test("restore with a pull request event and cache found", async () => { expect(getCacheMock).toHaveBeenCalledWith([key]); expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); - expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath + ); expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~60 MB (62915000 B)`); - expect(mkdirMock).toHaveBeenCalledWith(cachePath); - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? [ - "-xz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/") - ] - : ["-xz", "-f", archivePath, "-C", cachePath]; - - expect(execMock).toHaveBeenCalledTimes(1); - expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + expect(extractTarMock).toHaveBeenCalledTimes(1); + expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); expect(setCacheHitOutputMock).toHaveBeenCalledWith(true); @@ -402,8 +374,7 @@ test("restore with cache found for restore key", async () => { .spyOn(actionUtils, "getArchiveFileSize") .mockReturnValue(fileSize); - const mkdirMock = jest.spyOn(io, "mkdirP"); - const execMock = jest.spyOn(exec, "exec"); + const extractTarMock = jest.spyOn(tar, "extractTar"); const setCacheHitOutputMock = jest.spyOn(actionUtils, "setCacheHitOutput"); await run(); @@ -412,25 +383,15 @@ test("restore with cache found for restore key", async () => { expect(getCacheMock).toHaveBeenCalledWith([key, restoreKey]); expect(setCacheStateMock).toHaveBeenCalledWith(cacheEntry); expect(createTempDirectoryMock).toHaveBeenCalledTimes(1); - expect(downloadCacheMock).toHaveBeenCalledWith(cacheEntry, archivePath); + expect(downloadCacheMock).toHaveBeenCalledWith( + cacheEntry.archiveLocation, + archivePath + ); expect(getArchiveFileSizeMock).toHaveBeenCalledWith(archivePath); expect(infoMock).toHaveBeenCalledWith(`Cache Size: ~0 MB (142 B)`); - expect(mkdirMock).toHaveBeenCalledWith(cachePath); - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? 
[ - "-xz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/") - ] - : ["-xz", "-f", archivePath, "-C", cachePath]; - - expect(execMock).toHaveBeenCalledTimes(1); - expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + expect(extractTarMock).toHaveBeenCalledTimes(1); + expect(extractTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(setCacheHitOutputMock).toHaveBeenCalledTimes(1); expect(setCacheHitOutputMock).toHaveBeenCalledWith(false); diff --git a/__tests__/save.test.ts b/__tests__/save.test.ts index 89657c4..b355076 100644 --- a/__tests__/save.test.ts +++ b/__tests__/save.test.ts @@ -1,19 +1,17 @@ import * as core from "@actions/core"; -import * as exec from "@actions/exec"; -import * as io from "@actions/io"; import * as path from "path"; import * as cacheHttpClient from "../src/cacheHttpClient"; import { Events, Inputs } from "../src/constants"; import { ArtifactCacheEntry } from "../src/contracts"; import run from "../src/save"; +import * as tar from "../src/tar"; import * as actionUtils from "../src/utils/actionUtils"; import * as testUtils from "../src/utils/testUtils"; jest.mock("@actions/core"); -jest.mock("@actions/exec"); -jest.mock("@actions/io"); -jest.mock("../src/utils/actionUtils"); jest.mock("../src/cacheHttpClient"); +jest.mock("../src/tar"); +jest.mock("../src/utils/actionUtils"); beforeAll(() => { jest.spyOn(core, "getInput").mockImplementation((name, options) => { @@ -49,10 +47,6 @@ beforeAll(() => { jest.spyOn(actionUtils, "createTempDirectory").mockImplementation(() => { return Promise.resolve("/foo/bar"); }); - - jest.spyOn(io, "which").mockImplementation(tool => { - return Promise.resolve(tool); - }); }); beforeEach(() => { @@ -128,7 +122,7 @@ test("save with exact match returns early", async () => { return primaryKey; }); - const execMock = jest.spyOn(exec, "exec"); + const createTarMock = jest.spyOn(tar, "createTar"); await run(); @@ -136,7 +130,7 @@ test("save with exact match returns early", async () => { `Cache hit occurred on the primary key ${primaryKey}, not saving cache.` ); - expect(execMock).toHaveBeenCalledTimes(0); + expect(createTarMock).toHaveBeenCalledTimes(0); expect(failedMock).toHaveBeenCalledTimes(0); }); @@ -198,9 +192,9 @@ test("save with large cache outputs warning", async () => { const cachePath = path.resolve(inputPath); testUtils.setInput(Inputs.Path, inputPath); - const execMock = jest.spyOn(exec, "exec"); + const createTarMock = jest.spyOn(tar, "createTar"); - const cacheSize = 1024 * 1024 * 1024; //~1GB, over the 400MB limit + const cacheSize = 4 * 1024 * 1024 * 1024; //~4GB, over the 2GB limit jest.spyOn(actionUtils, "getArchiveFileSize").mockImplementationOnce(() => { return cacheSize; }); @@ -209,30 +203,68 @@ test("save with large cache outputs warning", async () => { const archivePath = path.join("/foo/bar", "cache.tgz"); - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? [ - "-cz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/"), - "." 
- ] - : ["-cz", "-f", archivePath, "-C", cachePath, "."]; - - expect(execMock).toHaveBeenCalledTimes(1); - expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + expect(createTarMock).toHaveBeenCalledTimes(1); + expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(logWarningMock).toHaveBeenCalledTimes(1); expect(logWarningMock).toHaveBeenCalledWith( - "Cache size of ~1024 MB (1073741824 B) is over the 400MB limit, not saving cache." + "Cache size of ~4096 MB (4294967296 B) is over the 2GB limit, not saving cache." ); expect(failedMock).toHaveBeenCalledTimes(0); }); +test("save with reserve cache failure outputs warning", async () => { + const infoMock = jest.spyOn(core, "info"); + const logWarningMock = jest.spyOn(actionUtils, "logWarning"); + const failedMock = jest.spyOn(core, "setFailed"); + + const primaryKey = "Linux-node-bb828da54c148048dd17899ba9fda624811cfb43"; + const cacheEntry: ArtifactCacheEntry = { + cacheKey: "Linux-node-", + scope: "refs/heads/master", + creationTime: "2019-11-13T19:18:02+00:00", + archiveLocation: "www.actionscache.test/download" + }; + + jest.spyOn(core, "getState") + // Cache Entry State + .mockImplementationOnce(() => { + return JSON.stringify(cacheEntry); + }) + // Cache Key State + .mockImplementationOnce(() => { + return primaryKey; + }); + + const inputPath = "node_modules"; + testUtils.setInput(Inputs.Path, inputPath); + + const reserveCacheMock = jest + .spyOn(cacheHttpClient, "reserveCache") + .mockImplementationOnce(() => { + return Promise.resolve(-1); + }); + + const createTarMock = jest.spyOn(tar, "createTar"); + + const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); + + await run(); + + expect(reserveCacheMock).toHaveBeenCalledTimes(1); + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); + + expect(infoMock).toHaveBeenCalledWith( + `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` + ); + + expect(createTarMock).toHaveBeenCalledTimes(0); + expect(saveCacheMock).toHaveBeenCalledTimes(0); + expect(logWarningMock).toHaveBeenCalledTimes(0); + expect(failedMock).toHaveBeenCalledTimes(0); +}); + test("save with server error outputs warning", async () => { const logWarningMock = jest.spyOn(actionUtils, "logWarning"); const failedMock = jest.spyOn(core, "setFailed"); @@ -259,7 +291,14 @@ test("save with server error outputs warning", async () => { const cachePath = path.resolve(inputPath); testUtils.setInput(Inputs.Path, inputPath); - const execMock = jest.spyOn(exec, "exec"); + const cacheId = 4; + const reserveCacheMock = jest + .spyOn(cacheHttpClient, "reserveCache") + .mockImplementationOnce(() => { + return Promise.resolve(cacheId); + }); + + const createTarMock = jest.spyOn(tar, "createTar"); const saveCacheMock = jest .spyOn(cacheHttpClient, "saveCache") @@ -269,26 +308,16 @@ test("save with server error outputs warning", async () => { await run(); + expect(reserveCacheMock).toHaveBeenCalledTimes(1); + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); + const archivePath = path.join("/foo/bar", "cache.tgz"); - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? [ - "-cz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/"), - "." 
- ] - : ["-cz", "-f", archivePath, "-C", cachePath, "."]; - - expect(execMock).toHaveBeenCalledTimes(1); - expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + expect(createTarMock).toHaveBeenCalledTimes(1); + expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(saveCacheMock).toHaveBeenCalledTimes(1); - expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); + expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); expect(logWarningMock).toHaveBeenCalledTimes(1); expect(logWarningMock).toHaveBeenCalledWith("HTTP Error Occurred"); @@ -321,32 +350,29 @@ test("save with valid inputs uploads a cache", async () => { const cachePath = path.resolve(inputPath); testUtils.setInput(Inputs.Path, inputPath); - const execMock = jest.spyOn(exec, "exec"); + const cacheId = 4; + const reserveCacheMock = jest + .spyOn(cacheHttpClient, "reserveCache") + .mockImplementationOnce(() => { + return Promise.resolve(cacheId); + }); + + const createTarMock = jest.spyOn(tar, "createTar"); const saveCacheMock = jest.spyOn(cacheHttpClient, "saveCache"); await run(); + expect(reserveCacheMock).toHaveBeenCalledTimes(1); + expect(reserveCacheMock).toHaveBeenCalledWith(primaryKey); + const archivePath = path.join("/foo/bar", "cache.tgz"); - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? [ - "-cz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/"), - "." - ] - : ["-cz", "-f", archivePath, "-C", cachePath, "."]; - - expect(execMock).toHaveBeenCalledTimes(1); - expect(execMock).toHaveBeenCalledWith(`"tar"`, args); + expect(createTarMock).toHaveBeenCalledTimes(1); + expect(createTarMock).toHaveBeenCalledWith(archivePath, cachePath); expect(saveCacheMock).toHaveBeenCalledTimes(1); - expect(saveCacheMock).toHaveBeenCalledWith(primaryKey, archivePath); + expect(saveCacheMock).toHaveBeenCalledWith(cacheId, archivePath); expect(failedMock).toHaveBeenCalledTimes(0); }); diff --git a/__tests__/tar.test.ts b/__tests__/tar.test.ts new file mode 100644 index 0000000..55ff4c7 --- /dev/null +++ b/__tests__/tar.test.ts @@ -0,0 +1,58 @@ +import * as exec from "@actions/exec"; +import * as io from "@actions/io"; +import * as tar from "../src/tar"; + +jest.mock("@actions/exec"); +jest.mock("@actions/io"); + +beforeAll(() => { + jest.spyOn(io, "which").mockImplementation(tool => { + return Promise.resolve(tool); + }); +}); + +test("extract tar", async () => { + const mkdirMock = jest.spyOn(io, "mkdirP"); + const execMock = jest.spyOn(exec, "exec"); + + const archivePath = "cache.tar"; + const targetDirectory = "~/.npm/cache"; + await tar.extractTar(archivePath, targetDirectory); + + expect(mkdirMock).toHaveBeenCalledWith(targetDirectory); + + const IS_WINDOWS = process.platform === "win32"; + const tarPath = IS_WINDOWS + ? `${process.env["windir"]}\\System32\\tar.exe` + : "tar"; + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ + "-xz", + "-f", + archivePath, + "-C", + targetDirectory + ]); +}); + +test("create tar", async () => { + const execMock = jest.spyOn(exec, "exec"); + + const archivePath = "cache.tar"; + const sourceDirectory = "~/.npm/cache"; + await tar.createTar(archivePath, sourceDirectory); + + const IS_WINDOWS = process.platform === "win32"; + const tarPath = IS_WINDOWS + ? 
`${process.env["windir"]}\\System32\\tar.exe` + : "tar"; + expect(execMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ + "-cz", + "-f", + archivePath, + "-C", + sourceDirectory, + "." + ]); +}); diff --git a/dist/restore/index.js b/dist/restore/index.js index f449334..af5fd45 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1496,47 +1496,63 @@ const fs = __importStar(__webpack_require__(747)); const Handlers_1 = __webpack_require__(941); const HttpClient_1 = __webpack_require__(874); const RestClient_1 = __webpack_require__(105); -function getCacheUrl() { +const utils = __importStar(__webpack_require__(443)); +function isSuccessStatusCode(statusCode) { + return statusCode >= 200 && statusCode < 300; +} +function isRetryableStatusCode(statusCode) { + const retryableStatusCodes = [ + HttpClient_1.HttpCodes.BadGateway, + HttpClient_1.HttpCodes.ServiceUnavailable, + HttpClient_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +function getCacheApiUrl() { // Ideally we just use ACTIONS_CACHE_URL - const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || + const baseUrl = (process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RUNTIME_URL"] || "").replace("pipelines", "artifactcache"); - if (!cacheUrl) { + if (!baseUrl) { throw new Error("Cache Service Url not found, unable to restore cache."); } - core.debug(`Cache Url: ${cacheUrl}`); - return cacheUrl; + core.debug(`Cache Url: ${baseUrl}`); + return `${baseUrl}_apis/artifactcache/`; } function createAcceptHeader(type, apiVersion) { return `${type};api-version=${apiVersion}`; } function getRequestOptions() { const requestOptions = { - acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") }; return requestOptions; } +function createRestClient() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); + return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ + bearerCredentialHandler + ]); +} function getCacheEntry(keys) { + var _a; return __awaiter(this, void 0, void 0, function* () { - const cacheUrl = getCacheUrl(); - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); - const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; - const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ - bearerCredentialHandler - ]); + const restClient = createRestClient(); + const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; const response = yield restClient.get(resource, getRequestOptions()); if (response.statusCode === 204) { return null; } - if (response.statusCode !== 200) { + if (!isSuccessStatusCode(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; - if (!cacheResult || !cacheResult.archiveLocation) { + const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 0 ? 
void 0 : _a.archiveLocation; + if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core.setSecret(cacheResult.archiveLocation); + core.setSecret(cacheDownloadUrl); core.debug(`Cache Result:`); core.debug(JSON.stringify(cacheResult)); return cacheResult; @@ -1552,34 +1568,117 @@ function pipeResponseToStream(response, stream) { }); }); } -function downloadCache(cacheEntry, archivePath) { +function downloadCache(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const stream = fs.createWriteStream(archivePath); const httpClient = new HttpClient_1.HttpClient("actions/cache"); - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); + const downloadResponse = yield httpClient.get(archiveLocation); yield pipeResponseToStream(downloadResponse, stream); }); } exports.downloadCache = downloadCache; -function saveCache(key, archivePath) { +// Reserve Cache +function reserveCache(key) { + var _a, _b, _c; return __awaiter(this, void 0, void 0, function* () { - const stream = fs.createReadStream(archivePath); - const cacheUrl = getCacheUrl(); - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); - const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; - const postUrl = cacheUrl + resource; - const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ - bearerCredentialHandler - ]); + const restClient = createRestClient(); + const reserveCacheRequest = { + key + }; + const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); + return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? 
_c : -1); + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(restClient, resourceUrl, data, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const requestOptions = getRequestOptions(); requestOptions.additionalHeaders = { - "Content-Type": "application/octet-stream" + "Content-Type": "application/octet-stream", + "Content-Range": getContentRange(start, end) }; - const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); - if (response.statusCode !== 200) { - throw new Error(`Cache service responded with ${response.statusCode}`); + const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { + return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); + }); + const response = yield uploadChunkRequest(); + if (isSuccessStatusCode(response.statusCode)) { + return; + } + if (isRetryableStatusCode(response.statusCode)) { + core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`); + const retryResponse = yield uploadChunkRequest(); + if (isSuccessStatusCode(retryResponse.statusCode)) { + return; + } + } + throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`); + }); +} +function uploadFile(restClient, cacheId, archivePath) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = fs.statSync(archivePath).size; + const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); + const fd = fs.openSync(archivePath, "r"); + const concurrency = (_a = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel + const MAX_CHUNK_SIZE = (_b = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]), (_b !== null && _b !== void 0 ? 
_b : 32 * 1024 * 1024)); // 32 MB Chunks + core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); + const parallelUploads = [...new Array(concurrency).keys()]; + core.debug("Awaiting all uploads"); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE); + const start = offset; + const end = offset + chunkSize - 1; + offset += MAX_CHUNK_SIZE; + const chunk = fs.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }); + yield uploadChunk(restClient, resourceUrl, chunk, start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(restClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const requestOptions = getRequestOptions(); + const commitCacheRequest = { size: filesize }; + return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); + }); +} +function saveCache(cacheId, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const restClient = createRestClient(); + core.debug("Upload cache"); + yield uploadFile(restClient, cacheId, archivePath); + // Commit Cache + core.debug("Commiting cache"); + const cacheSize = utils.getArchiveFileSize(archivePath); + const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); + if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } core.info("Cache saved successfully"); }); @@ -2991,13 +3090,13 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -const exec_1 = __webpack_require__(986); -const io = __importStar(__webpack_require__(1)); const path = __importStar(__webpack_require__(622)); const cacheHttpClient = __importStar(__webpack_require__(154)); const constants_1 = __webpack_require__(694); +const tar_1 = __webpack_require__(943); const utils = __importStar(__webpack_require__(443)); function run() { + var _a; return __awaiter(this, void 0, void 0, function* () { try { // Validate inputs, this can cause task failure @@ -3035,7 +3134,7 @@ function run() { } try { const cacheEntry = yield cacheHttpClient.getCacheEntry(keys); - if (!cacheEntry) { + if (!((_a = cacheEntry) === null || _a === void 0 ? void 0 : _a.archiveLocation)) { core.info(`Cache not found for input keys: ${keys.join(", ")}.`); return; } @@ -3044,27 +3143,10 @@ function run() { // Store the cache result utils.setCacheState(cacheEntry); // Download the cache from the cache entry - yield cacheHttpClient.downloadCache(cacheEntry, archivePath); + yield cacheHttpClient.downloadCache(cacheEntry.archiveLocation, archivePath); const archiveFileSize = utils.getArchiveFileSize(archivePath); core.info(`Cache Size: ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B)`); - // Create directory to extract tar into - yield io.mkdirP(cachePath); - // http://man7.org/linux/man-pages/man1/tar.1.html - // tar [-options] [files or directories which to add into archive] - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? 
[ - "-xz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/") - ] - : ["-xz", "-f", archivePath, "-C", cachePath]; - const tarPath = yield io.which("tar", true); - core.debug(`Tar Path: ${tarPath}`); - yield exec_1.exec(`"${tarPath}"`, args); + yield tar_1.extractTar(archivePath, cachePath); const isExactKeyMatch = utils.isExactKeyMatch(primaryKey, cacheEntry); utils.setCacheHitOutput(isExactKeyMatch); core.info(`Cache restored from key: ${cacheEntry && cacheEntry.cacheKey}`); @@ -5160,6 +5242,79 @@ var personalaccesstoken_1 = __webpack_require__(327); exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; +/***/ }), + +/***/ 943: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const exec_1 = __webpack_require__(986); +const io = __importStar(__webpack_require__(1)); +const fs_1 = __webpack_require__(747); +function getTarPath() { + return __awaiter(this, void 0, void 0, function* () { + // Explicitly use BSD Tar on Windows + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; + if (fs_1.existsSync(systemTar)) { + return systemTar; + } + } + return yield io.which("tar", true); + }); +} +function execTar(args) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath()}"`, args); + } + catch (error) { + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); + } + throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? 
void 0 : _b.message}`); + } + }); +} +function extractTar(archivePath, targetDirectory) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + yield io.mkdirP(targetDirectory); + const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + yield execTar(args); + }); +} +exports.extractTar = extractTar; +function createTar(archivePath, sourceDirectory) { + return __awaiter(this, void 0, void 0, function* () { + const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + yield execTar(args); + }); +} +exports.createTar = createTar; + + /***/ }), /***/ 986: diff --git a/dist/save/index.js b/dist/save/index.js index 534e0cf..f46ead5 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1496,47 +1496,63 @@ const fs = __importStar(__webpack_require__(747)); const Handlers_1 = __webpack_require__(941); const HttpClient_1 = __webpack_require__(874); const RestClient_1 = __webpack_require__(105); -function getCacheUrl() { +const utils = __importStar(__webpack_require__(443)); +function isSuccessStatusCode(statusCode) { + return statusCode >= 200 && statusCode < 300; +} +function isRetryableStatusCode(statusCode) { + const retryableStatusCodes = [ + HttpClient_1.HttpCodes.BadGateway, + HttpClient_1.HttpCodes.ServiceUnavailable, + HttpClient_1.HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} +function getCacheApiUrl() { // Ideally we just use ACTIONS_CACHE_URL - const cacheUrl = (process.env["ACTIONS_CACHE_URL"] || + const baseUrl = (process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RUNTIME_URL"] || "").replace("pipelines", "artifactcache"); - if (!cacheUrl) { + if (!baseUrl) { throw new Error("Cache Service Url not found, unable to restore cache."); } - core.debug(`Cache Url: ${cacheUrl}`); - return cacheUrl; + core.debug(`Cache Url: ${baseUrl}`); + return `${baseUrl}_apis/artifactcache/`; } function createAcceptHeader(type, apiVersion) { return `${type};api-version=${apiVersion}`; } function getRequestOptions() { const requestOptions = { - acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") }; return requestOptions; } +function createRestClient() { + const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; + const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); + return new RestClient_1.RestClient("actions/cache", getCacheApiUrl(), [ + bearerCredentialHandler + ]); +} function getCacheEntry(keys) { + var _a; return __awaiter(this, void 0, void 0, function* () { - const cacheUrl = getCacheUrl(); - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); - const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent(keys.join(","))}`; - const restClient = new RestClient_1.RestClient("actions/cache", cacheUrl, [ - bearerCredentialHandler - ]); + const restClient = createRestClient(); + const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; const response = yield restClient.get(resource, getRequestOptions()); if (response.statusCode === 204) { return null; } - if (response.statusCode !== 200) { + if (!isSuccessStatusCode(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; - if (!cacheResult || !cacheResult.archiveLocation) { + const cacheDownloadUrl = (_a = cacheResult) === null || _a === void 
0 ? void 0 : _a.archiveLocation; + if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core.setSecret(cacheResult.archiveLocation); + core.setSecret(cacheDownloadUrl); core.debug(`Cache Result:`); core.debug(JSON.stringify(cacheResult)); return cacheResult; @@ -1552,34 +1568,117 @@ function pipeResponseToStream(response, stream) { }); }); } -function downloadCache(cacheEntry, archivePath) { +function downloadCache(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const stream = fs.createWriteStream(archivePath); const httpClient = new HttpClient_1.HttpClient("actions/cache"); - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const downloadResponse = yield httpClient.get(cacheEntry.archiveLocation); + const downloadResponse = yield httpClient.get(archiveLocation); yield pipeResponseToStream(downloadResponse, stream); }); } exports.downloadCache = downloadCache; -function saveCache(key, archivePath) { +// Reserve Cache +function reserveCache(key) { + var _a, _b, _c; return __awaiter(this, void 0, void 0, function* () { - const stream = fs.createReadStream(archivePath); - const cacheUrl = getCacheUrl(); - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new Handlers_1.BearerCredentialHandler(token); - const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; - const postUrl = cacheUrl + resource; - const restClient = new RestClient_1.RestClient("actions/cache", undefined, [ - bearerCredentialHandler - ]); + const restClient = createRestClient(); + const reserveCacheRequest = { + key + }; + const response = yield restClient.create("caches", reserveCacheRequest, getRequestOptions()); + return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? 
_c : -1); + }); +} +exports.reserveCache = reserveCache; +function getContentRange(start, end) { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} +function uploadChunk(restClient, resourceUrl, data, start, end) { + return __awaiter(this, void 0, void 0, function* () { + core.debug(`Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange(start, end)}`); const requestOptions = getRequestOptions(); requestOptions.additionalHeaders = { - "Content-Type": "application/octet-stream" + "Content-Type": "application/octet-stream", + "Content-Range": getContentRange(start, end) }; - const response = yield restClient.uploadStream("POST", postUrl, stream, requestOptions); - if (response.statusCode !== 200) { - throw new Error(`Cache service responded with ${response.statusCode}`); + const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { + return yield restClient.uploadStream("PATCH", resourceUrl, data, requestOptions); + }); + const response = yield uploadChunkRequest(); + if (isSuccessStatusCode(response.statusCode)) { + return; + } + if (isRetryableStatusCode(response.statusCode)) { + core.debug(`Received ${response.statusCode}, retrying chunk at offset ${start}.`); + const retryResponse = yield uploadChunkRequest(); + if (isSuccessStatusCode(retryResponse.statusCode)) { + return; + } + } + throw new Error(`Cache service responded with ${response.statusCode} during chunk upload.`); + }); +} +function uploadFile(restClient, cacheId, archivePath) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + // Upload Chunks + const fileSize = fs.statSync(archivePath).size; + const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); + const fd = fs.openSync(archivePath, "r"); + const concurrency = (_a = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel + const MAX_CHUNK_SIZE = (_b = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]), (_b !== null && _b !== void 0 ? 
_b : 32 * 1024 * 1024)); // 32 MB Chunks + core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); + const parallelUploads = [...new Array(concurrency).keys()]; + core.debug("Awaiting all uploads"); + let offset = 0; + try { + yield Promise.all(parallelUploads.map(() => __awaiter(this, void 0, void 0, function* () { + while (offset < fileSize) { + const chunkSize = Math.min(fileSize - offset, MAX_CHUNK_SIZE); + const start = offset; + const end = offset + chunkSize - 1; + offset += MAX_CHUNK_SIZE; + const chunk = fs.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }); + yield uploadChunk(restClient, resourceUrl, chunk, start, end); + } + }))); + } + finally { + fs.closeSync(fd); + } + return; + }); +} +function commitCache(restClient, cacheId, filesize) { + return __awaiter(this, void 0, void 0, function* () { + const requestOptions = getRequestOptions(); + const commitCacheRequest = { size: filesize }; + return yield restClient.create(`caches/${cacheId.toString()}`, commitCacheRequest, requestOptions); + }); +} +function saveCache(cacheId, archivePath) { + return __awaiter(this, void 0, void 0, function* () { + const restClient = createRestClient(); + core.debug("Upload cache"); + yield uploadFile(restClient, cacheId, archivePath); + // Commit Cache + core.debug("Commiting cache"); + const cacheSize = utils.getArchiveFileSize(archivePath); + const commitCacheResponse = yield commitCache(restClient, cacheId, cacheSize); + if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error(`Cache service responded with ${commitCacheResponse.statusCode} during commit cache.`); } core.info("Cache saved successfully"); }); @@ -2879,11 +2978,10 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -const exec_1 = __webpack_require__(986); -const io = __importStar(__webpack_require__(1)); const path = __importStar(__webpack_require__(622)); const cacheHttpClient = __importStar(__webpack_require__(154)); const constants_1 = __webpack_require__(694); +const tar_1 = __webpack_require__(943); const utils = __importStar(__webpack_require__(443)); function run() { return __awaiter(this, void 0, void 0, function* () { @@ -2905,35 +3003,27 @@ function run() { core.info(`Cache hit occurred on the primary key ${primaryKey}, not saving cache.`); return; } + core.debug("Reserving Cache"); + const cacheId = yield cacheHttpClient.reserveCache(primaryKey); + if (cacheId == -1) { + core.info(`Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.`); + return; + } + core.debug(`Cache ID: ${cacheId}`); const cachePath = utils.resolvePath(core.getInput(constants_1.Inputs.Path, { required: true })); core.debug(`Cache Path: ${cachePath}`); const archivePath = path.join(yield utils.createTempDirectory(), "cache.tgz"); core.debug(`Archive Path: ${archivePath}`); - // http://man7.org/linux/man-pages/man1/tar.1.html - // tar [-options] [files or directories which to add into archive] - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? [ - "-cz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/"), - "." 
- ] - : ["-cz", "-f", archivePath, "-C", cachePath, "."]; - const tarPath = yield io.which("tar", true); - core.debug(`Tar Path: ${tarPath}`); - yield exec_1.exec(`"${tarPath}"`, args); - const fileSizeLimit = 400 * 1024 * 1024; // 400MB + yield tar_1.createTar(archivePath, cachePath); + const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit const archiveFileSize = utils.getArchiveFileSize(archivePath); core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { - utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.`); + utils.logWarning(`Cache size of ~${Math.round(archiveFileSize / (1024 * 1024))} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.`); return; } - yield cacheHttpClient.saveCache(primaryKey, archivePath); + core.debug(`Saving Cache (ID: ${cacheId})`); + yield cacheHttpClient.saveCache(cacheId, archivePath); } catch (error) { utils.logWarning(error.message); @@ -5133,6 +5223,79 @@ var personalaccesstoken_1 = __webpack_require__(327); exports.PersonalAccessTokenCredentialHandler = personalaccesstoken_1.PersonalAccessTokenCredentialHandler; +/***/ }), + +/***/ 943: +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const exec_1 = __webpack_require__(986); +const io = __importStar(__webpack_require__(1)); +const fs_1 = __webpack_require__(747); +function getTarPath() { + return __awaiter(this, void 0, void 0, function* () { + // Explicitly use BSD Tar on Windows + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; + if (fs_1.existsSync(systemTar)) { + return systemTar; + } + } + return yield io.which("tar", true); + }); +} +function execTar(args) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath()}"`, args); + } + catch (error) { + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); + } + throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? 
void 0 : _b.message}`); + } + }); +} +function extractTar(archivePath, targetDirectory) { + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + yield io.mkdirP(targetDirectory); + const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + yield execTar(args); + }); +} +exports.extractTar = extractTar; +function createTar(archivePath, sourceDirectory) { + return __awaiter(this, void 0, void 0, function* () { + const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + yield execTar(args); + }); +} +exports.createTar = createTar; + + /***/ }), /***/ 986: diff --git a/examples.md b/examples.md index 7d66b62..948179c 100644 --- a/examples.md +++ b/examples.md @@ -1,27 +1,20 @@ # Examples -- [Examples](#examples) - - [C# - Nuget](#c---nuget) - - [Elixir - Mix](#elixir---mix) - - [Go - Modules](#go---modules) - - [Java - Gradle](#java---gradle) - - [Java - Maven](#java---maven) - - [Node - npm](#node---npm) - - [macOS and Ubuntu](#macos-and-ubuntu) - - [Windows](#windows) - - [Using multiple systems and `npm config`](#using-multiple-systems-and-npm-config) - - [Node - Yarn](#node---yarn) - - [PHP - Composer](#php---composer) - - [Python - pip](#python---pip) - - [Simple example](#simple-example) - - [Multiple OS's in a workflow](#multiple-oss-in-a-workflow) - - [Using a script to get cache location](#using-a-script-to-get-cache-location) - - [Ruby - Gem](#ruby---gem) - - [Rust - Cargo](#rust---cargo) - - [Swift, Objective-C - Carthage](#swift-objective-c---carthage) - - [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) +- [C# - NuGet](#c---nuget) +- [Elixir - Mix](#elixir---mix) +- [Go - Modules](#go---modules) +- [Java - Gradle](#java---gradle) +- [Java - Maven](#java---maven) +- [Node - npm](#node---npm) +- [Node - Yarn](#node---yarn) +- [PHP - Composer](#php---composer) +- [Python - pip](#python---pip) +- [Ruby - Gem](#ruby---gem) +- [Rust - Cargo](#rust---cargo) +- [Swift, Objective-C - Carthage](#swift-objective-c---carthage) +- [Swift, Objective-C - CocoaPods](#swift-objective-c---cocoapods) -## C# - Nuget +## C# - NuGet Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/package-references-in-project-files#locking-dependencies): ```yaml @@ -33,6 +26,21 @@ Using [NuGet lock files](https://docs.microsoft.com/nuget/consume-packages/packa ${{ runner.os }}-nuget- ``` +Depending on the environment, huge packages might be pre-installed in the global cache folder. +If you do not want to include them, consider to move the cache folder like below. +>Note: This workflow does not work for projects that require files to be placed in user profile package folder +```yaml +env: + NUGET_PACKAGES: ${{ github.workspace }}/.nuget/packages +steps: + - uses: actions/cache@v1 + with: + path: ${{ github.workspace }}/.nuget/packages + key: ${{ runner.os }}-nuget-${{ hashFiles('**/packages.lock.json') }} + restore-keys: | + ${{ runner.os }}-nuget- +``` + ## Elixir - Mix ```yaml - uses: actions/cache@v1 @@ -226,6 +234,14 @@ Replace `~/.cache/pip` with the correct `path` if not using Ubuntu. restore-keys: | ${{ runner.os }}-gem- ``` +When dependencies are installed later in the workflow, we must specify the same path for the bundler. 
+ +```yaml +- name: Bundle install + run: | + bundle config path vendor/bundle + bundle install --jobs 4 --retry 3 +``` ## Rust - Cargo diff --git a/package-lock.json b/package-lock.json index 2e8413e..d60f8ae 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.3", + "version": "1.1.0", "lockfileVersion": 1, "requires": true, "dependencies": { @@ -2854,9 +2854,9 @@ "dev": true }, "handlebars": { - "version": "4.5.1", - "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.1.tgz", - "integrity": "sha512-C29UoFzHe9yM61lOsIlCE5/mQVGrnIOrOq7maQl76L7tYPCgC1og0Ajt6uWnX4ZTxBPnjw+CUvawphwCfJgUnA==", + "version": "4.5.3", + "resolved": "https://registry.npmjs.org/handlebars/-/handlebars-4.5.3.tgz", + "integrity": "sha512-3yPecJoJHK/4c6aZhSvxOyG4vJKDshV36VHp0iVCDVh7o9w2vwi3NSnL2MMPj3YdduqaBcu7cGbggJQM0br9xA==", "dev": true, "requires": { "neo-async": "^2.6.0", @@ -4859,9 +4859,9 @@ "dev": true }, "prettier": { - "version": "1.18.2", - "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.18.2.tgz", - "integrity": "sha512-OeHeMc0JhFE9idD4ZdtNibzY0+TPHSpSSb9h8FqtP+YnoZZ1sl8Vc9b1sasjfymH3SonAF4QcA2+mzHPhMvIiw==", + "version": "1.19.1", + "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.19.1.tgz", + "integrity": "sha512-s7PoyDv/II1ObgQunCbB9PdLmUcBZcnWOcxDh7O0N/UwDEsHyqkW+Qh28jW+mVuCdx7gLB0BotYI1Y6uI9iyew==", "dev": true }, "prettier-linter-helpers": { @@ -5983,15 +5983,15 @@ } }, "typescript": { - "version": "3.6.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.6.4.tgz", - "integrity": "sha512-unoCll1+l+YK4i4F8f22TaNVPRHcD9PA3yCuZ8g5e0qGqlVlJ/8FSateOLLSagn+Yg5+ZwuPkL8LFUc0Jcvksg==", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-3.7.3.tgz", + "integrity": "sha512-Mcr/Qk7hXqFBXMN7p7Lusj1ktCBydylfQM/FZCk5glCNQJrCUKPkMHdo9R0MTFWsC/4kPFvDS0fDPvukfCkFsw==", "dev": true }, "uglify-js": { - "version": "3.6.7", - "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.6.7.tgz", - "integrity": "sha512-4sXQDzmdnoXiO+xvmTzQsfIiwrjUCSA95rSP4SEd8tDb51W2TiDOlL76Hl+Kw0Ie42PSItCW8/t6pBNCF2R48A==", + "version": "3.7.3", + "resolved": "https://registry.npmjs.org/uglify-js/-/uglify-js-3.7.3.tgz", + "integrity": "sha512-7tINm46/3puUA4hCkKYo4Xdts+JDaVC9ZPRcG8Xw9R4nhO/gZgUM3TENq8IF4Vatk8qCig4MzP/c8G4u2BkVQg==", "dev": true, "optional": true, "requires": { diff --git a/package.json b/package.json index 42fbdbe..7de321b 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "cache", - "version": "1.0.3", + "version": "1.1.0", "private": true, "description": "Cache dependencies and build outputs", "main": "dist/restore/index.js", @@ -46,8 +46,8 @@ "jest": "^24.8.0", "jest-circus": "^24.7.1", "nock": "^11.7.0", - "prettier": "1.18.2", + "prettier": "^1.19.1", "ts-jest": "^24.0.2", - "typescript": "^3.6.4" + "typescript": "^3.7.3" } } diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 8a2014f..89bca63 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -1,26 +1,49 @@ import * as core from "@actions/core"; import * as fs from "fs"; import { BearerCredentialHandler } from "typed-rest-client/Handlers"; -import { HttpClient } from "typed-rest-client/HttpClient"; +import { HttpClient, HttpCodes } from "typed-rest-client/HttpClient"; import { IHttpClientResponse } from "typed-rest-client/Interfaces"; -import { IRequestOptions, RestClient } from "typed-rest-client/RestClient"; -import { ArtifactCacheEntry } from "./contracts"; 
+import { + IRequestOptions, + RestClient, + IRestResponse +} from "typed-rest-client/RestClient"; +import { + ArtifactCacheEntry, + CommitCacheRequest, + ReserveCacheRequest, + ReserveCacheResponse +} from "./contracts"; +import * as utils from "./utils/actionUtils"; -function getCacheUrl(): string { +function isSuccessStatusCode(statusCode: number): boolean { + return statusCode >= 200 && statusCode < 300; +} + +function isRetryableStatusCode(statusCode: number): boolean { + const retryableStatusCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout + ]; + return retryableStatusCodes.includes(statusCode); +} + +function getCacheApiUrl(): string { // Ideally we just use ACTIONS_CACHE_URL - const cacheUrl: string = ( + const baseUrl: string = ( process.env["ACTIONS_CACHE_URL"] || process.env["ACTIONS_RUNTIME_URL"] || "" ).replace("pipelines", "artifactcache"); - if (!cacheUrl) { + if (!baseUrl) { throw new Error( "Cache Service Url not found, unable to restore cache." ); } - core.debug(`Cache Url: ${cacheUrl}`); - return cacheUrl; + core.debug(`Cache Url: ${baseUrl}`); + return `${baseUrl}_apis/artifactcache/`; } function createAcceptHeader(type: string, apiVersion: string): string { @@ -29,26 +52,26 @@ function createAcceptHeader(type: string, apiVersion: string): string { function getRequestOptions(): IRequestOptions { const requestOptions: IRequestOptions = { - acceptHeader: createAcceptHeader("application/json", "5.2-preview.1") + acceptHeader: createAcceptHeader("application/json", "6.0-preview.1") }; return requestOptions; } -export async function getCacheEntry( - keys: string[] -): Promise { - const cacheUrl = getCacheUrl(); +function createRestClient(): RestClient { const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; const bearerCredentialHandler = new BearerCredentialHandler(token); - const resource = `_apis/artifactcache/cache?keys=${encodeURIComponent( - keys.join(",") - )}`; - - const restClient = new RestClient("actions/cache", cacheUrl, [ + return new RestClient("actions/cache", getCacheApiUrl(), [ bearerCredentialHandler ]); +} + +export async function getCacheEntry( + keys: string[] +): Promise { + const restClient = createRestClient(); + const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; const response = await restClient.get( resource, @@ -57,14 +80,15 @@ export async function getCacheEntry( if (response.statusCode === 204) { return null; } - if (response.statusCode !== 200) { + if (!isSuccessStatusCode(response.statusCode)) { throw new Error(`Cache service responded with ${response.statusCode}`); } const cacheResult = response.result; - if (!cacheResult || !cacheResult.archiveLocation) { + const cacheDownloadUrl = cacheResult?.archiveLocation; + if (!cacheDownloadUrl) { throw new Error("Cache not found."); } - core.setSecret(cacheResult.archiveLocation); + core.setSecret(cacheDownloadUrl); core.debug(`Cache Result:`); core.debug(JSON.stringify(cacheResult)); @@ -83,46 +107,178 @@ async function pipeResponseToStream( } export async function downloadCache( - cacheEntry: ArtifactCacheEntry, + archiveLocation: string, archivePath: string ): Promise { const stream = fs.createWriteStream(archivePath); const httpClient = new HttpClient("actions/cache"); - // eslint-disable-next-line @typescript-eslint/no-non-null-assertion - const downloadResponse = await httpClient.get(cacheEntry.archiveLocation!); + const downloadResponse = await httpClient.get(archiveLocation); await pipeResponseToStream(downloadResponse, stream); 
} -export async function saveCache( - key: string, - archivePath: string +// Reserve Cache +export async function reserveCache(key: string): Promise { + const restClient = createRestClient(); + + const reserveCacheRequest: ReserveCacheRequest = { + key + }; + const response = await restClient.create( + "caches", + reserveCacheRequest, + getRequestOptions() + ); + + return response?.result?.cacheId ?? -1; +} + +function getContentRange(start: number, end: number): string { + // Format: `bytes start-end/filesize + // start and end are inclusive + // filesize can be * + // For a 200 byte chunk starting at byte 0: + // Content-Range: bytes 0-199/* + return `bytes ${start}-${end}/*`; +} + +async function uploadChunk( + restClient: RestClient, + resourceUrl: string, + data: NodeJS.ReadableStream, + start: number, + end: number ): Promise { - const stream = fs.createReadStream(archivePath); - - const cacheUrl = getCacheUrl(); - const token = process.env["ACTIONS_RUNTIME_TOKEN"] || ""; - const bearerCredentialHandler = new BearerCredentialHandler(token); - - const resource = `_apis/artifactcache/cache/${encodeURIComponent(key)}`; - const postUrl = cacheUrl + resource; - - const restClient = new RestClient("actions/cache", undefined, [ - bearerCredentialHandler - ]); - + core.debug( + `Uploading chunk of size ${end - + start + + 1} bytes at offset ${start} with content range: ${getContentRange( + start, + end + )}` + ); const requestOptions = getRequestOptions(); requestOptions.additionalHeaders = { - "Content-Type": "application/octet-stream" + "Content-Type": "application/octet-stream", + "Content-Range": getContentRange(start, end) }; - const response = await restClient.uploadStream( - "POST", - postUrl, - stream, + const uploadChunkRequest = async (): Promise> => { + return await restClient.uploadStream( + "PATCH", + resourceUrl, + data, + requestOptions + ); + }; + + const response = await uploadChunkRequest(); + if (isSuccessStatusCode(response.statusCode)) { + return; + } + + if (isRetryableStatusCode(response.statusCode)) { + core.debug( + `Received ${response.statusCode}, retrying chunk at offset ${start}.` + ); + const retryResponse = await uploadChunkRequest(); + if (isSuccessStatusCode(retryResponse.statusCode)) { + return; + } + } + + throw new Error( + `Cache service responded with ${response.statusCode} during chunk upload.` + ); +} + +async function uploadFile( + restClient: RestClient, + cacheId: number, + archivePath: string +): Promise { + // Upload Chunks + const fileSize = fs.statSync(archivePath).size; + const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); + const fd = fs.openSync(archivePath, "r"); + + const concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]) ?? 4; // # of HTTP requests in parallel + const MAX_CHUNK_SIZE = + Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) ?? 
32 * 1024 * 1024; // 32 MB Chunks + core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); + + const parallelUploads = [...new Array(concurrency).keys()]; + core.debug("Awaiting all uploads"); + let offset = 0; + + try { + await Promise.all( + parallelUploads.map(async () => { + while (offset < fileSize) { + const chunkSize = Math.min( + fileSize - offset, + MAX_CHUNK_SIZE + ); + const start = offset; + const end = offset + chunkSize - 1; + offset += MAX_CHUNK_SIZE; + const chunk = fs.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }); + + await uploadChunk( + restClient, + resourceUrl, + chunk, + start, + end + ); + } + }) + ); + } finally { + fs.closeSync(fd); + } + return; +} + +async function commitCache( + restClient: RestClient, + cacheId: number, + filesize: number +): Promise> { + const requestOptions = getRequestOptions(); + const commitCacheRequest: CommitCacheRequest = { size: filesize }; + return await restClient.create( + `caches/${cacheId.toString()}`, + commitCacheRequest, requestOptions ); - if (response.statusCode !== 200) { - throw new Error(`Cache service responded with ${response.statusCode}`); +} + +export async function saveCache( + cacheId: number, + archivePath: string +): Promise { + const restClient = createRestClient(); + + core.debug("Upload cache"); + await uploadFile(restClient, cacheId, archivePath); + + // Commit Cache + core.debug("Commiting cache"); + const cacheSize = utils.getArchiveFileSize(archivePath); + const commitCacheResponse = await commitCache( + restClient, + cacheId, + cacheSize + ); + if (!isSuccessStatusCode(commitCacheResponse.statusCode)) { + throw new Error( + `Cache service responded with ${commitCacheResponse.statusCode} during commit cache.` + ); } core.info("Cache saved successfully"); diff --git a/src/contracts.d.ts b/src/contracts.d.ts index 8478b83..269c7d9 100644 --- a/src/contracts.d.ts +++ b/src/contracts.d.ts @@ -4,3 +4,16 @@ export interface ArtifactCacheEntry { creationTime?: string; archiveLocation?: string; } + +export interface CommitCacheRequest { + size: number; +} + +export interface ReserveCacheRequest { + key: string; + version?: string; +} + +export interface ReserveCacheResponse { + cacheId: number; +} diff --git a/src/restore.ts b/src/restore.ts index 15570cd..4911e7e 100644 --- a/src/restore.ts +++ b/src/restore.ts @@ -1,9 +1,8 @@ import * as core from "@actions/core"; -import { exec } from "@actions/exec"; -import * as io from "@actions/io"; import * as path from "path"; import * as cacheHttpClient from "./cacheHttpClient"; import { Events, Inputs, State } from "./constants"; +import { extractTar } from "./tar"; import * as utils from "./utils/actionUtils"; async function run(): Promise { @@ -61,7 +60,7 @@ async function run(): Promise { try { const cacheEntry = await cacheHttpClient.getCacheEntry(keys); - if (!cacheEntry) { + if (!cacheEntry?.archiveLocation) { core.info( `Cache not found for input keys: ${keys.join(", ")}.` ); @@ -78,7 +77,10 @@ async function run(): Promise { utils.setCacheState(cacheEntry); // Download the cache from the cache entry - await cacheHttpClient.downloadCache(cacheEntry, archivePath); + await cacheHttpClient.downloadCache( + cacheEntry.archiveLocation, + archivePath + ); const archiveFileSize = utils.getArchiveFileSize(archivePath); core.info( @@ -87,27 +89,7 @@ async function run(): Promise { )} MB (${archiveFileSize} B)` ); - // Create directory to extract tar into - await io.mkdirP(cachePath); - - // 
http://man7.org/linux/man-pages/man1/tar.1.html - // tar [-options] [files or directories which to add into archive] - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? [ - "-xz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/") - ] - : ["-xz", "-f", archivePath, "-C", cachePath]; - - const tarPath = await io.which("tar", true); - core.debug(`Tar Path: ${tarPath}`); - - await exec(`"${tarPath}"`, args); + await extractTar(archivePath, cachePath); const isExactKeyMatch = utils.isExactKeyMatch( primaryKey, diff --git a/src/save.ts b/src/save.ts index 21f32d3..ee64e42 100644 --- a/src/save.ts +++ b/src/save.ts @@ -1,9 +1,8 @@ import * as core from "@actions/core"; -import { exec } from "@actions/exec"; -import * as io from "@actions/io"; import * as path from "path"; import * as cacheHttpClient from "./cacheHttpClient"; import { Events, Inputs, State } from "./constants"; +import { createTar } from "./tar"; import * as utils from "./utils/actionUtils"; async function run(): Promise { @@ -35,6 +34,15 @@ async function run(): Promise { return; } + core.debug("Reserving Cache"); + const cacheId = await cacheHttpClient.reserveCache(primaryKey); + if (cacheId == -1) { + core.info( + `Unable to reserve cache with key ${primaryKey}, another job may be creating this cache.` + ); + return; + } + core.debug(`Cache ID: ${cacheId}`); const cachePath = utils.resolvePath( core.getInput(Inputs.Path, { required: true }) ); @@ -46,38 +54,22 @@ async function run(): Promise { ); core.debug(`Archive Path: ${archivePath}`); - // http://man7.org/linux/man-pages/man1/tar.1.html - // tar [-options] [files or directories which to add into archive] - const IS_WINDOWS = process.platform === "win32"; - const args = IS_WINDOWS - ? [ - "-cz", - "--force-local", - "-f", - archivePath.replace(/\\/g, "/"), - "-C", - cachePath.replace(/\\/g, "/"), - "." 
- ] - : ["-cz", "-f", archivePath, "-C", cachePath, "."]; + await createTar(archivePath, cachePath); - const tarPath = await io.which("tar", true); - core.debug(`Tar Path: ${tarPath}`); - await exec(`"${tarPath}"`, args); - - const fileSizeLimit = 400 * 1024 * 1024; // 400MB + const fileSizeLimit = 2 * 1024 * 1024 * 1024; // 2GB per repo limit const archiveFileSize = utils.getArchiveFileSize(archivePath); core.debug(`File Size: ${archiveFileSize}`); if (archiveFileSize > fileSizeLimit) { utils.logWarning( `Cache size of ~${Math.round( archiveFileSize / (1024 * 1024) - )} MB (${archiveFileSize} B) is over the 400MB limit, not saving cache.` + )} MB (${archiveFileSize} B) is over the 2GB limit, not saving cache.` ); return; } - await cacheHttpClient.saveCache(primaryKey, archivePath); + core.debug(`Saving Cache (ID: ${cacheId})`); + await cacheHttpClient.saveCache(cacheId, archivePath); } catch (error) { utils.logWarning(error.message); } diff --git a/src/tar.ts b/src/tar.ts new file mode 100644 index 0000000..1f572d1 --- /dev/null +++ b/src/tar.ts @@ -0,0 +1,47 @@ +import { exec } from "@actions/exec"; +import * as io from "@actions/io"; +import { existsSync } from "fs"; + +async function getTarPath(): Promise { + // Explicitly use BSD Tar on Windows + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; + if (existsSync(systemTar)) { + return systemTar; + } + } + return await io.which("tar", true); +} + +async function execTar(args: string[]): Promise { + try { + await exec(`"${await getTarPath()}"`, args); + } catch (error) { + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + throw new Error( + `Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.` + ); + } + throw new Error(`Tar failed with error: ${error?.message}`); + } +} + +export async function extractTar( + archivePath: string, + targetDirectory: string +): Promise { + // Create directory to extract tar into + await io.mkdirP(targetDirectory); + const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + await execTar(args); +} + +export async function createTar( + archivePath: string, + sourceDirectory: string +): Promise { + const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + await execTar(args); +} From cf4f44db70fa0ef3627a1bb7f5136becb53e2c5d Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Mon, 6 Jan 2020 13:50:39 -0500 Subject: [PATCH 08/21] Fix invalid array --- src/cacheHttpClient.ts | 16 ++++++++++++---- 1 file changed, 12 insertions(+), 4 deletions(-) diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 89bca63..4defdf8 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -201,11 +201,19 @@ async function uploadFile( const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); const fd = fs.openSync(archivePath, "r"); - const concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]) ?? 4; // # of HTTP requests in parallel - const MAX_CHUNK_SIZE = - Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]) ?? 
32 * 1024 * 1024; // 32 MB Chunks - core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); + let concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]); // # of HTTP requests in parallel + if (Number.isNaN(concurrency) || concurrency < 0) { + concurrency = 4; + } + let MAX_CHUNK_SIZE = + Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]); + + if (Number.isNaN(MAX_CHUNK_SIZE) || MAX_CHUNK_SIZE < 0) { + concurrency = 32 * 1024 * 1024; // 32 MB Chunks + } + + core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); const parallelUploads = [...new Array(concurrency).keys()]; core.debug("Awaiting all uploads"); let offset = 0; From 10a14413e745e187cac1bc816a54ed95af724c1a Mon Sep 17 00:00:00 2001 From: Josh Gross Date: Mon, 6 Jan 2020 13:51:23 -0500 Subject: [PATCH 09/21] Update release binaries --- dist/restore/index.js | 11 ++++++++--- dist/save/index.js | 11 ++++++++--- 2 files changed, 16 insertions(+), 6 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index af5fd45..d3825a0 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1626,14 +1626,19 @@ function uploadChunk(restClient, resourceUrl, data, start, end) { }); } function uploadFile(restClient, cacheId, archivePath) { - var _a, _b; return __awaiter(this, void 0, void 0, function* () { // Upload Chunks const fileSize = fs.statSync(archivePath).size; const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); const fd = fs.openSync(archivePath, "r"); - const concurrency = (_a = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel - const MAX_CHUNK_SIZE = (_b = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]), (_b !== null && _b !== void 0 ? _b : 32 * 1024 * 1024)); // 32 MB Chunks + let concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]); // # of HTTP requests in parallel + if (Number.isNaN(concurrency) || concurrency < 0) { + concurrency = 4; + } + let MAX_CHUNK_SIZE = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]); + if (Number.isNaN(MAX_CHUNK_SIZE) || MAX_CHUNK_SIZE < 0) { + concurrency = 32 * 1024 * 1024; // 32 MB Chunks + } core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); const parallelUploads = [...new Array(concurrency).keys()]; core.debug("Awaiting all uploads"); diff --git a/dist/save/index.js b/dist/save/index.js index f46ead5..70caa97 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1626,14 +1626,19 @@ function uploadChunk(restClient, resourceUrl, data, start, end) { }); } function uploadFile(restClient, cacheId, archivePath) { - var _a, _b; return __awaiter(this, void 0, void 0, function* () { // Upload Chunks const fileSize = fs.statSync(archivePath).size; const resourceUrl = getCacheApiUrl() + "caches/" + cacheId.toString(); const fd = fs.openSync(archivePath, "r"); - const concurrency = (_a = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]), (_a !== null && _a !== void 0 ? _a : 4)); // # of HTTP requests in parallel - const MAX_CHUNK_SIZE = (_b = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]), (_b !== null && _b !== void 0 ? 
_b : 32 * 1024 * 1024)); // 32 MB Chunks + let concurrency = Number(process.env["CACHE_UPLOAD_CONCURRENCY"]); // # of HTTP requests in parallel + if (Number.isNaN(concurrency) || concurrency < 0) { + concurrency = 4; + } + let MAX_CHUNK_SIZE = Number(process.env["CACHE_UPLOAD_CHUNK_SIZE"]); + if (Number.isNaN(MAX_CHUNK_SIZE) || MAX_CHUNK_SIZE < 0) { + concurrency = 32 * 1024 * 1024; // 32 MB Chunks + } core.debug(`Concurrency: ${concurrency} and Chunk Size: ${MAX_CHUNK_SIZE}`); const parallelUploads = [...new Array(concurrency).keys()]; core.debug("Awaiting all uploads"); From 84e606dfac347c57a599fe0acefc86daab454178 Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Thu, 9 Apr 2020 10:29:33 -0400 Subject: [PATCH 10/21] Fallback to GNU tar if BSD tar is unavailable --- __tests__/tar.test.ts | 55 +++++++++++++++++++++++---- dist/restore/index.js | 87 ++++++++++++++++++++++++++++++++++++++++--- dist/save/index.js | 87 ++++++++++++++++++++++++++++++++++++++++--- src/tar.ts | 48 +++++++++++++++++++----- 4 files changed, 249 insertions(+), 28 deletions(-) diff --git a/__tests__/tar.test.ts b/__tests__/tar.test.ts index 55ff4c7..cc94a4d 100644 --- a/__tests__/tar.test.ts +++ b/__tests__/tar.test.ts @@ -1,5 +1,7 @@ import * as exec from "@actions/exec"; import * as io from "@actions/io"; +import * as fs from "fs"; +import * as path from "path"; import * as tar from "../src/tar"; jest.mock("@actions/exec"); @@ -11,17 +13,19 @@ beforeAll(() => { }); }); -test("extract tar", async () => { +test("extract BSD tar", async () => { const mkdirMock = jest.spyOn(io, "mkdirP"); const execMock = jest.spyOn(exec, "exec"); - const archivePath = "cache.tar"; + const IS_WINDOWS = process.platform === "win32"; + const archivePath = IS_WINDOWS + ? `${process.env["windir"]}\\fakepath\\cache.tar` + : "cache.tar"; const targetDirectory = "~/.npm/cache"; await tar.extractTar(archivePath, targetDirectory); expect(mkdirMock).toHaveBeenCalledWith(targetDirectory); - const IS_WINDOWS = process.platform === "win32"; const tarPath = IS_WINDOWS ? 
`${process.env["windir"]}\\System32\\tar.exe` : "tar"; @@ -29,13 +33,48 @@ test("extract tar", async () => { expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ "-xz", "-f", - archivePath, + archivePath?.replace(/\\/g, "/"), "-C", - targetDirectory + targetDirectory?.replace(/\\/g, "/"), ]); }); -test("create tar", async () => { +test("extract GNU tar", async () => { + const IS_WINDOWS = process.platform === "win32"; + if (IS_WINDOWS) { + jest.mock("fs"); + + const execMock = jest.spyOn(exec, "exec"); + const existsSyncMock = jest + .spyOn(fs, "existsSync") + .mockReturnValue(false); + const isGnuTarMock = jest + .spyOn(tar, "isGnuTar") + .mockReturnValue(Promise.resolve(true)); + const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`; + const targetDirectory = "~/.npm/cache"; + + await tar.extractTar(archivePath, targetDirectory); + + expect(existsSyncMock).toHaveBeenCalledTimes(1); + expect(isGnuTarMock).toHaveBeenCalledTimes(1); + expect(execMock).toHaveBeenCalledTimes(2); + expect(execMock).toHaveBeenLastCalledWith( + "tar", + [ + "-xz", + "-f", + archivePath?.replace(/\\/g, "/"), + "-C", + targetDirectory?.replace(/\\/g, "/"), + "--force-local" + ], + { cwd: undefined } + ); + } +}); + +test("create BSD tar", async () => { const execMock = jest.spyOn(exec, "exec"); const archivePath = "cache.tar"; @@ -50,9 +89,9 @@ test("create tar", async () => { expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ "-cz", "-f", - archivePath, + archivePath?.replace(/\\/g, "/"), "-C", - sourceDirectory, + sourceDirectory?.replace(/\\/g, "/"), "." ]); }); diff --git a/dist/restore/index.js b/dist/restore/index.js index a3ea855..8ccadfb 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -2928,10 +2928,34 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); +<<<<<<< HEAD function getTarPath() { +======= +const path = __importStar(__webpack_require__(622)); +const constants_1 = __webpack_require__(694); +function isGnuTar() { + return __awaiter(this, void 0, void 0, function* () { + core.debug("Checking tar --version"); + let versionOutput = ""; + yield exec_1.exec("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + return versionOutput.toUpperCase().includes("GNU TAR"); + }); +} +exports.isGnuTar = isGnuTar; +function getTarPath(args) { +>>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable return __awaiter(this, void 0, void 0, function* () { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; @@ -2940,38 +2964,91 @@ function getTarPath() { if (fs_1.existsSync(systemTar)) { return systemTar; } + else if (isGnuTar()) { + args.push("--force-local"); + } } return yield io.which("tar", true); }); } +<<<<<<< HEAD function execTar(args) { var _a, _b; return __awaiter(this, void 0, void 0, function* () { try { yield exec_1.exec(`"${yield getTarPath()}"`, args); +======= +function execTar(args, cwd) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd }); +>>>>>>> 4fa017f... 
Fallback to GNU tar if BSD tar is unavailable } catch (error) { - const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); - } - throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); + throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`); } }); } +<<<<<<< HEAD function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; +======= +function getWorkingDirectory() { + var _a; + return _a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd()); +} +function extractTar(archivePath) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + const args = [ + "-xz", + "-f", + (_a = archivePath) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), + "-P", + "-C", + (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/") + ]; +>>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable yield execTar(args); }); } exports.extractTar = extractTar; +<<<<<<< HEAD function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; yield execTar(args); +======= +function createTar(archiveFolder, sourceDirectories) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = "manifest.txt"; + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n")); + const workingDirectory = getWorkingDirectory(); + const args = [ + "-cz", + "-f", +<<<<<<< HEAD + constants_1.CacheFilename, + "-P", +======= + (_a = constants_1.CacheFilename) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), +>>>>>>> Fallback to GNU tar if BSD tar is unavailable + "-C", + (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/"), + "--files-from", + manifestFilename + ]; + yield execTar(args, archiveFolder); +>>>>>>> 4fa017f... 
Fallback to GNU tar if BSD tar is unavailable }); } exports.createTar = createTar; diff --git a/dist/save/index.js b/dist/save/index.js index e7e0eae..33d75c6 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -2909,10 +2909,34 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); +<<<<<<< HEAD function getTarPath() { +======= +const path = __importStar(__webpack_require__(622)); +const constants_1 = __webpack_require__(694); +function isGnuTar() { + return __awaiter(this, void 0, void 0, function* () { + core.debug("Checking tar --version"); + let versionOutput = ""; + yield exec_1.exec("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + return versionOutput.toUpperCase().includes("GNU TAR"); + }); +} +exports.isGnuTar = isGnuTar; +function getTarPath(args) { +>>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable return __awaiter(this, void 0, void 0, function* () { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; @@ -2921,38 +2945,91 @@ function getTarPath() { if (fs_1.existsSync(systemTar)) { return systemTar; } + else if (isGnuTar()) { + args.push("--force-local"); + } } return yield io.which("tar", true); }); } +<<<<<<< HEAD function execTar(args) { var _a, _b; return __awaiter(this, void 0, void 0, function* () { try { yield exec_1.exec(`"${yield getTarPath()}"`, args); +======= +function execTar(args, cwd) { + var _a; + return __awaiter(this, void 0, void 0, function* () { + try { + yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd }); +>>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable } catch (error) { - const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}. Ensure BSD tar is installed and on the PATH.`); - } - throw new Error(`Tar failed with error: ${(_b = error) === null || _b === void 0 ? void 0 : _b.message}`); + throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`); } }); } +<<<<<<< HEAD function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; +======= +function getWorkingDirectory() { + var _a; + return _a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd()); +} +function extractTar(archivePath) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + // Create directory to extract tar into + const workingDirectory = getWorkingDirectory(); + yield io.mkdirP(workingDirectory); + const args = [ + "-xz", + "-f", + (_a = archivePath) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), + "-P", + "-C", + (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/") + ]; +>>>>>>> 4fa017f... 
Fallback to GNU tar if BSD tar is unavailable yield execTar(args); }); } exports.extractTar = extractTar; +<<<<<<< HEAD function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; yield execTar(args); +======= +function createTar(archiveFolder, sourceDirectories) { + var _a, _b; + return __awaiter(this, void 0, void 0, function* () { + // Write source directories to manifest.txt to avoid command length limits + const manifestFilename = "manifest.txt"; + fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n")); + const workingDirectory = getWorkingDirectory(); + const args = [ + "-cz", + "-f", +<<<<<<< HEAD + constants_1.CacheFilename, + "-P", +======= + (_a = constants_1.CacheFilename) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), +>>>>>>> Fallback to GNU tar if BSD tar is unavailable + "-C", + (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/"), + "--files-from", + manifestFilename + ]; + yield execTar(args, archiveFolder); +>>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable }); } exports.createTar = createTar; diff --git a/src/tar.ts b/src/tar.ts index 1f572d1..c20c15d 100644 --- a/src/tar.ts +++ b/src/tar.ts @@ -1,14 +1,35 @@ +import * as core from "@actions/core"; import { exec } from "@actions/exec"; import * as io from "@actions/io"; import { existsSync } from "fs"; +import * as path from "path"; -async function getTarPath(): Promise { +export async function isGnuTar(): Promise { + core.debug("Checking tar --version"); + let versionOutput = ""; + await exec("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data: Buffer): string => + (versionOutput += data.toString()), + stderr: (data: Buffer): string => (versionOutput += data.toString()) + } + }); + + core.debug(versionOutput.trim()); + return versionOutput.toUpperCase().includes("GNU TAR"); +} + +async function getTarPath(args: string[]): Promise { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; if (IS_WINDOWS) { const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; if (existsSync(systemTar)) { return systemTar; + } else if (isGnuTar()) { + args.push("--force-local"); } } return await io.which("tar", true); @@ -16,14 +37,8 @@ async function getTarPath(): Promise { async function execTar(args: string[]): Promise { try { - await exec(`"${await getTarPath()}"`, args); + await exec(`"${await getTarPath(args)}"`, args); } catch (error) { - const IS_WINDOWS = process.platform === "win32"; - if (IS_WINDOWS) { - throw new Error( - `Tar failed with error: ${error?.message}. Ensure BSD tar is installed and on the PATH.` - ); - } throw new Error(`Tar failed with error: ${error?.message}`); } } @@ -34,7 +49,13 @@ export async function extractTar( ): Promise { // Create directory to extract tar into await io.mkdirP(targetDirectory); - const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + const args = [ + "-xz", + "-f", + archivePath?.replace(/\\/g, "/"), + "-C", + targetDirectory?.replace(/\\/g, "/") + ]; await execTar(args); } @@ -42,6 +63,13 @@ export async function createTar( archivePath: string, sourceDirectory: string ): Promise { - const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + const args = [ + "-cz", + "-f", + archivePath?.replace(/\\/g, "/"), + "-C", + sourceDirectory?.replace(/\\/g, "/"), + "." 
+ ]; await execTar(args); } From 96e5a46c57ebbeb88f20116716d147b285ba34d0 Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Fri, 10 Apr 2020 15:26:15 -0400 Subject: [PATCH 11/21] Fix test --- __tests__/tar.test.ts | 16 +++++----------- 1 file changed, 5 insertions(+), 11 deletions(-) diff --git a/__tests__/tar.test.ts b/__tests__/tar.test.ts index cc94a4d..8e9dd5e 100644 --- a/__tests__/tar.test.ts +++ b/__tests__/tar.test.ts @@ -1,9 +1,10 @@ import * as exec from "@actions/exec"; import * as io from "@actions/io"; -import * as fs from "fs"; import * as path from "path"; import * as tar from "../src/tar"; +import fs = require("fs"); + jest.mock("@actions/exec"); jest.mock("@actions/io"); @@ -42,25 +43,18 @@ test("extract BSD tar", async () => { test("extract GNU tar", async () => { const IS_WINDOWS = process.platform === "win32"; if (IS_WINDOWS) { - jest.mock("fs"); + jest.spyOn(fs, "existsSync").mockReturnValueOnce(false); + jest.spyOn(tar, "isGnuTar").mockReturnValue(Promise.resolve(true)); const execMock = jest.spyOn(exec, "exec"); - const existsSyncMock = jest - .spyOn(fs, "existsSync") - .mockReturnValue(false); - const isGnuTarMock = jest - .spyOn(tar, "isGnuTar") - .mockReturnValue(Promise.resolve(true)); const archivePath = `${process.env["windir"]}\\fakepath\\cache.tar`; const targetDirectory = "~/.npm/cache"; await tar.extractTar(archivePath, targetDirectory); - expect(existsSyncMock).toHaveBeenCalledTimes(1); - expect(isGnuTarMock).toHaveBeenCalledTimes(1); expect(execMock).toHaveBeenCalledTimes(2); expect(execMock).toHaveBeenLastCalledWith( - "tar", + `"tar"`, [ "-xz", "-f", From 7c7d003bbbad33ad70cbf095976259f7c974d91b Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Fri, 10 Apr 2020 15:34:34 -0400 Subject: [PATCH 12/21] Rebase and rebuild --- dist/restore/index.js | 6 +----- dist/save/index.js | 6 +----- 2 files changed, 2 insertions(+), 10 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index 8ccadfb..eeb2634 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -3036,12 +3036,8 @@ function createTar(archiveFolder, sourceDirectories) { const args = [ "-cz", "-f", -<<<<<<< HEAD - constants_1.CacheFilename, - "-P", -======= (_a = constants_1.CacheFilename) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), ->>>>>>> Fallback to GNU tar if BSD tar is unavailable + "-P", "-C", (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/"), "--files-from", diff --git a/dist/save/index.js b/dist/save/index.js index 33d75c6..251e559 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -3017,12 +3017,8 @@ function createTar(archiveFolder, sourceDirectories) { const args = [ "-cz", "-f", -<<<<<<< HEAD - constants_1.CacheFilename, - "-P", -======= (_a = constants_1.CacheFilename) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), ->>>>>>> Fallback to GNU tar if BSD tar is unavailable + "-P", "-C", (_b = workingDirectory) === null || _b === void 0 ? 
void 0 : _b.replace(/\\/g, "/"), "--files-from", From 9fe7ad8b07cdf5e7e198f006d6e5156a9012ef87 Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Mon, 13 Apr 2020 12:20:27 -0400 Subject: [PATCH 13/21] Use path.sep in path replace --- __tests__/tar.test.ts | 29 +++++++---------- dist/restore/index.js | 54 -------------------------------- dist/save/index.js | 73 ------------------------------------------- src/tar.ts | 10 +++--- 4 files changed, 17 insertions(+), 149 deletions(-) diff --git a/__tests__/tar.test.ts b/__tests__/tar.test.ts index 8e9dd5e..d5d9b62 100644 --- a/__tests__/tar.test.ts +++ b/__tests__/tar.test.ts @@ -1,6 +1,5 @@ import * as exec from "@actions/exec"; import * as io from "@actions/io"; -import * as path from "path"; import * as tar from "../src/tar"; import fs = require("fs"); @@ -34,9 +33,9 @@ test("extract BSD tar", async () => { expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ "-xz", "-f", - archivePath?.replace(/\\/g, "/"), + IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, "-C", - targetDirectory?.replace(/\\/g, "/"), + IS_WINDOWS ? targetDirectory?.replace(/\\/g, "/") : targetDirectory ]); }); @@ -53,18 +52,14 @@ test("extract GNU tar", async () => { await tar.extractTar(archivePath, targetDirectory); expect(execMock).toHaveBeenCalledTimes(2); - expect(execMock).toHaveBeenLastCalledWith( - `"tar"`, - [ - "-xz", - "-f", - archivePath?.replace(/\\/g, "/"), - "-C", - targetDirectory?.replace(/\\/g, "/"), - "--force-local" - ], - { cwd: undefined } - ); + expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [ + "-xz", + "-f", + archivePath.replace(/\\/g, "/"), + "-C", + targetDirectory?.replace(/\\/g, "/"), + "--force-local" + ]); } }); @@ -83,9 +78,9 @@ test("create BSD tar", async () => { expect(execMock).toHaveBeenCalledWith(`"${tarPath}"`, [ "-cz", "-f", - archivePath?.replace(/\\/g, "/"), + IS_WINDOWS ? archivePath.replace(/\\/g, "/") : archivePath, "-C", - sourceDirectory?.replace(/\\/g, "/"), + IS_WINDOWS ? sourceDirectory?.replace(/\\/g, "/") : sourceDirectory, "." ]); }); diff --git a/dist/restore/index.js b/dist/restore/index.js index eeb2634..732f7fc 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -2932,9 +2932,6 @@ const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); -<<<<<<< HEAD -function getTarPath() { -======= const path = __importStar(__webpack_require__(622)); const constants_1 = __webpack_require__(694); function isGnuTar() { @@ -2955,7 +2952,6 @@ function isGnuTar() { } exports.isGnuTar = isGnuTar; function getTarPath(args) { ->>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable return __awaiter(this, void 0, void 0, function* () { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; @@ -2971,80 +2967,30 @@ function getTarPath(args) { return yield io.which("tar", true); }); } -<<<<<<< HEAD function execTar(args) { var _a, _b; return __awaiter(this, void 0, void 0, function* () { try { yield exec_1.exec(`"${yield getTarPath()}"`, args); -======= -function execTar(args, cwd) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd }); ->>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable } catch (error) { throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? 
void 0 : _a.message}`); } }); } -<<<<<<< HEAD function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; -======= -function getWorkingDirectory() { - var _a; - return _a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd()); -} -function extractTar(archivePath) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - // Create directory to extract tar into - const workingDirectory = getWorkingDirectory(); - yield io.mkdirP(workingDirectory); - const args = [ - "-xz", - "-f", - (_a = archivePath) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), - "-P", - "-C", - (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/") - ]; ->>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable yield execTar(args); }); } exports.extractTar = extractTar; -<<<<<<< HEAD function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; yield execTar(args); -======= -function createTar(archiveFolder, sourceDirectories) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = "manifest.txt"; - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n")); - const workingDirectory = getWorkingDirectory(); - const args = [ - "-cz", - "-f", - (_a = constants_1.CacheFilename) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), - "-P", - "-C", - (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/"), - "--files-from", - manifestFilename - ]; - yield execTar(args, archiveFolder); ->>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable }); } exports.createTar = createTar; diff --git a/dist/save/index.js b/dist/save/index.js index 251e559..b83d036 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -2913,30 +2913,7 @@ const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); -<<<<<<< HEAD function getTarPath() { -======= -const path = __importStar(__webpack_require__(622)); -const constants_1 = __webpack_require__(694); -function isGnuTar() { - return __awaiter(this, void 0, void 0, function* () { - core.debug("Checking tar --version"); - let versionOutput = ""; - yield exec_1.exec("tar --version", [], { - ignoreReturnCode: true, - silent: true, - listeners: { - stdout: (data) => (versionOutput += data.toString()), - stderr: (data) => (versionOutput += data.toString()) - } - }); - core.debug(versionOutput.trim()); - return versionOutput.toUpperCase().includes("GNU TAR"); - }); -} -exports.isGnuTar = isGnuTar; -function getTarPath(args) { ->>>>>>> 4fa017f... 
Fallback to GNU tar if BSD tar is unavailable return __awaiter(this, void 0, void 0, function* () { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; @@ -2952,80 +2929,30 @@ function getTarPath(args) { return yield io.which("tar", true); }); } -<<<<<<< HEAD function execTar(args) { var _a, _b; return __awaiter(this, void 0, void 0, function* () { try { yield exec_1.exec(`"${yield getTarPath()}"`, args); -======= -function execTar(args, cwd) { - var _a; - return __awaiter(this, void 0, void 0, function* () { - try { - yield exec_1.exec(`"${yield getTarPath(args)}"`, args, { cwd: cwd }); ->>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable } catch (error) { throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`); } }); } -<<<<<<< HEAD function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; -======= -function getWorkingDirectory() { - var _a; - return _a = process.env["GITHUB_WORKSPACE"], (_a !== null && _a !== void 0 ? _a : process.cwd()); -} -function extractTar(archivePath) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - // Create directory to extract tar into - const workingDirectory = getWorkingDirectory(); - yield io.mkdirP(workingDirectory); - const args = [ - "-xz", - "-f", - (_a = archivePath) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), - "-P", - "-C", - (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/") - ]; ->>>>>>> 4fa017f... Fallback to GNU tar if BSD tar is unavailable yield execTar(args); }); } exports.extractTar = extractTar; -<<<<<<< HEAD function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; yield execTar(args); -======= -function createTar(archiveFolder, sourceDirectories) { - var _a, _b; - return __awaiter(this, void 0, void 0, function* () { - // Write source directories to manifest.txt to avoid command length limits - const manifestFilename = "manifest.txt"; - fs_1.writeFileSync(path.join(archiveFolder, manifestFilename), sourceDirectories.join("\n")); - const workingDirectory = getWorkingDirectory(); - const args = [ - "-cz", - "-f", - (_a = constants_1.CacheFilename) === null || _a === void 0 ? void 0 : _a.replace(/\\/g, "/"), - "-P", - "-C", - (_b = workingDirectory) === null || _b === void 0 ? void 0 : _b.replace(/\\/g, "/"), - "--files-from", - manifestFilename - ]; - yield execTar(args, archiveFolder); ->>>>>>> 4fa017f... 
Fallback to GNU tar if BSD tar is unavailable }); } exports.createTar = createTar; diff --git a/src/tar.ts b/src/tar.ts index c20c15d..dde9b61 100644 --- a/src/tar.ts +++ b/src/tar.ts @@ -28,7 +28,7 @@ async function getTarPath(args: string[]): Promise { const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; if (existsSync(systemTar)) { return systemTar; - } else if (isGnuTar()) { + } else if (await isGnuTar()) { args.push("--force-local"); } } @@ -52,9 +52,9 @@ export async function extractTar( const args = [ "-xz", "-f", - archivePath?.replace(/\\/g, "/"), + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), "-C", - targetDirectory?.replace(/\\/g, "/") + targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") ]; await execTar(args); } @@ -66,9 +66,9 @@ export async function createTar( const args = [ "-cz", "-f", - archivePath?.replace(/\\/g, "/"), + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), "-C", - sourceDirectory?.replace(/\\/g, "/"), + sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), "." ]; await execTar(args); From 5a0add1806bb8f47699878d071e546b093793bd2 Mon Sep 17 00:00:00 2001 From: Dave Hadka Date: Wed, 22 Apr 2020 18:23:41 -0400 Subject: [PATCH 14/21] Adds socket timeout and validate file size --- src/cacheHttpClient.ts | 28 ++++++++++++++++++++++++++++ src/constants.ts | 2 ++ 2 files changed, 30 insertions(+) diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 62ae2c1..4cc9c92 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -7,6 +7,8 @@ import { IRequestOptions, ITypedResponse } from "@actions/http-client/interfaces"; + +import { SocketTimeout } from "./constants"; import { ArtifactCacheEntry, CommitCacheRequest, @@ -123,7 +125,33 @@ export async function downloadCache( const stream = fs.createWriteStream(archivePath); const httpClient = new HttpClient("actions/cache"); const downloadResponse = await httpClient.get(archiveLocation); + + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug( + `Aborting download, socket timed out after ${SocketTimeout} ms` + ); + }); + await pipeResponseToStream(downloadResponse, stream); + + // Validate download size. + var contentLengthHeader = + downloadResponse.message.headers["content-length"]; + + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSize(archivePath); + + if (actualLength != expectedLength) { + throw new Error( + `Incomplete download. 
Expected file size: ${expectedLength}, actual file size: ${actualLength}` + ); + } + } else { + core.debug("Unable to validate download, no Content-Length header"); + } } // Reserve Cache diff --git a/src/constants.ts b/src/constants.ts index 5f26e8c..a39e5e0 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -18,3 +18,5 @@ export enum Events { Push = "push", PullRequest = "pull_request" } + +export const SocketTimeout = 5000; From cbbb8b4d4f487d1b7589fff9ee89e26d12b54961 Mon Sep 17 00:00:00 2001 From: Dave Hadka Date: Wed, 22 Apr 2020 18:35:16 -0400 Subject: [PATCH 15/21] Fix lint issue, build .js files --- dist/restore/index.js | 30 ++++++++++++++++++++++++++++++ dist/save/index.js | 29 +++++++++++++++++++++++++++++ src/cacheHttpClient.ts | 2 +- 3 files changed, 60 insertions(+), 1 deletion(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index 732f7fc..77148ca 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1256,6 +1256,7 @@ const fs = __importStar(__webpack_require__(747)); const auth_1 = __webpack_require__(226); const http_client_1 = __webpack_require__(539); const utils = __importStar(__webpack_require__(443)); +const constants_1 = __webpack_require__(694); function isSuccessStatusCode(statusCode) { if (!statusCode) { return false; @@ -1339,7 +1340,24 @@ function downloadCache(archiveLocation, archivePath) { const stream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield httpClient.get(archiveLocation); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); yield pipeResponseToStream(downloadResponse, stream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSize(archivePath); + if (actualLength != expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug("Unable to validate download, no Content-Length header"); + } }); } exports.downloadCache = downloadCache; @@ -1647,6 +1665,10 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); +<<<<<<< HEAD +======= +const glob = __importStar(__webpack_require__(281)); +>>>>>>> 9bb13c7... 
Fix lint issue, build .js files const io = __importStar(__webpack_require__(1)); const fs = __importStar(__webpack_require__(747)); const os = __importStar(__webpack_require__(87)); @@ -2022,6 +2044,12 @@ class HttpClientResponse { this.message.on('data', (chunk) => { output = Buffer.concat([output, chunk]); }); + this.message.on('aborted', () => { + reject("Request was aborted or closed prematurely"); + }); + this.message.on('timeout', (socket) => { + reject("Request timed out"); + }); this.message.on('end', () => { resolve(output.toString()); }); @@ -2143,6 +2171,7 @@ class HttpClient { let response; while (numTries < maxTries) { response = await this.requestRaw(info, data); + // Check if it's an authentication challenge if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; @@ -2721,6 +2750,7 @@ var Events; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; })(Events = exports.Events || (exports.Events = {})); +exports.SocketTimeout = 5000; /***/ }), diff --git a/dist/save/index.js b/dist/save/index.js index b83d036..bdc5c09 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1339,7 +1339,24 @@ function downloadCache(archiveLocation, archivePath) { const stream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); const downloadResponse = yield httpClient.get(archiveLocation); + // Abort download if no traffic received over the socket. + downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { + downloadResponse.message.destroy(); + core.debug(`Aborting download, socket timed out after ${constants_1.SocketTimeout} ms`); + }); yield pipeResponseToStream(downloadResponse, stream); + // Validate download size. + const contentLengthHeader = downloadResponse.message.headers["content-length"]; + if (contentLengthHeader) { + const expectedLength = parseInt(contentLengthHeader); + const actualLength = utils.getArchiveFileSize(archivePath); + if (actualLength != expectedLength) { + throw new Error(`Incomplete download. Expected file size: ${expectedLength}, actual file size: ${actualLength}`); + } + } + else { + core.debug("Unable to validate download, no Content-Length header"); + } }); } exports.downloadCache = downloadCache; @@ -1647,6 +1664,10 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); +<<<<<<< HEAD +======= +const glob = __importStar(__webpack_require__(281)); +>>>>>>> 9bb13c7... 
Fix lint issue, build .js files const io = __importStar(__webpack_require__(1)); const fs = __importStar(__webpack_require__(747)); const os = __importStar(__webpack_require__(87)); @@ -2022,6 +2043,12 @@ class HttpClientResponse { this.message.on('data', (chunk) => { output = Buffer.concat([output, chunk]); }); + this.message.on('aborted', () => { + reject("Request was aborted or closed prematurely"); + }); + this.message.on('timeout', (socket) => { + reject("Request timed out"); + }); this.message.on('end', () => { resolve(output.toString()); }); @@ -2143,6 +2170,7 @@ class HttpClient { let response; while (numTries < maxTries) { response = await this.requestRaw(info, data); + // Check if it's an authentication challenge if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; @@ -2802,6 +2830,7 @@ var Events; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; })(Events = exports.Events || (exports.Events = {})); +exports.SocketTimeout = 5000; /***/ }), diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 4cc9c92..702a01a 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -137,7 +137,7 @@ export async function downloadCache( await pipeResponseToStream(downloadResponse, stream); // Validate download size. - var contentLengthHeader = + const contentLengthHeader = downloadResponse.message.headers["content-length"]; if (contentLengthHeader) { From 2a973a0f4ea7187ed295bd4e479051fb057deb32 Mon Sep 17 00:00:00 2001 From: Dave Hadka Date: Tue, 28 Apr 2020 21:31:41 -0400 Subject: [PATCH 16/21] Add comment for SocketTimeout --- dist/restore/index.js | 3 +++ dist/save/index.js | 3 +++ src/constants.ts | 3 +++ 3 files changed, 9 insertions(+) diff --git a/dist/restore/index.js b/dist/restore/index.js index 77148ca..e175183 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -2750,6 +2750,9 @@ var Events; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; })(Events = exports.Events || (exports.Events = {})); +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. exports.SocketTimeout = 5000; diff --git a/dist/save/index.js b/dist/save/index.js index bdc5c09..602b1cf 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -2830,6 +2830,9 @@ var Events; Events["Push"] = "push"; Events["PullRequest"] = "pull_request"; })(Events = exports.Events || (exports.Events = {})); +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. exports.SocketTimeout = 5000; diff --git a/src/constants.ts b/src/constants.ts index a39e5e0..2e60e34 100644 --- a/src/constants.ts +++ b/src/constants.ts @@ -19,4 +19,7 @@ export enum Events { PullRequest = "pull_request" } +// Socket timeout in milliseconds during download. If no traffic is received +// over the socket during this period, the socket is destroyed and the download +// is aborted. 
export const SocketTimeout = 5000; From ec7f7ebd08bf74c5f5ae1a027e3cd2cbb62a2164 Mon Sep 17 00:00:00 2001 From: Dave Hadka Date: Wed, 29 Apr 2020 09:31:53 -0400 Subject: [PATCH 17/21] Use promisify of stream.pipeline for downloading --- dist/restore/index.js | 23 +++++++++++++---------- dist/save/index.js | 23 +++++++++++++---------- src/cacheHttpClient.ts | 15 +++++++-------- 3 files changed, 33 insertions(+), 28 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index e175183..1fd26f2 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1255,6 +1255,9 @@ const core = __importStar(__webpack_require__(470)); const fs = __importStar(__webpack_require__(747)); const auth_1 = __webpack_require__(226); const http_client_1 = __webpack_require__(539); +const stream = __importStar(__webpack_require__(794)); +const util = __importStar(__webpack_require__(669)); +const constants_1 = __webpack_require__(694); const utils = __importStar(__webpack_require__(443)); const constants_1 = __webpack_require__(694); function isSuccessStatusCode(statusCode) { @@ -1326,13 +1329,10 @@ function getCacheEntry(keys) { }); } exports.getCacheEntry = getCacheEntry; -function pipeResponseToStream(response, stream) { +function pipeResponseToStream(response, output) { return __awaiter(this, void 0, void 0, function* () { - return new Promise(resolve => { - response.message.pipe(stream).on("close", () => { - resolve(); - }); - }); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); }); } function downloadCache(archiveLocation, archivePath) { @@ -1665,10 +1665,6 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -<<<<<<< HEAD -======= -const glob = __importStar(__webpack_require__(281)); ->>>>>>> 9bb13c7... 
Fix lint issue, build .js files const io = __importStar(__webpack_require__(1)); const fs = __importStar(__webpack_require__(747)); const os = __importStar(__webpack_require__(87)); @@ -2894,6 +2890,13 @@ run(); exports.default = run; +/***/ }), + +/***/ 794: +/***/ (function(module) { + +module.exports = require("stream"); + /***/ }), /***/ 826: diff --git a/dist/save/index.js b/dist/save/index.js index 602b1cf..a595665 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1255,6 +1255,9 @@ const core = __importStar(__webpack_require__(470)); const fs = __importStar(__webpack_require__(747)); const auth_1 = __webpack_require__(226); const http_client_1 = __webpack_require__(539); +const stream = __importStar(__webpack_require__(794)); +const util = __importStar(__webpack_require__(669)); +const constants_1 = __webpack_require__(694); const utils = __importStar(__webpack_require__(443)); function isSuccessStatusCode(statusCode) { if (!statusCode) { @@ -1325,13 +1328,10 @@ function getCacheEntry(keys) { }); } exports.getCacheEntry = getCacheEntry; -function pipeResponseToStream(response, stream) { +function pipeResponseToStream(response, output) { return __awaiter(this, void 0, void 0, function* () { - return new Promise(resolve => { - response.message.pipe(stream).on("close", () => { - resolve(); - }); - }); + const pipeline = util.promisify(stream.pipeline); + yield pipeline(response.message, output); }); } function downloadCache(archiveLocation, archivePath) { @@ -1664,10 +1664,6 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -<<<<<<< HEAD -======= -const glob = __importStar(__webpack_require__(281)); ->>>>>>> 9bb13c7... 
Fix lint issue, build .js files const io = __importStar(__webpack_require__(1)); const fs = __importStar(__webpack_require__(747)); const os = __importStar(__webpack_require__(87)); @@ -2876,6 +2872,13 @@ module.exports = require("fs"); /***/ }), +/***/ 794: +/***/ (function(module) { + +module.exports = require("stream"); + +/***/ }), + /***/ 826: /***/ (function(module, __unusedexports, __webpack_require__) { diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 702a01a..41078b3 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -1,12 +1,14 @@ import * as core from "@actions/core"; -import * as fs from "fs"; -import { BearerCredentialHandler } from "@actions/http-client/auth"; import { HttpClient, HttpCodes } from "@actions/http-client"; +import { BearerCredentialHandler } from "@actions/http-client/auth"; import { IHttpClientResponse, IRequestOptions, ITypedResponse } from "@actions/http-client/interfaces"; +import * as fs from "fs"; +import * as stream from "stream"; +import * as util from "util"; import { SocketTimeout } from "./constants"; import { @@ -109,13 +111,10 @@ export async function getCacheEntry( async function pipeResponseToStream( response: IHttpClientResponse, - stream: NodeJS.WritableStream + output: NodeJS.WritableStream ): Promise { - return new Promise(resolve => { - response.message.pipe(stream).on("close", () => { - resolve(); - }); - }); + const pipeline = util.promisify(stream.pipeline); + await pipeline(response.message, output); } export async function downloadCache( From da9f90cb83eb605958d3e7e1c09c21678745e735 Mon Sep 17 00:00:00 2001 From: Dave Hadka Date: Mon, 11 May 2020 10:49:48 -0400 Subject: [PATCH 18/21] Fix upload chunk retries --- dist/restore/index.js | 110 +++++++++++++++++++++++++++-------------- dist/save/index.js | 110 +++++++++++++++++++++++++++-------------- src/cacheHttpClient.ts | 18 +++---- 3 files changed, 153 insertions(+), 85 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index 1fd26f2..d2b9af7 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1382,7 +1382,7 @@ function getContentRange(start, end) { // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } -function uploadChunk(httpClient, resourceUrl, data, start, end) { +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + @@ -1392,7 +1392,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) { "Content-Range": getContentRange(start, end) }; const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders); + return yield httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); }); const response = yield uploadChunkRequest(); if (isSuccessStatusCode(response.message.statusCode)) { @@ -1435,13 +1435,12 @@ function uploadFile(httpClient, cacheId, archivePath) { const start = offset; const end = offset + chunkSize - 1; offset += MAX_CHUNK_SIZE; - const chunk = fs.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, { fd, start, end, autoClose: false - }); - yield uploadChunk(httpClient, resourceUrl, chunk, start, end); + }), start, end); } }))); } @@ -1496,7 +1495,9 @@ class BasicCredentialHandler { this.password = password; } prepareRequest(options) { - options.headers['Authorization'] = 
'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64'); + options.headers['Authorization'] = + 'Basic ' + + Buffer.from(this.username + ':' + this.password).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -1532,7 +1533,8 @@ class PersonalAccessTokenCredentialHandler { // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { - options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + options.headers['Authorization'] = + 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -2001,6 +2003,7 @@ var HttpCodes; HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; @@ -2025,8 +2028,18 @@ function getProxyUrl(serverUrl) { return proxyUrl ? proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect]; -const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout]; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; @@ -2157,19 +2170,23 @@ class HttpClient { */ async request(verb, requestUrl, data, headers) { if (this._disposed) { - throw new Error("Client has already been disposed."); + throw new Error('Client has already been disposed.'); } let parsedUrl = url.parse(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. - let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1; + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? 
this._maxRetries + 1 + : 1; let numTries = 0; let response; while (numTries < maxTries) { response = await this.requestRaw(info, data); // Check if it's an authentication challenge - if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; for (let i = 0; i < this.handlers.length; i++) { if (this.handlers[i].canHandleAuthentication(response)) { @@ -2187,21 +2204,32 @@ class HttpClient { } } let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 - && this._allowRedirects - && redirectsRemaining > 0) { - const redirectUrl = response.message.headers["location"]; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; if (!redirectUrl) { // if there's no location to redirect to, we won't break; } let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { - throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true."); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); } // we need to finish reading the response before reassigning response // which will leak the open socket. await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } // let's make the request with the new redirectUrl info = this._prepareRequest(verb, parsedRedirectUrl, headers); response = await this.requestRaw(info, data); @@ -2252,8 +2280,8 @@ class HttpClient { */ requestRawWithCallback(info, data, onResult) { let socket; - if (typeof (data) === 'string') { - info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8'); + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; let handleResult = (err, res) => { @@ -2266,7 +2294,7 @@ class HttpClient { let res = new HttpClientResponse(msg); handleResult(null, res); }); - req.on('socket', (sock) => { + req.on('socket', sock => { socket = sock; }); // If we ever get disconnected, we want the socket to timeout eventually @@ -2281,10 +2309,10 @@ class HttpClient { // res should have headers handleResult(err, null); }); - if (data && typeof (data) === 'string') { + if (data && typeof data === 'string') { req.write(data, 'utf8'); } - if (data && typeof (data) !== 'string') { + if (data && typeof data !== 'string') { data.on('close', function () { req.end(); }); @@ -2311,31 +2339,34 @@ class HttpClient { const defaultPort = usingSsl ? 443 : 80; info.options = {}; info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port ? 
parseInt(info.parsedUrl.port) : defaultPort; - info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); info.options.method = method; info.options.headers = this._mergeHeaders(headers); if (this.userAgent != null) { - info.options.headers["user-agent"] = this.userAgent; + info.options.headers['user-agent'] = this.userAgent; } info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { - this.handlers.forEach((handler) => { + this.handlers.forEach(handler => { handler.prepareRequest(info.options); }); } return info; } _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); if (this.requestOptions && this.requestOptions.headers) { return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; @@ -2373,7 +2404,7 @@ class HttpClient { proxyAuth: proxyUrl.auth, host: proxyUrl.hostname, port: proxyUrl.port - }, + } }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; @@ -2400,7 +2431,9 @@ class HttpClient { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); } return agent; } @@ -2461,7 +2494,7 @@ class HttpClient { msg = contents; } else { - msg = "Failed request: (" + statusCode + ")"; + msg = 'Failed request: (' + statusCode + ')'; } let err = new Error(msg); // attach statusCode and body obj (if available) to the error object @@ -3049,12 +3082,10 @@ function getProxyUrl(reqUrl) { } let proxyVar; if (usingSsl) { - proxyVar = process.env["https_proxy"] || - process.env["HTTPS_PROXY"]; + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; } else { - proxyVar = process.env["http_proxy"] || - process.env["HTTP_PROXY"]; + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; } if (proxyVar) { proxyUrl = url.parse(proxyVar); @@ -3066,7 +3097,7 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } - let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || ''; + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } @@ -3087,7 +3118,10 @@ function checkBypass(reqUrl) { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy - for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) { + for (let upperNoProxyItem of 
noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { if (upperReqHosts.some(x => x === upperNoProxyItem)) { return true; } diff --git a/dist/save/index.js b/dist/save/index.js index a595665..acb0391 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1381,7 +1381,7 @@ function getContentRange(start, end) { // Content-Range: bytes 0-199/* return `bytes ${start}-${end}/*`; } -function uploadChunk(httpClient, resourceUrl, data, start, end) { +function uploadChunk(httpClient, resourceUrl, openStream, start, end) { return __awaiter(this, void 0, void 0, function* () { core.debug(`Uploading chunk of size ${end - start + @@ -1391,7 +1391,7 @@ function uploadChunk(httpClient, resourceUrl, data, start, end) { "Content-Range": getContentRange(start, end) }; const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - return yield httpClient.sendStream("PATCH", resourceUrl, data, additionalHeaders); + return yield httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); }); const response = yield uploadChunkRequest(); if (isSuccessStatusCode(response.message.statusCode)) { @@ -1434,13 +1434,12 @@ function uploadFile(httpClient, cacheId, archivePath) { const start = offset; const end = offset + chunkSize - 1; offset += MAX_CHUNK_SIZE; - const chunk = fs.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, { fd, start, end, autoClose: false - }); - yield uploadChunk(httpClient, resourceUrl, chunk, start, end); + }), start, end); } }))); } @@ -1495,7 +1494,9 @@ class BasicCredentialHandler { this.password = password; } prepareRequest(options) { - options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64'); + options.headers['Authorization'] = + 'Basic ' + + Buffer.from(this.username + ':' + this.password).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -1531,7 +1532,8 @@ class PersonalAccessTokenCredentialHandler { // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { - options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + options.headers['Authorization'] = + 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -2000,6 +2002,7 @@ var HttpCodes; HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; @@ -2024,8 +2027,18 @@ function getProxyUrl(serverUrl) { return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect]; -const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout]; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; @@ -2156,19 +2169,23 @@ class HttpClient { */ async request(verb, requestUrl, data, headers) { if (this._disposed) { - throw new Error("Client has already been disposed."); + throw new Error('Client has already been disposed.'); } let parsedUrl = url.parse(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. - let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1; + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? this._maxRetries + 1 + : 1; let numTries = 0; let response; while (numTries < maxTries) { response = await this.requestRaw(info, data); // Check if it's an authentication challenge - if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; for (let i = 0; i < this.handlers.length; i++) { if (this.handlers[i].canHandleAuthentication(response)) { @@ -2186,21 +2203,32 @@ class HttpClient { } } let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 - && this._allowRedirects - && redirectsRemaining > 0) { - const redirectUrl = response.message.headers["location"]; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; if (!redirectUrl) { // if there's no location to redirect to, we won't break; } let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { - throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true."); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); } // we need to finish reading the response before reassigning response // which will leak the open socket. 
await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } // let's make the request with the new redirectUrl info = this._prepareRequest(verb, parsedRedirectUrl, headers); response = await this.requestRaw(info, data); @@ -2251,8 +2279,8 @@ class HttpClient { */ requestRawWithCallback(info, data, onResult) { let socket; - if (typeof (data) === 'string') { - info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8'); + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; let handleResult = (err, res) => { @@ -2265,7 +2293,7 @@ class HttpClient { let res = new HttpClientResponse(msg); handleResult(null, res); }); - req.on('socket', (sock) => { + req.on('socket', sock => { socket = sock; }); // If we ever get disconnected, we want the socket to timeout eventually @@ -2280,10 +2308,10 @@ class HttpClient { // res should have headers handleResult(err, null); }); - if (data && typeof (data) === 'string') { + if (data && typeof data === 'string') { req.write(data, 'utf8'); } - if (data && typeof (data) !== 'string') { + if (data && typeof data !== 'string') { data.on('close', function () { req.end(); }); @@ -2310,31 +2338,34 @@ class HttpClient { const defaultPort = usingSsl ? 443 : 80; info.options = {}; info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port ? parseInt(info.parsedUrl.port) : defaultPort; - info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.port = info.parsedUrl.port + ? 
parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); info.options.method = method; info.options.headers = this._mergeHeaders(headers); if (this.userAgent != null) { - info.options.headers["user-agent"] = this.userAgent; + info.options.headers['user-agent'] = this.userAgent; } info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { - this.handlers.forEach((handler) => { + this.handlers.forEach(handler => { handler.prepareRequest(info.options); }); } return info; } _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); if (this.requestOptions && this.requestOptions.headers) { return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; @@ -2372,7 +2403,7 @@ class HttpClient { proxyAuth: proxyUrl.auth, host: proxyUrl.hostname, port: proxyUrl.port - }, + } }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; @@ -2399,7 +2430,9 @@ class HttpClient { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); } return agent; } @@ -2460,7 +2493,7 @@ class HttpClient { msg = contents; } else { - msg = "Failed request: (" + statusCode + ")"; + msg = 'Failed request: (' + statusCode + ')'; } let err = new Error(msg); // attach statusCode and body obj (if available) to the error object @@ -3010,12 +3043,10 @@ function getProxyUrl(reqUrl) { } let proxyVar; if (usingSsl) { - proxyVar = process.env["https_proxy"] || - process.env["HTTPS_PROXY"]; + proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; } else { - proxyVar = process.env["http_proxy"] || - process.env["HTTP_PROXY"]; + proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; } if (proxyVar) { proxyUrl = url.parse(proxyVar); @@ -3027,7 +3058,7 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } - let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || ''; + let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; if (!noProxy) { return false; } @@ -3048,7 +3079,10 @@ function checkBypass(reqUrl) { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy - for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) { + for (let upperNoProxyItem of noProxy + .split(',') + .map(x => x.trim().toUpperCase()) + .filter(x => x)) { if (upperReqHosts.some(x => x === upperNoProxyItem)) { return true; } diff --git 
a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 41078b3..1b34a58 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -179,7 +179,7 @@ function getContentRange(start: number, end: number): string { async function uploadChunk( httpClient: HttpClient, resourceUrl: string, - data: NodeJS.ReadableStream, + openStream: () => NodeJS.ReadableStream, start: number, end: number ): Promise { @@ -200,7 +200,7 @@ async function uploadChunk( return await httpClient.sendStream( "PATCH", resourceUrl, - data, + openStream(), additionalHeaders ); }; @@ -263,17 +263,17 @@ async function uploadFile( const start = offset; const end = offset + chunkSize - 1; offset += MAX_CHUNK_SIZE; - const chunk = fs.createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }); await uploadChunk( httpClient, resourceUrl, - chunk, + () => + fs.createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }), start, end ); From ee7a57c6158120f107592e03bf5b612fc582ff88 Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Fri, 8 May 2020 14:27:52 -0400 Subject: [PATCH 19/21] error handling for stream --- dist/restore/index.js | 6 +++++- dist/save/index.js | 6 +++++- src/cacheHttpClient.ts | 18 ++++++++++++------ 3 files changed, 22 insertions(+), 8 deletions(-) diff --git a/dist/restore/index.js b/dist/restore/index.js index d2b9af7..6d88c89 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1435,11 +1435,15 @@ function uploadFile(httpClient, cacheId, archivePath) { const start = offset; const end = offset + chunkSize - 1; offset += MAX_CHUNK_SIZE; - yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { fd, start, end, autoClose: false + }) + .on("error", error => { + throw new Error(`Cache upload failed because file read failed with ${error.Message}`); }), start, end); } }))); diff --git a/dist/save/index.js b/dist/save/index.js index acb0391..e91ab93 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1434,11 +1434,15 @@ function uploadFile(httpClient, cacheId, archivePath) { const start = offset; const end = offset + chunkSize - 1; offset += MAX_CHUNK_SIZE; - yield uploadChunk(httpClient, resourceUrl, () => fs.createReadStream(archivePath, { + yield uploadChunk(httpClient, resourceUrl, () => fs + .createReadStream(archivePath, { fd, start, end, autoClose: false + }) + .on("error", error => { + throw new Error(`Cache upload failed because file read failed with ${error.Message}`); }), start, end); } }))); diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index 1b34a58..cecdaae 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -268,12 +268,18 @@ async function uploadFile( httpClient, resourceUrl, () => - fs.createReadStream(archivePath, { - fd, - start, - end, - autoClose: false - }), + fs + .createReadStream(archivePath, { + fd, + start, + end, + autoClose: false + }) + .on("error", error => { + throw new Error( + `Cache upload failed because file read failed with ${error.Message}` + ); + }), start, end ); From 0232e3178d8d3a56b774396ff2cc1136a0b1bbf2 Mon Sep 17 00:00:00 2001 From: Dave Hadka Date: Mon, 11 May 2020 11:11:25 -0400 Subject: [PATCH 20/21] Add retries to all API calls --- __tests__/cacheHttpsClient.test.ts | 144 ++++++++++++++++++ dist/restore/index.js | 211 +++++++++++++------------- dist/save/index.js | 229 ++++++++++++++++------------- src/cacheHttpClient.ts | 136 ++++++++++++----- 4 files 
changed, 480 insertions(+), 240 deletions(-) create mode 100644 __tests__/cacheHttpsClient.test.ts diff --git a/__tests__/cacheHttpsClient.test.ts b/__tests__/cacheHttpsClient.test.ts new file mode 100644 index 0000000..c9f4fac --- /dev/null +++ b/__tests__/cacheHttpsClient.test.ts @@ -0,0 +1,144 @@ +import { retry } from "../src/cacheHttpClient"; +import * as testUtils from "../src/utils/testUtils"; + +afterEach(() => { + testUtils.clearInputs(); +}); + +interface TestResponse { + statusCode: number; + result: string | null; +} + +function handleResponse( + response: TestResponse | undefined +): Promise { + if (!response) { + fail("Retry method called too many times"); + } + + if (response.statusCode === 999) { + throw Error("Test Error"); + } else { + return Promise.resolve(response); + } +} + +async function testRetryExpectingResult( + responses: Array, + expectedResult: string | null +): Promise { + responses = responses.reverse(); // Reverse responses since we pop from end + + const actualResult = await retry( + "test", + () => handleResponse(responses.pop()), + (response: TestResponse) => response.statusCode + ); + + expect(actualResult.result).toEqual(expectedResult); +} + +async function testRetryExpectingError( + responses: Array +): Promise { + responses = responses.reverse(); // Reverse responses since we pop from end + + expect( + retry( + "test", + () => handleResponse(responses.pop()), + (response: TestResponse) => response.statusCode + ) + ).rejects.toBeInstanceOf(Error); +} + +test("retry works on successful response", async () => { + await testRetryExpectingResult( + [ + { + statusCode: 200, + result: "Ok" + } + ], + "Ok" + ); +}); + +test("retry works after retryable status code", async () => { + await testRetryExpectingResult( + [ + { + statusCode: 503, + result: null + }, + { + statusCode: 200, + result: "Ok" + } + ], + "Ok" + ); +}); + +test("retry fails after exhausting retries", async () => { + await testRetryExpectingError([ + { + statusCode: 503, + result: null + }, + { + statusCode: 503, + result: null + }, + { + statusCode: 200, + result: "Ok" + } + ]); +}); + +test("retry fails after non-retryable status code", async () => { + await testRetryExpectingError([ + { + statusCode: 500, + result: null + }, + { + statusCode: 200, + result: "Ok" + } + ]); +}); + +test("retry works after error", async () => { + await testRetryExpectingResult( + [ + { + statusCode: 999, + result: null + }, + { + statusCode: 200, + result: "Ok" + } + ], + "Ok" + ); +}); + +test("retry returns after client error", async () => { + await testRetryExpectingResult( + [ + { + statusCode: 400, + result: null + }, + { + statusCode: 200, + result: "Ok" + } + ], + null + ); +}); diff --git a/dist/restore/index.js b/dist/restore/index.js index 6d88c89..c90f729 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -1252,20 +1252,25 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -const fs = __importStar(__webpack_require__(747)); -const auth_1 = __webpack_require__(226); const http_client_1 = __webpack_require__(539); +const auth_1 = __webpack_require__(226); +const fs = __importStar(__webpack_require__(747)); const stream = __importStar(__webpack_require__(794)); const util = __importStar(__webpack_require__(669)); const constants_1 = __webpack_require__(694); const utils = __importStar(__webpack_require__(443)); -const constants_1 = 
__webpack_require__(694); function isSuccessStatusCode(statusCode) { if (!statusCode) { return false; } return statusCode >= 200 && statusCode < 300; } +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} function isRetryableStatusCode(statusCode) { if (!statusCode) { return false; @@ -1305,12 +1310,56 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions()); } +function retry(name, method, getStatusCode, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + let errorMessage = ""; + let attempt = 1; + while (attempt <= maxAttempts) { + try { + response = yield method(); + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + catch (error) { + isRetryable = true; + errorMessage = error.message; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; function getCacheEntry(keys) { var _a; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; - const response = yield httpClient.getJson(getCacheApiUrl(resource)); + const response = yield retryTypedResponse("getCacheEntry", () => httpClient.getJson(getCacheApiUrl(resource))); if (response.statusCode === 204) { return null; } @@ -1339,7 +1388,7 @@ function downloadCache(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const stream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield httpClient.get(archiveLocation); + const downloadResponse = yield retryHttpClientResponse("downloadCache", () => httpClient.get(archiveLocation)); // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); @@ -1369,7 +1418,7 @@ function reserveCache(key) { const reserveCacheRequest = { key }; - const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + const response = yield retryTypedResponse("reserveCache", () => httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest)); return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? 
_c : -1); }); } @@ -1391,21 +1440,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) }; - const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - return yield httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); - }); - const response = yield uploadChunkRequest(); - if (isSuccessStatusCode(response.message.statusCode)) { - return; - } - if (isRetryableStatusCode(response.message.statusCode)) { - core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`); - const retryResponse = yield uploadChunkRequest(); - if (isSuccessStatusCode(retryResponse.message.statusCode)) { - return; - } - } - throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`); + yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders)); }); } function parseEnvNumber(key) { @@ -1457,7 +1492,7 @@ function uploadFile(httpClient, cacheId, archivePath) { function commitCache(httpClient, cacheId, filesize) { return __awaiter(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; - return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + return yield retryTypedResponse("commitCache", () => httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest)); }); } function saveCache(cacheId, archivePath) { @@ -1499,9 +1534,7 @@ class BasicCredentialHandler { this.password = password; } prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + - Buffer.from(this.username + ':' + this.password).toString('base64'); + options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -1537,8 +1570,7 @@ class PersonalAccessTokenCredentialHandler { // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -2007,7 +2039,6 @@ var HttpCodes; HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; @@ -2032,18 +2063,8 @@ function getProxyUrl(serverUrl) { return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; +const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect]; +const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout]; const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; @@ -2057,12 +2078,6 @@ class HttpClientResponse { this.message.on('data', (chunk) => { output = Buffer.concat([output, chunk]); }); - this.message.on('aborted', () => { - reject("Request was aborted or closed prematurely"); - }); - this.message.on('timeout', (socket) => { - reject("Request timed out"); - }); this.message.on('end', () => { resolve(output.toString()); }); @@ -2174,23 +2189,18 @@ class HttpClient { */ async request(verb, requestUrl, data, headers) { if (this._disposed) { - throw new Error('Client has already been disposed.'); + throw new Error("Client has already been disposed."); } let parsedUrl = url.parse(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; + let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1; let numTries = 0; let response; while (numTries < maxTries) { response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { + if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; for (let i = 0; i < this.handlers.length; i++) { if (this.handlers[i].canHandleAuthentication(response)) { @@ -2208,32 +2218,21 @@ class HttpClient { } } let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 + && this._allowRedirects + && redirectsRemaining > 0) { + const redirectUrl = response.message.headers["location"]; if (!redirectUrl) { // if there's no location to redirect to, we won't break; } let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { + throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. 
If you want to allow this behavior, set the allowRedirectDowngrade option to true."); } // we need to finish reading the response before reassigning response // which will leak the open socket. await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } // let's make the request with the new redirectUrl info = this._prepareRequest(verb, parsedRedirectUrl, headers); response = await this.requestRaw(info, data); @@ -2284,8 +2283,8 @@ class HttpClient { */ requestRawWithCallback(info, data, onResult) { let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + if (typeof (data) === 'string') { + info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; let handleResult = (err, res) => { @@ -2298,7 +2297,7 @@ class HttpClient { let res = new HttpClientResponse(msg); handleResult(null, res); }); - req.on('socket', sock => { + req.on('socket', (sock) => { socket = sock; }); // If we ever get disconnected, we want the socket to timeout eventually @@ -2313,10 +2312,10 @@ class HttpClient { // res should have headers handleResult(err, null); }); - if (data && typeof data === 'string') { + if (data && typeof (data) === 'string') { req.write(data, 'utf8'); } - if (data && typeof data !== 'string') { + if (data && typeof (data) !== 'string') { data.on('close', function () { req.end(); }); @@ -2343,34 +2342,31 @@ class HttpClient { const defaultPort = usingSsl ? 443 : 80; info.options = {}; info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.port = info.parsedUrl.port ? 
parseInt(info.parsedUrl.port) : defaultPort; + info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); info.options.method = method; info.options.headers = this._mergeHeaders(headers); if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; + info.options.headers["user-agent"] = this.userAgent; } info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { - this.handlers.forEach(handler => { + this.handlers.forEach((handler) => { handler.prepareRequest(info.options); }); } return info; } _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); if (this.requestOptions && this.requestOptions.headers) { return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; @@ -2408,7 +2404,7 @@ class HttpClient { proxyAuth: proxyUrl.auth, host: proxyUrl.hostname, port: proxyUrl.port - } + }, }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; @@ -2435,9 +2431,7 @@ class HttpClient { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); + agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); } return agent; } @@ -2498,7 +2492,7 @@ class HttpClient { msg = contents; } else { - msg = 'Failed request: (' + statusCode + ')'; + msg = "Failed request: (" + statusCode + ")"; } let err = new Error(msg); // attach statusCode and body obj (if available) to the error object @@ -3006,7 +3000,6 @@ const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); const path = __importStar(__webpack_require__(622)); -const constants_1 = __webpack_require__(694); function isGnuTar() { return __awaiter(this, void 0, void 0, function* () { core.debug("Checking tar --version"); @@ -3033,7 +3026,7 @@ function getTarPath(args) { if (fs_1.existsSync(systemTar)) { return systemTar; } - else if (isGnuTar()) { + else if (yield isGnuTar()) { args.push("--force-local"); } } @@ -3041,10 +3034,10 @@ function getTarPath(args) { }); } function execTar(args) { - var _a, _b; + var _a; return __awaiter(this, void 0, void 0, function* () { try { - yield exec_1.exec(`"${yield getTarPath()}"`, args); + yield exec_1.exec(`"${yield getTarPath(args)}"`, args); } catch (error) { throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? 
void 0 : _a.message}`); @@ -3055,14 +3048,27 @@ function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); - const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + const args = [ + "-xz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") + ]; yield execTar(args); }); } exports.extractTar = extractTar; function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { - const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + const args = [ + "-cz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), + "." + ]; yield execTar(args); }); } @@ -3086,10 +3092,12 @@ function getProxyUrl(reqUrl) { } let proxyVar; if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + proxyVar = process.env["https_proxy"] || + process.env["HTTPS_PROXY"]; } else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + proxyVar = process.env["http_proxy"] || + process.env["HTTP_PROXY"]; } if (proxyVar) { proxyUrl = url.parse(proxyVar); @@ -3101,7 +3109,7 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || ''; if (!noProxy) { return false; } @@ -3122,10 +3130,7 @@ function checkBypass(reqUrl) { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { + for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) { if (upperReqHosts.some(x => x === upperNoProxyItem)) { return true; } diff --git a/dist/save/index.js b/dist/save/index.js index e91ab93..76c820c 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -1252,9 +1252,9 @@ var __importStar = (this && this.__importStar) || function (mod) { }; Object.defineProperty(exports, "__esModule", { value: true }); const core = __importStar(__webpack_require__(470)); -const fs = __importStar(__webpack_require__(747)); -const auth_1 = __webpack_require__(226); const http_client_1 = __webpack_require__(539); +const auth_1 = __webpack_require__(226); +const fs = __importStar(__webpack_require__(747)); const stream = __importStar(__webpack_require__(794)); const util = __importStar(__webpack_require__(669)); const constants_1 = __webpack_require__(694); @@ -1265,6 +1265,12 @@ function isSuccessStatusCode(statusCode) { } return statusCode >= 200 && statusCode < 300; } +function isServerErrorStatusCode(statusCode) { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} function isRetryableStatusCode(statusCode) { if (!statusCode) { return false; @@ -1304,12 +1310,56 @@ function createHttpClient() { const bearerCredentialHandler = new auth_1.BearerCredentialHandler(token); return new http_client_1.HttpClient("actions/cache", [bearerCredentialHandler], getRequestOptions()); } +function retry(name, method, getStatusCode, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + let response = undefined; + let statusCode = undefined; + let isRetryable = false; + let errorMessage = ""; + 
let attempt = 1; + while (attempt <= maxAttempts) { + try { + response = yield method(); + statusCode = getStatusCode(response); + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache service responded with ${statusCode}`; + } + catch (error) { + isRetryable = true; + errorMessage = error.message; + } + core.debug(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + attempt++; + } + throw Error(`${name} failed: ${errorMessage}`); + }); +} +exports.retry = retry; +function retryTypedResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.statusCode, maxAttempts); + }); +} +exports.retryTypedResponse = retryTypedResponse; +function retryHttpClientResponse(name, method, maxAttempts = 2) { + return __awaiter(this, void 0, void 0, function* () { + return yield retry(name, method, (response) => response.message.statusCode, maxAttempts); + }); +} +exports.retryHttpClientResponse = retryHttpClientResponse; function getCacheEntry(keys) { var _a; return __awaiter(this, void 0, void 0, function* () { const httpClient = createHttpClient(); const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; - const response = yield httpClient.getJson(getCacheApiUrl(resource)); + const response = yield retryTypedResponse("getCacheEntry", () => httpClient.getJson(getCacheApiUrl(resource))); if (response.statusCode === 204) { return null; } @@ -1338,7 +1388,7 @@ function downloadCache(archiveLocation, archivePath) { return __awaiter(this, void 0, void 0, function* () { const stream = fs.createWriteStream(archivePath); const httpClient = new http_client_1.HttpClient("actions/cache"); - const downloadResponse = yield httpClient.get(archiveLocation); + const downloadResponse = yield retryHttpClientResponse("downloadCache", () => httpClient.get(archiveLocation)); // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(constants_1.SocketTimeout, () => { downloadResponse.message.destroy(); @@ -1368,7 +1418,7 @@ function reserveCache(key) { const reserveCacheRequest = { key }; - const response = yield httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest); + const response = yield retryTypedResponse("reserveCache", () => httpClient.postJson(getCacheApiUrl("caches"), reserveCacheRequest)); return _c = (_b = (_a = response) === null || _a === void 0 ? void 0 : _a.result) === null || _b === void 0 ? void 0 : _b.cacheId, (_c !== null && _c !== void 0 ? 
_c : -1); }); } @@ -1390,21 +1440,7 @@ function uploadChunk(httpClient, resourceUrl, openStream, start, end) { "Content-Type": "application/octet-stream", "Content-Range": getContentRange(start, end) }; - const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - return yield httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders); - }); - const response = yield uploadChunkRequest(); - if (isSuccessStatusCode(response.message.statusCode)) { - return; - } - if (isRetryableStatusCode(response.message.statusCode)) { - core.debug(`Received ${response.message.statusCode}, retrying chunk at offset ${start}.`); - const retryResponse = yield uploadChunkRequest(); - if (isSuccessStatusCode(retryResponse.message.statusCode)) { - return; - } - } - throw new Error(`Cache service responded with ${response.message.statusCode} during chunk upload.`); + yield retryHttpClientResponse(`uploadChunk (start: ${start}, end: ${end})`, () => httpClient.sendStream("PATCH", resourceUrl, openStream(), additionalHeaders)); }); } function parseEnvNumber(key) { @@ -1456,7 +1492,7 @@ function uploadFile(httpClient, cacheId, archivePath) { function commitCache(httpClient, cacheId, filesize) { return __awaiter(this, void 0, void 0, function* () { const commitCacheRequest = { size: filesize }; - return yield httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest); + return yield retryTypedResponse("commitCache", () => httpClient.postJson(getCacheApiUrl(`caches/${cacheId.toString()}`), commitCacheRequest)); }); } function saveCache(cacheId, archivePath) { @@ -1498,9 +1534,7 @@ class BasicCredentialHandler { this.password = password; } prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + - Buffer.from(this.username + ':' + this.password).toString('base64'); + options.headers['Authorization'] = 'Basic ' + Buffer.from(this.username + ':' + this.password).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -1536,8 +1570,7 @@ class PersonalAccessTokenCredentialHandler { // currently implements pre-authorization // TODO: support preAuth = false where it hooks on 401 prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + options.headers['Authorization'] = 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); } // This handler cannot handle 401 canHandleAuthentication(response) { @@ -2006,7 +2039,6 @@ var HttpCodes; HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; @@ -2031,18 +2063,8 @@ function getProxyUrl(serverUrl) { return proxyUrl ? 
proxyUrl.href : ''; } exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; +const HttpRedirectCodes = [HttpCodes.MovedPermanently, HttpCodes.ResourceMoved, HttpCodes.SeeOther, HttpCodes.TemporaryRedirect, HttpCodes.PermanentRedirect]; +const HttpResponseRetryCodes = [HttpCodes.BadGateway, HttpCodes.ServiceUnavailable, HttpCodes.GatewayTimeout]; const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; const ExponentialBackoffCeiling = 10; const ExponentialBackoffTimeSlice = 5; @@ -2056,12 +2078,6 @@ class HttpClientResponse { this.message.on('data', (chunk) => { output = Buffer.concat([output, chunk]); }); - this.message.on('aborted', () => { - reject("Request was aborted or closed prematurely"); - }); - this.message.on('timeout', (socket) => { - reject("Request timed out"); - }); this.message.on('end', () => { resolve(output.toString()); }); @@ -2173,23 +2189,18 @@ class HttpClient { */ async request(verb, requestUrl, data, headers) { if (this._disposed) { - throw new Error('Client has already been disposed.'); + throw new Error("Client has already been disposed."); } let parsedUrl = url.parse(requestUrl); let info = this._prepareRequest(verb, parsedUrl, headers); // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; + let maxTries = (this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1) ? this._maxRetries + 1 : 1; let numTries = 0; let response; while (numTries < maxTries) { response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { + if (response && response.message && response.message.statusCode === HttpCodes.Unauthorized) { let authenticationHandler; for (let i = 0; i < this.handlers.length; i++) { if (this.handlers[i].canHandleAuthentication(response)) { @@ -2207,32 +2218,21 @@ class HttpClient { } } let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 + && this._allowRedirects + && redirectsRemaining > 0) { + const redirectUrl = response.message.headers["location"]; if (!redirectUrl) { // if there's no location to redirect to, we won't break; } let parsedRedirectUrl = url.parse(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + if (parsedUrl.protocol == 'https:' && parsedUrl.protocol != parsedRedirectUrl.protocol && !this._allowRedirectDowngrade) { + throw new Error("Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. 
If you want to allow this behavior, set the allowRedirectDowngrade option to true."); } // we need to finish reading the response before reassigning response // which will leak the open socket. await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } // let's make the request with the new redirectUrl info = this._prepareRequest(verb, parsedRedirectUrl, headers); response = await this.requestRaw(info, data); @@ -2283,8 +2283,8 @@ class HttpClient { */ requestRawWithCallback(info, data, onResult) { let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + if (typeof (data) === 'string') { + info.options.headers["Content-Length"] = Buffer.byteLength(data, 'utf8'); } let callbackCalled = false; let handleResult = (err, res) => { @@ -2297,7 +2297,7 @@ class HttpClient { let res = new HttpClientResponse(msg); handleResult(null, res); }); - req.on('socket', sock => { + req.on('socket', (sock) => { socket = sock; }); // If we ever get disconnected, we want the socket to timeout eventually @@ -2312,10 +2312,10 @@ class HttpClient { // res should have headers handleResult(err, null); }); - if (data && typeof data === 'string') { + if (data && typeof (data) === 'string') { req.write(data, 'utf8'); } - if (data && typeof data !== 'string') { + if (data && typeof (data) !== 'string') { data.on('close', function () { req.end(); }); @@ -2342,34 +2342,31 @@ class HttpClient { const defaultPort = usingSsl ? 443 : 80; info.options = {}; info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.port = info.parsedUrl.port ? 
parseInt(info.parsedUrl.port) : defaultPort; + info.options.path = (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); info.options.method = method; info.options.headers = this._mergeHeaders(headers); if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; + info.options.headers["user-agent"] = this.userAgent; } info.options.agent = this._getAgent(info.parsedUrl); // gives handlers an opportunity to participate if (this.handlers) { - this.handlers.forEach(handler => { + this.handlers.forEach((handler) => { handler.prepareRequest(info.options); }); } return info; } _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); if (this.requestOptions && this.requestOptions.headers) { return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); } return lowercaseKeys(headers || {}); } _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => (c[k.toLowerCase()] = obj[k], c), {}); let clientHeader; if (this.requestOptions && this.requestOptions.headers) { clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; @@ -2407,7 +2404,7 @@ class HttpClient { proxyAuth: proxyUrl.auth, host: proxyUrl.hostname, port: proxyUrl.port - } + }, }; let tunnelAgent; const overHttps = proxyUrl.protocol === 'https:'; @@ -2434,9 +2431,7 @@ class HttpClient { // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); + agent.options = Object.assign(agent.options || {}, { rejectUnauthorized: false }); } return agent; } @@ -2497,7 +2492,7 @@ class HttpClient { msg = contents; } else { - msg = 'Failed request: (' + statusCode + ')'; + msg = "Failed request: (" + statusCode + ")"; } let err = new Error(msg); // attach statusCode and body obj (if available) to the error object @@ -2985,7 +2980,25 @@ const core = __importStar(__webpack_require__(470)); const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); -function getTarPath() { +const path = __importStar(__webpack_require__(622)); +function isGnuTar() { + return __awaiter(this, void 0, void 0, function* () { + core.debug("Checking tar --version"); + let versionOutput = ""; + yield exec_1.exec("tar --version", [], { + ignoreReturnCode: true, + silent: true, + listeners: { + stdout: (data) => (versionOutput += data.toString()), + stderr: (data) => (versionOutput += data.toString()) + } + }); + core.debug(versionOutput.trim()); + return versionOutput.toUpperCase().includes("GNU TAR"); + }); +} +exports.isGnuTar = isGnuTar; +function getTarPath(args) { return __awaiter(this, void 0, void 0, function* () { // Explicitly use BSD Tar on Windows const IS_WINDOWS = process.platform === "win32"; @@ -2994,7 +3007,7 @@ function getTarPath() { if (fs_1.existsSync(systemTar)) { return systemTar; } - else if (isGnuTar()) { + else if (yield isGnuTar()) { args.push("--force-local"); } } @@ -3002,10 +3015,10 @@ 
function getTarPath() { }); } function execTar(args) { - var _a, _b; + var _a; return __awaiter(this, void 0, void 0, function* () { try { - yield exec_1.exec(`"${yield getTarPath()}"`, args); + yield exec_1.exec(`"${yield getTarPath(args)}"`, args); } catch (error) { throw new Error(`Tar failed with error: ${(_a = error) === null || _a === void 0 ? void 0 : _a.message}`); @@ -3016,14 +3029,27 @@ function extractTar(archivePath, targetDirectory) { return __awaiter(this, void 0, void 0, function* () { // Create directory to extract tar into yield io.mkdirP(targetDirectory); - const args = ["-xz", "-f", archivePath, "-C", targetDirectory]; + const args = [ + "-xz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + targetDirectory.replace(new RegExp("\\" + path.sep, "g"), "/") + ]; yield execTar(args); }); } exports.extractTar = extractTar; function createTar(archivePath, sourceDirectory) { return __awaiter(this, void 0, void 0, function* () { - const args = ["-cz", "-f", archivePath, "-C", sourceDirectory, "."]; + const args = [ + "-cz", + "-f", + archivePath.replace(new RegExp("\\" + path.sep, "g"), "/"), + "-C", + sourceDirectory.replace(new RegExp("\\" + path.sep, "g"), "/"), + "." + ]; yield execTar(args); }); } @@ -3047,10 +3073,12 @@ function getProxyUrl(reqUrl) { } let proxyVar; if (usingSsl) { - proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY']; + proxyVar = process.env["https_proxy"] || + process.env["HTTPS_PROXY"]; } else { - proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY']; + proxyVar = process.env["http_proxy"] || + process.env["HTTP_PROXY"]; } if (proxyVar) { proxyUrl = url.parse(proxyVar); @@ -3062,7 +3090,7 @@ function checkBypass(reqUrl) { if (!reqUrl.hostname) { return false; } - let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || ''; + let noProxy = process.env["no_proxy"] || process.env["NO_PROXY"] || ''; if (!noProxy) { return false; } @@ -3083,10 +3111,7 @@ function checkBypass(reqUrl) { upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`); } // Compare request host against noproxy - for (let upperNoProxyItem of noProxy - .split(',') - .map(x => x.trim().toUpperCase()) - .filter(x => x)) { + for (let upperNoProxyItem of noProxy.split(',').map(x => x.trim().toUpperCase()).filter(x => x)) { if (upperReqHosts.some(x => x === upperNoProxyItem)) { return true; } diff --git a/src/cacheHttpClient.ts b/src/cacheHttpClient.ts index cecdaae..19124e9 100644 --- a/src/cacheHttpClient.ts +++ b/src/cacheHttpClient.ts @@ -26,6 +26,13 @@ function isSuccessStatusCode(statusCode?: number): boolean { return statusCode >= 200 && statusCode < 300; } +function isServerErrorStatusCode(statusCode?: number): boolean { + if (!statusCode) { + return true; + } + return statusCode >= 500; +} + function isRetryableStatusCode(statusCode?: number): boolean { if (!statusCode) { return false; @@ -81,14 +88,83 @@ function createHttpClient(): HttpClient { ); } +export async function retry( + name: string, + method: () => Promise, + getStatusCode: (T) => number | undefined, + maxAttempts = 2 +): Promise { + let response: T | undefined = undefined; + let statusCode: number | undefined = undefined; + let isRetryable = false; + let errorMessage = ""; + let attempt = 1; + + while (attempt <= maxAttempts) { + try { + response = await method(); + statusCode = getStatusCode(response); + + if (!isServerErrorStatusCode(statusCode)) { + return response; + } + + isRetryable = isRetryableStatusCode(statusCode); + errorMessage = `Cache 
service responded with ${statusCode}`; + } catch (error) { + isRetryable = true; + errorMessage = error.message; + } + + core.debug( + `${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}` + ); + + if (!isRetryable) { + core.debug(`${name} - Error is not retryable`); + break; + } + + attempt++; + } + + throw Error(`${name} failed: ${errorMessage}`); +} + +export async function retryTypedResponse( + name: string, + method: () => Promise>, + maxAttempts = 2 +): Promise> { + return await retry( + name, + method, + (response: ITypedResponse) => response.statusCode, + maxAttempts + ); +} + +export async function retryHttpClientResponse( + name: string, + method: () => Promise, + maxAttempts = 2 +): Promise { + return await retry( + name, + method, + (response: IHttpClientResponse) => response.message.statusCode, + maxAttempts + ); +} + export async function getCacheEntry( keys: string[] ): Promise { const httpClient = createHttpClient(); const resource = `cache?keys=${encodeURIComponent(keys.join(","))}`; - const response = await httpClient.getJson( - getCacheApiUrl(resource) + const response = await retryTypedResponse("getCacheEntry", () => + httpClient.getJson(getCacheApiUrl(resource)) ); if (response.statusCode === 204) { return null; @@ -123,7 +199,10 @@ export async function downloadCache( ): Promise { const stream = fs.createWriteStream(archivePath); const httpClient = new HttpClient("actions/cache"); - const downloadResponse = await httpClient.get(archiveLocation); + const downloadResponse = await retryHttpClientResponse( + "downloadCache", + () => httpClient.get(archiveLocation) + ); // Abort download if no traffic received over the socket. downloadResponse.message.socket.setTimeout(SocketTimeout, () => { @@ -160,9 +239,11 @@ export async function reserveCache(key: string): Promise { const reserveCacheRequest: ReserveCacheRequest = { key }; - const response = await httpClient.postJson( - getCacheApiUrl("caches"), - reserveCacheRequest + const response = await retryTypedResponse("reserveCache", () => + httpClient.postJson( + getCacheApiUrl("caches"), + reserveCacheRequest + ) ); return response?.result?.cacheId ?? 
-1; } @@ -196,32 +277,15 @@ async function uploadChunk( "Content-Range": getContentRange(start, end) }; - const uploadChunkRequest = async (): Promise => { - return await httpClient.sendStream( - "PATCH", - resourceUrl, - openStream(), - additionalHeaders - ); - }; - - const response = await uploadChunkRequest(); - if (isSuccessStatusCode(response.message.statusCode)) { - return; - } - - if (isRetryableStatusCode(response.message.statusCode)) { - core.debug( - `Received ${response.message.statusCode}, retrying chunk at offset ${start}.` - ); - const retryResponse = await uploadChunkRequest(); - if (isSuccessStatusCode(retryResponse.message.statusCode)) { - return; - } - } - - throw new Error( - `Cache service responded with ${response.message.statusCode} during chunk upload.` + await retryHttpClientResponse( + `uploadChunk (start: ${start}, end: ${end})`, + () => + httpClient.sendStream( + "PATCH", + resourceUrl, + openStream(), + additionalHeaders + ) ); } @@ -298,9 +362,11 @@ async function commitCache( filesize: number ): Promise> { const commitCacheRequest: CommitCacheRequest = { size: filesize }; - return await httpClient.postJson( - getCacheApiUrl(`caches/${cacheId.toString()}`), - commitCacheRequest + return await retryTypedResponse("commitCache", () => + httpClient.postJson( + getCacheApiUrl(`caches/${cacheId.toString()}`), + commitCacheRequest + ) ); } From 3f662ca624fc3e0ca4791e54930fa9939fd9936b Mon Sep 17 00:00:00 2001 From: Aiqiao Yan Date: Tue, 12 May 2020 16:36:56 -0400 Subject: [PATCH 21/21] Add Eric's e2e test change to get more coverage --- .github/workflows/workflow.yml | 111 +++++++++++++++++++++++++++----- __tests__/create-cache-files.sh | 11 ++++ __tests__/tar.test.ts | 2 +- __tests__/verify-cache-files.sh | 30 +++++++++ dist/restore/index.js | 3 +- dist/save/index.js | 3 +- src/tar.ts | 3 +- 7 files changed, 143 insertions(+), 20 deletions(-) create mode 100755 __tests__/create-cache-files.sh create mode 100755 __tests__/verify-cache-files.sh diff --git a/.github/workflows/workflow.yml b/.github/workflows/workflow.yml index f6c5448..629953d 100644 --- a/.github/workflows/workflow.yml +++ b/.github/workflows/workflow.yml @@ -4,51 +4,130 @@ on: pull_request: branches: - master + - releases/** paths-ignore: - '**.md' push: branches: - master + - releases/** paths-ignore: - '**.md' jobs: - test: - name: Test on ${{ matrix.os }} - + # Build and unit test + build: strategy: matrix: os: [ubuntu-latest, windows-latest, macOS-latest] fail-fast: false - runs-on: ${{ matrix.os }} - steps: - - uses: actions/checkout@v1 - - - uses: actions/setup-node@v1 + - name: Checkout + uses: actions/checkout@v2 + - name: Setup Node.js + uses: actions/setup-node@v1 with: node-version: '12.x' - - - name: Get npm cache directory + - name: Determine npm cache directory id: npm-cache run: | echo "::set-output name=dir::$(npm config get cache)" - - - uses: actions/cache@v1 + - name: Restore npm cache + uses: actions/cache@v1 with: path: ${{ steps.npm-cache.outputs.dir }} key: ${{ runner.os }}-node-${{ hashFiles('**/package-lock.json') }} restore-keys: | ${{ runner.os }}-node- - - run: npm ci - - name: Prettier Format Check run: npm run format-check - - name: ESLint Check run: npm run lint - - name: Build & Test run: npm run test + + # End to end save and restore + test-save: + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Generate files + shell: bash + run: 
__tests__/create-cache-files.sh ${{ runner.os }} + - name: Save cache + uses: ./ + with: + key: test-${{ runner.os }}-${{ github.run_id }} + path: test-cache + test-restore: + needs: test-save + strategy: + matrix: + os: [ubuntu-latest, windows-latest, macOS-latest] + fail-fast: false + runs-on: ${{ matrix.os }} + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Restore cache + uses: ./ + with: + key: test-${{ runner.os }}-${{ github.run_id }} + path: test-cache + - name: Verify cache + shell: bash + run: __tests__/verify-cache-files.sh ${{ runner.os }} + + # End to end with proxy + test-proxy-save: + runs-on: ubuntu-latest + container: + image: ubuntu:latest + options: --dns 127.0.0.1 + services: + squid-proxy: + image: datadog/squid:latest + ports: + - 3128:3128 + env: + https_proxy: http://squid-proxy:3128 + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Generate files + run: __tests__/create-cache-files.sh proxy + - name: Save cache + uses: ./ + with: + key: test-proxy-${{ github.run_id }} + path: test-cache + test-proxy-restore: + needs: test-proxy-save + runs-on: ubuntu-latest + container: + image: ubuntu:latest + options: --dns 127.0.0.1 + services: + squid-proxy: + image: datadog/squid:latest + ports: + - 3128:3128 + env: + https_proxy: http://squid-proxy:3128 + steps: + - name: Checkout + uses: actions/checkout@v2 + - name: Restore cache + uses: ./ + with: + key: test-proxy-${{ github.run_id }} + path: test-cache + - name: Verify cache + run: __tests__/verify-cache-files.sh proxy \ No newline at end of file diff --git a/__tests__/create-cache-files.sh b/__tests__/create-cache-files.sh new file mode 100755 index 0000000..885a5f2 --- /dev/null +++ b/__tests__/create-cache-files.sh @@ -0,0 +1,11 @@ +#!/bin/sh + +# Validate args +prefix="$1" +if [ -z "$prefix" ]; then + echo "Must supply prefix argument" + exit 1 +fi + +mkdir test-cache +echo "$prefix $GITHUB_RUN_ID" > test-cache/test-file.txt \ No newline at end of file diff --git a/__tests__/tar.test.ts b/__tests__/tar.test.ts index d5d9b62..6de03c3 100644 --- a/__tests__/tar.test.ts +++ b/__tests__/tar.test.ts @@ -51,7 +51,7 @@ test("extract GNU tar", async () => { await tar.extractTar(archivePath, targetDirectory); - expect(execMock).toHaveBeenCalledTimes(2); + expect(execMock).toHaveBeenCalledTimes(1); expect(execMock).toHaveBeenLastCalledWith(`"tar"`, [ "-xz", "-f", diff --git a/__tests__/verify-cache-files.sh b/__tests__/verify-cache-files.sh new file mode 100755 index 0000000..c7b75ae --- /dev/null +++ b/__tests__/verify-cache-files.sh @@ -0,0 +1,30 @@ +#!/bin/sh + +# Validate args +prefix="$1" +if [ -z "$prefix" ]; then + echo "Must supply prefix argument" + exit 1 +fi + +# Sanity check GITHUB_RUN_ID defined +if [ -z "$GITHUB_RUN_ID" ]; then + echo "GITHUB_RUN_ID not defined" + exit 1 +fi + +# Verify file exists +file="test-cache/test-file.txt" +echo "Checking for $file" +if [ ! 
-e $file ]; then + echo "File does not exist" + exit 1 +fi + +# Verify file content +content="$(cat $file)" +echo "File content:\n$content" +if [ -z "$(echo $content | grep --fixed-strings "$prefix $GITHUB_RUN_ID")" ]; then + echo "Unexpected file content" + exit 1 +fi \ No newline at end of file diff --git a/dist/restore/index.js b/dist/restore/index.js index c90f729..b5e894b 100644 --- a/dist/restore/index.js +++ b/dist/restore/index.js @@ -3000,6 +3000,7 @@ const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); const path = __importStar(__webpack_require__(622)); +const tar = __importStar(__webpack_require__(943)); function isGnuTar() { return __awaiter(this, void 0, void 0, function* () { core.debug("Checking tar --version"); @@ -3026,7 +3027,7 @@ function getTarPath(args) { if (fs_1.existsSync(systemTar)) { return systemTar; } - else if (yield isGnuTar()) { + else if (yield tar.isGnuTar()) { args.push("--force-local"); } } diff --git a/dist/save/index.js b/dist/save/index.js index 76c820c..a90a6e4 100644 --- a/dist/save/index.js +++ b/dist/save/index.js @@ -2981,6 +2981,7 @@ const exec_1 = __webpack_require__(986); const io = __importStar(__webpack_require__(1)); const fs_1 = __webpack_require__(747); const path = __importStar(__webpack_require__(622)); +const tar = __importStar(__webpack_require__(943)); function isGnuTar() { return __awaiter(this, void 0, void 0, function* () { core.debug("Checking tar --version"); @@ -3007,7 +3008,7 @@ function getTarPath(args) { if (fs_1.existsSync(systemTar)) { return systemTar; } - else if (yield isGnuTar()) { + else if (yield tar.isGnuTar()) { args.push("--force-local"); } } diff --git a/src/tar.ts b/src/tar.ts index dde9b61..00bed5a 100644 --- a/src/tar.ts +++ b/src/tar.ts @@ -3,6 +3,7 @@ import { exec } from "@actions/exec"; import * as io from "@actions/io"; import { existsSync } from "fs"; import * as path from "path"; +import * as tar from "./tar"; export async function isGnuTar(): Promise { core.debug("Checking tar --version"); @@ -28,7 +29,7 @@ async function getTarPath(args: string[]): Promise { const systemTar = `${process.env["windir"]}\\System32\\tar.exe`; if (existsSync(systemTar)) { return systemTar; - } else if (await isGnuTar()) { + } else if (await tar.isGnuTar()) { args.push("--force-local"); } }
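
Note on the tar changes in this series: getTarPath prefers the BSD tar shipped in System32 on Windows and only appends "--force-local" when it falls back to a GNU tar, so that the colon in a drive-letter path is not interpreted as a remote host; extractTar and createTar additionally rewrite the platform path separator to forward slashes before handing the archive and directory paths to tar. A small sketch of that separator rewrite, using a hypothetical helper name and example path (not part of the patch):

    import * as path from "path";

    // Hypothetical helper mirroring the replace() calls in extractTar/createTar.
    // In JS source, "\\" is a single backslash, so on Windows the pattern becomes
    // /\\/g (match a literal backslash); on POSIX it becomes /\//g and the call
    // is effectively a no-op.
    function toTarFriendlyPath(p: string): string {
        return p.replace(new RegExp("\\" + path.sep, "g"), "/");
    }

    // e.g. on Windows: toTarFriendlyPath("D:\\a\\_temp\\cache.tgz") === "D:/a/_temp/cache.tgz"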
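The retry helpers added to src/cacheHttpClient.ts earlier in this series re-invoke the supplied callback when it throws or returns a retryable 5xx status, and after maxAttempts (2 by default) give up by throwing the last error message. A minimal usage sketch, assuming the retryTypedResponse export shown in the diff and a module sitting next to src/cacheHttpClient.ts; the ArtifactCacheEntry shape and the request URL below are placeholders for illustration, not the action's real contract or endpoint:

    import { HttpClient } from "@actions/http-client";
    import { retryTypedResponse } from "./cacheHttpClient";

    // Stand-in for the cache entry contract used only in this sketch.
    interface ArtifactCacheEntry {
        cacheKey?: string;
        archiveLocation?: string;
    }

    async function lookupEntry(resourceUrl: string): Promise<ArtifactCacheEntry | null> {
        const httpClient = new HttpClient("actions/cache");
        // The callback is re-run on every attempt, so each retry issues a fresh request.
        const response = await retryTypedResponse("lookupEntry", () =>
            httpClient.getJson<ArtifactCacheEntry>(resourceUrl)
        );
        // 204 means no matching entry; mirrors the getCacheEntry handling in the diff above.
        return response.statusCode === 204 ? null : response.result;
    }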