From ea542618297c64cfe792f371ade8f9310c00c4e6 Mon Sep 17 00:00:00 2001
From: Enrique Mingorance Cano
Date: Mon, 9 Nov 2020 09:56:10 +0100
Subject: [PATCH] BXMSPROD-1066 @kie/build-chain-configuration-reader upgraded
 to 2.0.2 (#95)

* full downstream flow and CLI improved
* single (compile) flow added
* downstream commands treated
* @kie/build-chain-configuration-reader updated to ^0.0.8
* skip functionality added
* @kie/build-chain-configuration-reader to ^1.1.0
* @kie/build-chain-configuration-reader upgraded to 2.0.0 and code adapted
* checkout mechanism improved
* README.md properly merged
* error treatment during checking out solved
* @kie/build-chain-configuration-reader upgraded to 2.0.1
* version 2.2.1
* BXMSPROD-1066 definitionFile improved
* @kie/build-chain-configuration-reader upgraded to 2.0.2
---
 CHANGELOG.md      |     2 +-
 dist/index.js     | 49100 ++++++++++++++++++++++----------------------
 package-lock.json |     6 +-
 package.json      |     4 +-
 4 files changed, 24550 insertions(+), 24562 deletions(-)

diff --git a/CHANGELOG.md b/CHANGELOG.md
index 3b031850..d60e41cf 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,7 +7,7 @@
 - command line ready
 - full downstream flow added either for CLI and github action
 - single flow added either for CLI and github action
-- @kie/build-chain-configuration-reader dependency upgraded to ^2.0.0
+- @kie/build-chain-configuration-reader dependency upgraded to ^2.0.2
 - branch flow added for CLI
 - skip build mechanism
 - configuration file version <2.0 is no longer supported

diff --git a/dist/index.js b/dist/index.js
index eaffd4f5..ec2d2037 100755
--- a/dist/index.js
+++ b/dist/index.js
@@ -49,36 +49,7 @@ module.exports =
 /******/ })
 /************************************************************************/
 /******/ ([
-/* 0 */
-/***/ (function(__unusedmodule, exports) {
-
-"use strict";
-
-
-exports.fromCallback = function (fn) {
-  return Object.defineProperty(function (...args) {
-    if (typeof args[args.length - 1] === 'function') fn.apply(this, args)
-    else {
-      return new Promise((resolve, reject) => {
-        fn.apply(
-          this,
-          args.concat([(err, res) => err ? reject(err) : resolve(res)])
-        )
-      })
-    }
-  }, 'name', { value: fn.name })
-}
-
-exports.fromPromise = function (fn) {
-  return Object.defineProperty(function (...args) {
-    const cb = args[args.length - 1]
-    if (typeof cb !== 'function') return fn.apply(this, args)
-    else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb)
-  }, 'name', { value: fn.name })
-}
-
-
-/***/ }),
+/* 0 */,
 /* 1 */
 /***/ (function(__unusedmodule, exports, __webpack_require__) {

@@ -502,25306 +473,24957 @@ module.exports = { getEvent, createGithubInformationObject, prepareEnv };

 /***/ }),
 /* 9 */
-/***/ (function(__unusedmodule, exports, __webpack_require__) {

 "use strict";

-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
-    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
-    return new (P || (P = Promise))(function (resolve, reject) {
-        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
-        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
-        function step(result) { result.done ?
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const os = __importStar(__webpack_require__(87)); -const events = __importStar(__webpack_require__(614)); -const child = __importStar(__webpack_require__(129)); -const path = __importStar(__webpack_require__(622)); -const io = __importStar(__webpack_require__(1)); -const ioUtil = __importStar(__webpack_require__(672)); -/* eslint-disable @typescript-eslint/unbound-method */ -const IS_WINDOWS = process.platform === 'win32'; -/* - * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. - */ -class ToolRunner extends events.EventEmitter { - constructor(toolPath, args, options) { - super(); - if (!toolPath) { - throw new Error("Parameter 'toolPath' cannot be null or empty."); - } - this.toolPath = toolPath; - this.args = args || []; - this.options = options || {}; - } - _debug(message) { - if (this.options.listeners && this.options.listeners.debug) { - this.options.listeners.debug(message); - } - } - _getCommandString(options, noPrefix) { - const toolPath = this._getSpawnFileName(); - const args = this._getSpawnArgs(options); - let cmd = noPrefix ? '' : '[command]'; // omit prefix when piped to a second tool - if (IS_WINDOWS) { - // Windows + cmd file - if (this._isCmdFile()) { - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - // Windows + verbatim - else if (options.windowsVerbatimArguments) { - cmd += `"${toolPath}"`; - for (const a of args) { - cmd += ` ${a}`; - } - } - // Windows (regular) - else { - cmd += this._windowsQuoteCmdArg(toolPath); - for (const a of args) { - cmd += ` ${this._windowsQuoteCmdArg(a)}`; - } - } - } - else { - // OSX/Linux - this can likely be improved with some form of quoting. - // creating processes on Unix is fundamentally different than Windows. - // on Unix, execvp() takes an arg array. - cmd += toolPath; - for (const a of args) { - cmd += ` ${a}`; - } - } - return cmd; - } - _processLineBuffer(data, strBuffer, onLine) { - try { - let s = strBuffer + data.toString(); - let n = s.indexOf(os.EOL); - while (n > -1) { - const line = s.substring(0, n); - onLine(line); - // the rest of the string ... - s = s.substring(n + os.EOL.length); - n = s.indexOf(os.EOL); - } - strBuffer = s; - } - catch (err) { - // streaming lines to console is best effort. Don't fail a build. - this._debug(`error processing line. 
Failed with error ${err}`); - } + + +var loader = __webpack_require__(457); +var dumper = __webpack_require__(685); + + +function deprecated(name) { + return function () { + throw new Error('Function ' + name + ' is deprecated and cannot be used.'); + }; +} + + +module.exports.Type = __webpack_require__(945); +module.exports.Schema = __webpack_require__(43); +module.exports.FAILSAFE_SCHEMA = __webpack_require__(581); +module.exports.JSON_SCHEMA = __webpack_require__(23); +module.exports.CORE_SCHEMA = __webpack_require__(611); +module.exports.DEFAULT_SAFE_SCHEMA = __webpack_require__(723); +module.exports.DEFAULT_FULL_SCHEMA = __webpack_require__(910); +module.exports.load = loader.load; +module.exports.loadAll = loader.loadAll; +module.exports.safeLoad = loader.safeLoad; +module.exports.safeLoadAll = loader.safeLoadAll; +module.exports.dump = dumper.dump; +module.exports.safeDump = dumper.safeDump; +module.exports.YAMLException = __webpack_require__(556); + +// Deprecated schema names from JS-YAML 2.0.x +module.exports.MINIMAL_SCHEMA = __webpack_require__(581); +module.exports.SAFE_SCHEMA = __webpack_require__(723); +module.exports.DEFAULT_SCHEMA = __webpack_require__(910); + +// Deprecated functions from JS-YAML 1.x.x +module.exports.scan = deprecated('scan'); +module.exports.parse = deprecated('parse'); +module.exports.compose = deprecated('compose'); +module.exports.addConstructor = deprecated('addConstructor'); + + +/***/ }), +/* 10 */, +/* 11 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { logger } = __webpack_require__(79); +const { execute } = __webpack_require__(81); +const { treatCommand } = __webpack_require__(52); +const { getDir } = __webpack_require__(330); +const core = __webpack_require__(470); + +async function executeBuild(rootFolder, nodeChain, projectTriggeringJob) { + const projectTriggeringJobIndex = nodeChain.findIndex( + node => node.project === projectTriggeringJob + ); + for await (const [index, node] of nodeChain.entries()) { + if (node.build && node.build.skip) { + logger.info( + `Execution skip for ${node.project}. No command will be executed.` + ); + } else { + const levelType = + index < projectTriggeringJobIndex + ? "upstream" + : index == projectTriggeringJobIndex + ? 
"current" + : "downstream"; + await executeNodeBuildCommands(rootFolder, node, levelType); } - _getSpawnFileName() { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - return process.env['COMSPEC'] || 'cmd.exe'; - } - } - return this.toolPath; + } +} + +async function executeBuildSpecificCommand(rootFolder, nodeChain, command) { + for await (const node of nodeChain) { + const dir = getDir(rootFolder, node.project); + await executeBuildCommands(dir, command, node.project); + } +} + +/** + * + * @param {String} rootFolder the folder path to execute command + * @param {Object} node the node to execute + * @param {String} levelType an option between upstream, current or downstream + */ +async function executeNodeBuildCommands(rootFolder, node, levelType) { + const dir = getDir(rootFolder, node.project); + if (node.build["build-command"].before) { + await executeBuildCommands( + dir, + getCommand(node.build["build-command"].before, levelType), + node.project + ); + } + await executeBuildCommands( + dir, + getCommand(node.build["build-command"], levelType), + node.project + ); + if (node.build["build-command"].after) { + await executeBuildCommands( + dir, + getCommand(node.build["build-command"].after, levelType), + node.project + ); + } +} + +function getCommand(buildCommand, levelType) { + return buildCommand[levelType] || buildCommand.current; +} + +async function executeBuildCommands(cwd, buildCommands, project) { + if (buildCommands) { + for (const command of Array.isArray(buildCommands) + ? buildCommands.filter(c => c) + : [buildCommands]) { + core.startGroup(`[${project}]. Command: '${command}' in dir ${cwd}`); + const commandTreated = treatCommand(command); + try { + await execute(cwd, commandTreated); + } catch (e) { + throw new Error( + `[${project}] error executing command '${commandTreated}'` + ); + } + core.endGroup(); } - _getSpawnArgs(options) { - if (IS_WINDOWS) { - if (this._isCmdFile()) { - let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; - for (const a of this.args) { - argline += ' '; - argline += options.windowsVerbatimArguments - ? a - : this._windowsQuoteCmdArg(a); - } - argline += '"'; - return [argline]; - } - } - return this.args; - } - _endsWith(str, end) { - return str.endsWith(end); - } - _isCmdFile() { - const upperToolPath = this.toolPath.toUpperCase(); - return (this._endsWith(upperToolPath, '.CMD') || - this._endsWith(upperToolPath, '.BAT')); - } - _windowsQuoteCmdArg(arg) { - // for .exe, apply the normal quoting rules that libuv applies - if (!this._isCmdFile()) { - return this._uvQuoteCmdArg(arg); - } - // otherwise apply quoting rules specific to the cmd.exe command line parser. - // the libuv rules are generic and are not designed specifically for cmd.exe - // command line parser. 
- // - // for a detailed description of the cmd.exe command line parser, refer to - // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 - // need quotes for empty arg - if (!arg) { - return '""'; - } - // determine whether the arg needs to be quoted - const cmdSpecialChars = [ - ' ', - '\t', - '&', - '(', - ')', - '[', - ']', - '{', - '}', - '^', - '=', - ';', - '!', - "'", - '+', - ',', - '`', - '~', - '|', - '<', - '>', - '"' - ]; - let needsQuotes = false; - for (const char of arg) { - if (cmdSpecialChars.some(x => x === char)) { - needsQuotes = true; - break; - } - } - // short-circuit if quotes not needed - if (!needsQuotes) { - return arg; - } - // the following quoting rules are very similar to the rules that by libuv applies. - // - // 1) wrap the string in quotes - // - // 2) double-up quotes - i.e. " => "" - // - // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately - // doesn't work well with a cmd.exe command line. - // - // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. - // for example, the command line: - // foo.exe "myarg:""my val""" - // is parsed by a .NET console app into an arg array: - // [ "myarg:\"my val\"" ] - // which is the same end result when applying libuv quoting rules. although the actual - // command line from libuv quoting rules would look like: - // foo.exe "myarg:\"my val\"" - // - // 3) double-up slashes that precede a quote, - // e.g. hello \world => "hello \world" - // hello\"world => "hello\\""world" - // hello\\"world => "hello\\\\""world" - // hello world\ => "hello world\\" - // - // technically this is not required for a cmd.exe command line, or the batch argument parser. - // the reasons for including this as a .cmd quoting rule are: - // - // a) this is optimized for the scenario where the argument is passed from the .cmd file to an - // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. - // - // b) it's what we've been doing previously (by deferring to node default behavior) and we - // haven't heard any complaints about that aspect. - // - // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be - // escaped when used on the command line directly - even though within a .cmd file % can be escaped - // by using %%. - // - // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts - // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. - // - // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would - // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the - // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args - // to an external program. - // - // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. - // % can be escaped within a .cmd file. 
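// [editor's illustration - not part of the upstream diff] A minimal sketch of
// quoting rules 1-3 described above, assuming the % caveat is deliberately
// ignored; the actual implementation just below walks the string in reverse
// instead of using regexes:
function cmdQuoteSketch(arg) {
  if (!arg) return '""'; // empty args still need quotes
  return '"' + arg
    .replace(/(\\*)"/g, '$1$1""') // rules 3 + 2: double slashes preceding a quote, then double the quote
    .replace(/(\\+)$/, '$1$1')    // rule 3: double trailing slashes
    + '"';                        // rule 1: wrap the whole string in quotes
}
// worked examples (raw characters, same notation as the comments above):
//   my "val"      =>  "my ""val"""        (rule 2)
//   hello\"world  =>  "hello\\""world"    (rules 3 + 2)
//   hello world\  =>  "hello world\\"     (rule 3)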
- let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; // double the slash - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '"'; // double the quote - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _uvQuoteCmdArg(arg) { - // Tool runner wraps child_process.spawn() and needs to apply the same quoting as - // Node in certain cases where the undocumented spawn option windowsVerbatimArguments - // is used. - // - // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, - // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), - // pasting copyright notice from Node within this function: - // - // Copyright Joyent, Inc. and other Node contributors. All rights reserved. - // - // Permission is hereby granted, free of charge, to any person obtaining a copy - // of this software and associated documentation files (the "Software"), to - // deal in the Software without restriction, including without limitation the - // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - // sell copies of the Software, and to permit persons to whom the Software is - // furnished to do so, subject to the following conditions: - // - // The above copyright notice and this permission notice shall be included in - // all copies or substantial portions of the Software. - // - // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE - // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - // IN THE SOFTWARE. - if (!arg) { - // Need double quotation for empty argument - return '""'; - } - if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { - // No quotation needed - return arg; - } - if (!arg.includes('"') && !arg.includes('\\')) { - // No embedded double quotes or backslashes, so I can just wrap - // quote marks around the whole thing. 
- return `"${arg}"`; - } - // Expected input/output: - // input : hello"world - // output: "hello\"world" - // input : hello""world - // output: "hello\"\"world" - // input : hello\world - // output: hello\world - // input : hello\\world - // output: hello\\world - // input : hello\"world - // output: "hello\\\"world" - // input : hello\\"world - // output: "hello\\\\\"world" - // input : hello world\ - // output: "hello world\\" - note the comment in libuv actually reads "hello world\" - // but it appears the comment is wrong, it should be "hello world\\" - let reverse = '"'; - let quoteHit = true; - for (let i = arg.length; i > 0; i--) { - // walk the string in reverse - reverse += arg[i - 1]; - if (quoteHit && arg[i - 1] === '\\') { - reverse += '\\'; - } - else if (arg[i - 1] === '"') { - quoteHit = true; - reverse += '\\'; - } - else { - quoteHit = false; - } - } - reverse += '"'; - return reverse - .split('') - .reverse() - .join(''); - } - _cloneExecOptions(options) { - options = options || {}; - const result = { - cwd: options.cwd || process.cwd(), - env: options.env || process.env, - silent: options.silent || false, - windowsVerbatimArguments: options.windowsVerbatimArguments || false, - failOnStdErr: options.failOnStdErr || false, - ignoreReturnCode: options.ignoreReturnCode || false, - delay: options.delay || 10000 - }; - result.outStream = options.outStream || process.stdout; - result.errStream = options.errStream || process.stderr; - return result; - } - _getSpawnOptions(options, toolPath) { - options = options || {}; - const result = {}; - result.cwd = options.cwd; - result.env = options.env; - result['windowsVerbatimArguments'] = - options.windowsVerbatimArguments || this._isCmdFile(); - if (options.windowsVerbatimArguments) { - result.argv0 = `"${toolPath}"`; - } - return result; - } - /** - * Exec a tool. - * Output will be streamed to the live console. - * Returns promise with return code - * - * @param tool path to tool to exec - * @param options optional exec options. 
See ExecOptions - * @returns number - */ - exec() { - return __awaiter(this, void 0, void 0, function* () { - // root the tool path if it is unrooted and contains relative pathing - if (!ioUtil.isRooted(this.toolPath) && - (this.toolPath.includes('/') || - (IS_WINDOWS && this.toolPath.includes('\\')))) { - // prefer options.cwd if it is specified, however options.cwd may also need to be rooted - this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); - } - // if the tool is only a file name, then resolve it from the PATH - // otherwise verify it exists (add extension on Windows if necessary) - this.toolPath = yield io.which(this.toolPath, true); - return new Promise((resolve, reject) => { - this._debug(`exec tool: ${this.toolPath}`); - this._debug('arguments:'); - for (const arg of this.args) { - this._debug(` ${arg}`); - } - const optionsNonNull = this._cloneExecOptions(this.options); - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); - } - const state = new ExecState(optionsNonNull, this.toolPath); - state.on('debug', (message) => { - this._debug(message); - }); - const fileName = this._getSpawnFileName(); - const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); - const stdbuffer = ''; - if (cp.stdout) { - cp.stdout.on('data', (data) => { - if (this.options.listeners && this.options.listeners.stdout) { - this.options.listeners.stdout(data); - } - if (!optionsNonNull.silent && optionsNonNull.outStream) { - optionsNonNull.outStream.write(data); - } - this._processLineBuffer(data, stdbuffer, (line) => { - if (this.options.listeners && this.options.listeners.stdline) { - this.options.listeners.stdline(line); - } - }); - }); - } - const errbuffer = ''; - if (cp.stderr) { - cp.stderr.on('data', (data) => { - state.processStderr = true; - if (this.options.listeners && this.options.listeners.stderr) { - this.options.listeners.stderr(data); - } - if (!optionsNonNull.silent && - optionsNonNull.errStream && - optionsNonNull.outStream) { - const s = optionsNonNull.failOnStdErr - ? 
optionsNonNull.errStream - : optionsNonNull.outStream; - s.write(data); - } - this._processLineBuffer(data, errbuffer, (line) => { - if (this.options.listeners && this.options.listeners.errline) { - this.options.listeners.errline(line); - } - }); - }); - } - cp.on('error', (err) => { - state.processError = err.message; - state.processExited = true; - state.processClosed = true; - state.CheckComplete(); - }); - cp.on('exit', (code) => { - state.processExitCode = code; - state.processExited = true; - this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); - state.CheckComplete(); - }); - cp.on('close', (code) => { - state.processExitCode = code; - state.processExited = true; - state.processClosed = true; - this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); - state.CheckComplete(); - }); - state.on('done', (error, exitCode) => { - if (stdbuffer.length > 0) { - this.emit('stdline', stdbuffer); - } - if (errbuffer.length > 0) { - this.emit('errline', errbuffer); - } - cp.removeAllListeners(); - if (error) { - reject(error); - } - else { - resolve(exitCode); - } - }); - if (this.options.input) { - if (!cp.stdin) { - throw new Error('child process missing stdin'); - } - cp.stdin.end(this.options.input); - } - }); - }); - } -} -exports.ToolRunner = ToolRunner; -/** - * Convert an arg string to an array of args. Handles escaping - * - * @param argString string of arguments - * @returns string[] array of arguments - */ -function argStringToArray(argString) { - const args = []; - let inQuotes = false; - let escaped = false; - let arg = ''; - function append(c) { - // we only escape double quotes. - if (escaped && c !== '"') { - arg += '\\'; - } - arg += c; - escaped = false; - } - for (let i = 0; i < argString.length; i++) { - const c = argString.charAt(i); - if (c === '"') { - if (!escaped) { - inQuotes = !inQuotes; - } - else { - append(c); - } - continue; - } - if (c === '\\' && escaped) { - append(c); - continue; - } - if (c === '\\' && inQuotes) { - escaped = true; - continue; - } - if (c === ' ' && !inQuotes) { - if (arg.length > 0) { - args.push(arg); - arg = ''; - } - continue; - } - append(c); - } - if (arg.length > 0) { - args.push(arg.trim()); - } - return args; -} -exports.argStringToArray = argStringToArray; -class ExecState extends events.EventEmitter { - constructor(options, toolPath) { - super(); - this.processClosed = false; // tracks whether the process has exited and stdio is closed - this.processError = ''; - this.processExitCode = 0; - this.processExited = false; // tracks whether the process has exited - this.processStderr = false; // tracks whether stderr was written to - this.delay = 10000; // 10 seconds - this.done = false; - this.timeout = null; - if (!toolPath) { - throw new Error('toolPath must not be empty'); - } - this.options = options; - this.toolPath = toolPath; - if (options.delay) { - this.delay = options.delay; - } - } - CheckComplete() { - if (this.done) { - return; - } - if (this.processClosed) { - this._setResult(); - } - else if (this.processExited) { - this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); - } - } - _debug(message) { - this.emit('debug', message); - } - _setResult() { - // determine whether there is an error - let error; - if (this.processExited) { - if (this.processError) { - error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); - } - else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { - error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); - } - else if (this.processStderr && this.options.failOnStdErr) { - error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); - } - } - // clear the timeout - if (this.timeout) { - clearTimeout(this.timeout); - this.timeout = null; - } - this.done = true; - this.emit('done', error, this.processExitCode); - } - static HandleTimeout(state) { - if (state.done) { - return; - } - if (!state.processClosed && state.processExited) { - const message = `The STDIO streams did not close within ${state.delay / - 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; - state._debug(message); - } - state._setResult(); - } -} -//# sourceMappingURL=toolrunner.js.map - -/***/ }), -/* 10 */, -/* 11 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { logger } = __webpack_require__(79); -const { execute } = __webpack_require__(81); -const { treatCommand } = __webpack_require__(52); -const { getDir } = __webpack_require__(330); -const core = __webpack_require__(470); - -async function executeBuild(rootFolder, nodeChain, projectTriggeringJob) { - const projectTriggeringJobIndex = nodeChain.findIndex( - node => node.project === projectTriggeringJob - ); - for await (const [index, node] of nodeChain.entries()) { - if (node.build && node.build.skip) { - logger.info( - `Execution skip for ${node.project}. No command will be executed.` - ); - } else { - const levelType = - index < projectTriggeringJobIndex - ? "upstream" - : index == projectTriggeringJobIndex - ? "current" - : "downstream"; - await executeNodeBuildCommands(rootFolder, node, levelType); - } - } -} - -async function executeBuildSpecificCommand(rootFolder, nodeChain, command) { - for await (const node of nodeChain) { - const dir = getDir(rootFolder, node.project); - await executeBuildCommands(dir, command, node.project); - } -} - -/** - * - * @param {String} rootFolder the folder path to execute command - * @param {Object} node the node to execute - * @param {String} levelType an option between upstream, current or downstream - */ -async function executeNodeBuildCommands(rootFolder, node, levelType) { - const dir = getDir(rootFolder, node.project); - if (node.build["build-command"].before) { - await executeBuildCommands( - dir, - getCommand(node.build["build-command"].before, levelType), - node.project - ); - } - await executeBuildCommands( - dir, - getCommand(node.build["build-command"], levelType), - node.project - ); - if (node.build["build-command"].after) { - await executeBuildCommands( - dir, - getCommand(node.build["build-command"].after, levelType), - node.project - ); - } -} - -function getCommand(buildCommand, levelType) { - return buildCommand[levelType] || buildCommand.current; -} - -async function executeBuildCommands(cwd, buildCommands, project) { - if (buildCommands) { - for (const command of Array.isArray(buildCommands) - ? buildCommands.filter(c => c) - : [buildCommands]) { - core.startGroup(`[${project}]. 
Command: '${command}' in dir ${cwd}`); - const commandTreated = treatCommand(command); - try { - await execute(cwd, commandTreated); - } catch (e) { - throw new Error( - `[${project}] error executing command '${commandTreated}'` - ); - } - core.endGroup(); - } - } -} - -module.exports = { executeBuild, executeBuildSpecificCommand }; - - -/***/ }), -/* 12 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var register = __webpack_require__(363) -var addHook = __webpack_require__(510) -var removeHook = __webpack_require__(763) - -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind -var bindable = bind.bind(bind) - -function bindApi (hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) - hook.api = { remove: removeHookRef } - hook.remove = removeHookRef - - ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { - var args = name ? [state, kind, name] : [state, kind] - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) - }) -} - -function HookSingular () { - var singularHookName = 'h' - var singularHookState = { - registry: {} - } - var singularHook = register.bind(null, singularHookState, singularHookName) - bindApi(singularHook, singularHookState, singularHookName) - return singularHook -} - -function HookCollection () { - var state = { - registry: {} - } - - var hook = register.bind(null, state) - bindApi(hook, state) - - return hook -} - -var collectionHookDeprecationMessageDisplayed = false -function Hook () { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') - collectionHookDeprecationMessageDisplayed = true - } - return HookCollection() -} - -Hook.Singular = HookSingular.bind() -Hook.Collection = HookCollection.bind() - -module.exports = Hook -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook -module.exports.Singular = Hook.Singular -module.exports.Collection = Hook.Collection - - -/***/ }), -/* 13 */, -/* 14 */, -/* 15 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { getTree, getTreeForProject } = __webpack_require__(333); -const { parentChainFromNode } = __webpack_require__(495); - -/** - * Gets a plain node list of projects ordered by precedence - * @param {string} file - The definition file. It can be a URL or a in the filesystem. - * @param {string} project - The project name to look for. - */ -async function getOrderedListForTree(file, urlPlaceHolders = {}) { - const tree = await getTree(file, urlPlaceHolders); - return getOrderedList(tree); -} - -/** - * Gets a plain node list of projects ordered by precedence for a specific project - * @param {string} file - The definition file. It can be a URL or a in the filesystem. - * @param {string} project - The project name to look for. 
- */ -async function getOrderedListForProject(file, project, urlPlaceHolders = {}) { - console.log("getOrderedListForProject" ,urlPlaceHolders) - const tree = await getTreeForProject(file, project, urlPlaceHolders); - return getOrderedList([tree]); -} - -/** - * Gets a plain node list of projects ordered by precedence - * @param {string} tree - The tree to iterate - */ -async function getOrderedList(tree) { - const finalLeaves = getFinalLeavesFromTree(tree); - return finalLeaves - .map(leaf => parentChainFromNode(leaf)) - .sort((a, b) => b.length - a.length) - .reduce((acc, chain) => { - acc.push( - ...chain.filter(c => !acc.map(e => e.project).includes(c.project)) - ); - return acc; - }, []); -} - -/** - * It returns back the final leaves from a tree - * @param {Array} nodes the array of nodes - * @param {Array} latestLeavesFromTree - * @param {Array} allreadyVisitedNodes - */ -function getFinalLeavesFromTree( - nodes, - latestLeavesFromTree = [], - allreadyVisitedNodes = [] -) { - if (nodes && nodes.length > 0) { - const allreadyVisitedPreviousNodes = [...allreadyVisitedNodes]; - const notIncludedChildren = nodes - .filter(node => !allreadyVisitedPreviousNodes.includes(node.project)) - .reduce((acc, e) => { - acc.push( - ...e.children.filter( - child => !acc.map(e => e.project).includes(child.project) - ) - ); - return acc; - }, []); - allreadyVisitedNodes.push(...nodes.map(e => e.project)); - getFinalLeavesFromTree( - notIncludedChildren, - latestLeavesFromTree, - allreadyVisitedNodes - ); - - latestLeavesFromTree.push( - ...nodes.filter( - node => - (!node.children || node.children.length === 0) && - !latestLeavesFromTree.map(e => e.project).includes(node.project) - ) - ); - - return latestLeavesFromTree; - } else { - return []; - } -} - -module.exports = { - getOrderedListForProject, - getOrderedListForTree, - getOrderedList -}; - - -/***/ }), -/* 16 */ -/***/ (function(module) { - -module.exports = require("tls"); - -/***/ }), -/* 17 */, -/* 18 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - - -var yaml = __webpack_require__(25); - - -module.exports = yaml; - - -/***/ }), -/* 19 */, -/* 20 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const cp = __webpack_require__(129); -const parse = __webpack_require__(568); -const enoent = __webpack_require__(881); - -function spawn(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); - - // Hook into child process "exit" event to emit an error if the command - // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - enoent.hookChildProcess(spawned, parsed); - - return spawned; -} - -function spawnSync(command, args, options) { - // Parse the arguments - const parsed = parse(command, args, options); - - // Spawn the child process - const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); - - // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 - result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); - - return result; -} - -module.exports = spawn; -module.exports.spawn = spawn; -module.exports.sync = spawnSync; - -module.exports._parse = parse; -module.exports._enoent = enoent; - - -/***/ }), -/* 21 */, -/* 22 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const http = 
__webpack_require__(605); -const https = __webpack_require__(211); - -function checkUrlExist(url) { - return new Promise(resolve => { - (url.startsWith("https://") ? https : http).get(url, response => { - resolve(200 === response.statusCode); - }); - }); -} - -module.exports = { - checkUrlExist -}; - - -/***/ }), -/* 23 */, -/* 24 */, -/* 25 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - - -var loader = __webpack_require__(636); -var dumper = __webpack_require__(473); - - -function deprecated(name) { - return function () { - throw new Error('Function ' + name + ' is deprecated and cannot be used.'); - }; -} - - -module.exports.Type = __webpack_require__(547); -module.exports.Schema = __webpack_require__(917); -module.exports.FAILSAFE_SCHEMA = __webpack_require__(497); -module.exports.JSON_SCHEMA = __webpack_require__(304); -module.exports.CORE_SCHEMA = __webpack_require__(997); -module.exports.DEFAULT_SAFE_SCHEMA = __webpack_require__(829); -module.exports.DEFAULT_FULL_SCHEMA = __webpack_require__(451); -module.exports.load = loader.load; -module.exports.loadAll = loader.loadAll; -module.exports.safeLoad = loader.safeLoad; -module.exports.safeLoadAll = loader.safeLoadAll; -module.exports.dump = dumper.dump; -module.exports.safeDump = dumper.safeDump; -module.exports.YAMLException = __webpack_require__(844); - -// Deprecated schema names from JS-YAML 2.0.x -module.exports.MINIMAL_SCHEMA = __webpack_require__(497); -module.exports.SAFE_SCHEMA = __webpack_require__(829); -module.exports.DEFAULT_SCHEMA = __webpack_require__(451); - -// Deprecated functions from JS-YAML 1.x.x -module.exports.scan = deprecated('scan'); -module.exports.parse = deprecated('parse'); -module.exports.compose = deprecated('compose'); -module.exports.addConstructor = deprecated('addConstructor'); - - -/***/ }), -/* 26 */, -/* 27 */, -/* 28 */, -/* 29 */, -/* 30 */, -/* 31 */, -/* 32 */, -/* 33 */, -/* 34 */, -/* 35 */, -/* 36 */, -/* 37 */, -/* 38 */, -/* 39 */ -/***/ (function(module) { - -"use strict"; - -module.exports = opts => { - opts = opts || {}; - - const env = opts.env || process.env; - const platform = opts.platform || process.platform; - - if (platform !== 'win32') { - return 'PATH'; - } - - return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; -}; - - -/***/ }), -/* 40 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - - if (typeof process === "object" && "version" in process) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - - return ""; -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), -/* 41 */, -/* 42 */, -/* 43 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -module.exports = { - copySync: __webpack_require__(110) -} - - -/***/ }), -/* 44 */, -/* 45 */, -/* 46 */, -/* 47 */, -/* 48 */, -/* 49 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var wrappy = __webpack_require__(293) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: 
function () { - return onceStrict(this) - }, - configurable: true - }) -}) - -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} - -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - - -/***/ }), -/* 50 */, -/* 51 */, -/* 52 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const noTreatment = __webpack_require__(981); -const mavenTreatment = __webpack_require__(556); - -function treatCommand(command) { - let libraryToExecute = noTreatment; - if (command.match(/.*mvn .*/)) { - libraryToExecute = mavenTreatment; - } - return libraryToExecute.treat(command); -} - -module.exports = { - treatCommand -}; - - -/***/ }), -/* 53 */, -/* 54 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; -// Adapted from https://github.com/sindresorhus/make-dir -// Copyright (c) Sindre Sorhus (sindresorhus.com) -// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: -// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. - -const fs = __webpack_require__(869) -const path = __webpack_require__(622) -const atLeastNode = __webpack_require__(159) - -const useNativeRecursiveOption = atLeastNode('10.12.0') - -// https://github.com/nodejs/node/issues/8987 -// https://github.com/libuv/libuv/pull/1088 -const checkPath = pth => { - if (process.platform === 'win32') { - const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) - - if (pathHasInvalidWinCharacters) { - const error = new Error(`Path contains invalid characters: ${pth}`) - error.code = 'EINVAL' - throw error - } - } -} - -const processOptions = options => { - const defaults = { mode: 0o777 } - if (typeof options === 'number') options = { mode: options } - return { ...defaults, ...options } -} - -const permissionError = pth => { - // This replicates the exception of `fs.mkdir` with native the - // `recusive` option when run on an invalid drive under Windows. 
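  // [editor's note, illustration only - not part of the upstream diff]
  // On Windows, a call such as `fs.mkdir('Z:\foo', { recursive: true })`
  // against a drive that does not exist rejects with an error shaped like
  // the one fabricated below, e.g.
  //   { code: 'EPERM', errno: -4048, syscall: 'mkdir', path: 'Z:\foo' }
  // (the drive letter is hypothetical); this helper reproduces that shape
  // for the manual, non-recursive fallback path.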
- const error = new Error(`operation not permitted, mkdir '${pth}'`) - error.code = 'EPERM' - error.errno = -4048 - error.path = pth - error.syscall = 'mkdir' - return error -} - -module.exports.makeDir = async (input, options) => { - checkPath(input) - options = processOptions(options) - - if (useNativeRecursiveOption) { - const pth = path.resolve(input) - - return fs.mkdir(pth, { - mode: options.mode, - recursive: true - }) - } - - const make = async pth => { - try { - await fs.mkdir(pth, options.mode) - } catch (error) { - if (error.code === 'EPERM') { - throw error - } - - if (error.code === 'ENOENT') { - if (path.dirname(pth) === pth) { - throw permissionError(pth) - } - - if (error.message.includes('null bytes')) { - throw error - } - - await make(path.dirname(pth)) - return make(pth) - } - - try { - const stats = await fs.stat(pth) - if (!stats.isDirectory()) { - // This error is never exposed to the user - // it is caught below, and the original error is thrown - throw new Error('The path is not a directory') - } - } catch { - throw error - } - } - } - - return make(path.resolve(input)) -} - -module.exports.makeDirSync = (input, options) => { - checkPath(input) - options = processOptions(options) - - if (useNativeRecursiveOption) { - const pth = path.resolve(input) - - return fs.mkdirSync(pth, { - mode: options.mode, - recursive: true - }) - } - - const make = pth => { - try { - fs.mkdirSync(pth, options.mode) - } catch (error) { - if (error.code === 'EPERM') { - throw error - } - - if (error.code === 'ENOENT') { - if (path.dirname(pth) === pth) { - throw permissionError(pth) - } - - if (error.message.includes('null bytes')) { - throw error - } - - make(path.dirname(pth)) - return make(pth) - } - - try { - if (!fs.statSync(pth).isDirectory()) { - // This error is never exposed to the user - // it is caught below, and the original error is thrown - throw new Error('The path is not a directory') - } - } catch { - throw error - } - } - } - - return make(path.resolve(input)) -} - - -/***/ }), -/* 55 */, -/* 56 */, -/* 57 */, -/* 58 */, -/* 59 */, -/* 60 */, -/* 61 */, -/* 62 */, -/* 63 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -/* @flow */ -/*:: - -type DotenvParseOptions = { - debug?: boolean -} - -// keys and values from src -type DotenvParseOutput = { [string]: string } - -type DotenvConfigOptions = { - path?: string, // path to .env file - encoding?: string, // encoding of .env file - debug?: string // turn on logging for debugging purposes -} - -type DotenvConfigOutput = { - parsed?: DotenvParseOutput, - error?: Error -} - -*/ - -const fs = __webpack_require__(747) -const path = __webpack_require__(622) - -function log (message /*: string */) { - console.log(`[dotenv][DEBUG] ${message}`) -} - -const NEWLINE = '\n' -const RE_INI_KEY_VAL = /^\s*([\w.-]+)\s*=\s*(.*)?\s*$/ -const RE_NEWLINES = /\\n/g -const NEWLINES_MATCH = /\n|\r|\r\n/ - -// Parses src into an Object -function parse (src /*: string | Buffer */, options /*: ?DotenvParseOptions */) /*: DotenvParseOutput */ { - const debug = Boolean(options && options.debug) - const obj = {} - - // convert Buffers before splitting into lines and processing - src.toString().split(NEWLINES_MATCH).forEach(function (line, idx) { - // matching "KEY' and 'VAL' in 'KEY=VAL' - const keyValueArr = line.match(RE_INI_KEY_VAL) - // matched? 
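    // [editor's note, illustration only - not part of the upstream diff]
    // e.g. for the line `GREETING="hi\nthere"`, RE_INI_KEY_VAL captures
    // keyValueArr[1] === 'GREETING' and keyValueArr[2] === '"hi\nthere"';
    // the branch below strips the surrounding quotes and, because they are
    // double quotes, expands the escaped \n into a real newline.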
- if (keyValueArr != null) { - const key = keyValueArr[1] - // default undefined or missing values to empty string - let val = (keyValueArr[2] || '') - const end = val.length - 1 - const isDoubleQuoted = val[0] === '"' && val[end] === '"' - const isSingleQuoted = val[0] === "'" && val[end] === "'" - - // if single or double quoted, remove quotes - if (isSingleQuoted || isDoubleQuoted) { - val = val.substring(1, end) - - // if double quoted, expand newlines - if (isDoubleQuoted) { - val = val.replace(RE_NEWLINES, NEWLINE) - } - } else { - // remove surrounding whitespace - val = val.trim() - } - - obj[key] = val - } else if (debug) { - log(`did not match key and value when parsing line ${idx + 1}: ${line}`) - } - }) - - return obj -} - -// Populates process.env from .env file -function config (options /*: ?DotenvConfigOptions */) /*: DotenvConfigOutput */ { - let dotenvPath = path.resolve(process.cwd(), '.env') - let encoding /*: string */ = 'utf8' - let debug = false - - if (options) { - if (options.path != null) { - dotenvPath = options.path - } - if (options.encoding != null) { - encoding = options.encoding - } - if (options.debug != null) { - debug = true - } - } - - try { - // specifying an encoding returns a string instead of a buffer - const parsed = parse(fs.readFileSync(dotenvPath, { encoding }), { debug }) - - Object.keys(parsed).forEach(function (key) { - if (!Object.prototype.hasOwnProperty.call(process.env, key)) { - process.env[key] = parsed[key] - } else if (debug) { - log(`"${key}" is already defined in \`process.env\` and will not be overwritten`) - } - }) - - return { parsed } - } catch (e) { - return { error: e } - } -} - -module.exports.config = config -module.exports.parse = parse - - -/***/ }), -/* 64 */, -/* 65 */, -/* 66 */, -/* 67 */, -/* 68 */, -/* 69 */, -/* 70 */, -/* 71 */, -/* 72 */, -/* 73 */, -/* 74 */, -/* 75 */, -/* 76 */, -/* 77 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var common = __webpack_require__(436); -var Type = __webpack_require__(547); - -var YAML_FLOAT_PATTERN = new RegExp( - // 2.5e4, 2.5 and integers - '^(?:[-+]?(?:0|[1-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' + - // .2e4, .2 - // special case, seems not from spec - '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' + - // 20:59 - '|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*' + - // .inf - '|[-+]?\\.(?:inf|Inf|INF)' + - // .nan - '|\\.(?:nan|NaN|NAN))$'); - -function resolveYamlFloat(data) { - if (data === null) return false; - - if (!YAML_FLOAT_PATTERN.test(data) || - // Quick hack to not allow integers end with `_` - // Probably should update regexp & check speed - data[data.length - 1] === '_') { - return false; - } - - return true; -} - -function constructYamlFloat(data) { - var value, sign, base, digits; - - value = data.replace(/_/g, '').toLowerCase(); - sign = value[0] === '-' ? -1 : 1; - digits = []; - - if ('+-'.indexOf(value[0]) >= 0) { - value = value.slice(1); - } - - if (value === '.inf') { - return (sign === 1) ? 
Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; - - } else if (value === '.nan') { - return NaN; - - } else if (value.indexOf(':') >= 0) { - value.split(':').forEach(function (v) { - digits.unshift(parseFloat(v, 10)); - }); - - value = 0.0; - base = 1; - - digits.forEach(function (d) { - value += d * base; - base *= 60; - }); - - return sign * value; - - } - return sign * parseFloat(value, 10); -} - - -var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; - -function representYamlFloat(object, style) { - var res; - - if (isNaN(object)) { - switch (style) { - case 'lowercase': return '.nan'; - case 'uppercase': return '.NAN'; - case 'camelcase': return '.NaN'; - } - } else if (Number.POSITIVE_INFINITY === object) { - switch (style) { - case 'lowercase': return '.inf'; - case 'uppercase': return '.INF'; - case 'camelcase': return '.Inf'; - } - } else if (Number.NEGATIVE_INFINITY === object) { - switch (style) { - case 'lowercase': return '-.inf'; - case 'uppercase': return '-.INF'; - case 'camelcase': return '-.Inf'; - } - } else if (common.isNegativeZero(object)) { - return '-0.0'; - } - - res = object.toString(10); - - // JS stringifier can build scientific format without dots: 5e-100, - // while YAML requres dot: 5.e-100. Fix it with simple hack - - return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res; -} - -function isFloat(object) { - return (Object.prototype.toString.call(object) === '[object Number]') && - (object % 1 !== 0 || common.isNegativeZero(object)); -} - -module.exports = new Type('tag:yaml.org,2002:float', { - kind: 'scalar', - resolve: resolveYamlFloat, - construct: constructYamlFloat, - predicate: isFloat, - represent: representYamlFloat, - defaultStyle: 'lowercase' -}); - - -/***/ }), -/* 78 */, -/* 79 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const util = __webpack_require__(669); -const process = __webpack_require__(765); - -class ClientError extends Error {} - -class TimeoutError extends Error {} - -function log(prefix, obj) { - if (process.env.NODE_ENV !== "test") { - const str = obj.map(o => (typeof o === "object" ? 
inspect(o) : o)); - if (prefix) { - console.log.apply(console, [prefix, ...str]); - } else { - console.log.apply(console, str); - } - } -} - -const logger = { - level: "info", - - trace: (...str) => { - if (logger.level === "trace") { - log("[TRACE]", str); - } - }, - - debug: (...str) => { - if (logger.level === "trace" || logger.level === "debug") { - log("DEBUG", str); - } - }, - - info: (...str) => log("[INFO] ", str), - warn: (...str) => log("[WARN] ", str), - - error: (...str) => { - if (str.length === 1) { - if (str[0] instanceof Error) { - if (logger.level === "trace" || logger.level === "debug") { - log(null, [str[0].stack || str[0]]); - } else { - log("[ERROR] ", [str[0].message || str[0]]); - } - } - } else { - log("[ERROR] ", str); - } - } -}; - -function inspect(obj) { - return util.inspect(obj, false, null, true); -} - -module.exports = { - ClientError, - TimeoutError, - logger -}; - - -/***/ }), -/* 80 */, -/* 81 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { logger } = __webpack_require__(79); -const exec = __webpack_require__(986); - -class ExitError extends Error { - constructor(message, code) { - super(message); - this.code = code; - } -} - -async function execute(cwd, command) { - logger.info(`Execute command '${command}' in dir '${cwd}'`); - const options = {}; - options.cwd = cwd; - await exec.exec(command, [], options); -} - -module.exports = { - ExitError, - execute -}; - - -/***/ }), -/* 82 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -Object.defineProperty(exports, "__esModule", { value: true }); -/** - * Sanitizes an input into a string so it can be passed into issueCommand safely - * @param input input to sanitize into a string - */ -function toCommandValue(input) { - if (input === null || input === undefined) { - return ''; - } - else if (typeof input === 'string' || input instanceof String) { - return input; - } - return JSON.stringify(input); -} -exports.toCommandValue = toCommandValue; -//# sourceMappingURL=utils.js.map - -/***/ }), -/* 83 */, -/* 84 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { ReadYamlException } = __webpack_require__(500); -const yaml = __webpack_require__(18); - -function read(fileContent) { - try { - return yaml.safeLoad(fileContent); - } catch (e) { - throw new ReadYamlException( - `error reading yaml file content. 
Error: ${e.message}` - ); - } -} - -module.exports = { read }; - - -/***/ }), -/* 85 */, -/* 86 */, -/* 87 */ -/***/ (function(module) { - -module.exports = require("os"); - -/***/ }), -/* 88 */, -/* 89 */, -/* 90 */, -/* 91 */, -/* 92 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(598) -const path = __webpack_require__(622) -const copy = __webpack_require__(774).copy -const remove = __webpack_require__(723).remove -const mkdirp = __webpack_require__(884).mkdirp -const pathExists = __webpack_require__(322).pathExists -const stat = __webpack_require__(127) - -function move (src, dest, opts, cb) { - if (typeof opts === 'function') { - cb = opts - opts = {} - } - - const overwrite = opts.overwrite || opts.clobber || false - - stat.checkPaths(src, dest, 'move', (err, stats) => { - if (err) return cb(err) - const { srcStat } = stats - stat.checkParentPaths(src, srcStat, dest, 'move', err => { - if (err) return cb(err) - mkdirp(path.dirname(dest), err => { - if (err) return cb(err) - return doRename(src, dest, overwrite, cb) - }) - }) - }) -} - -function doRename (src, dest, overwrite, cb) { - if (overwrite) { - return remove(dest, err => { - if (err) return cb(err) - return rename(src, dest, overwrite, cb) - }) - } - pathExists(dest, (err, destExists) => { - if (err) return cb(err) - if (destExists) return cb(new Error('dest already exists.')) - return rename(src, dest, overwrite, cb) - }) -} - -function rename (src, dest, overwrite, cb) { - fs.rename(src, dest, err => { - if (!err) return cb() - if (err.code !== 'EXDEV') return cb(err) - return moveAcrossDevice(src, dest, overwrite, cb) - }) -} - -function moveAcrossDevice (src, dest, overwrite, cb) { - const opts = { - overwrite, - errorOnExist: true - } - copy(src, dest, opts, err => { - if (err) return cb(err) - return remove(src, cb) - }) -} - -module.exports = move - - -/***/ }), -/* 93 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = minimatch -minimatch.Minimatch = Minimatch - -var path = { sep: '/' } -try { - path = __webpack_require__(622) -} catch (er) {} - -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __webpack_require__(306) - -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } -} - -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' - -// * => any number of characters -var star = qmark + '*?' - -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' - -// characters that need to be escaped in RegExp. -var reSpecials = charSet('().*{}+?[]^$\\!') - -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) -} - -// normalizes slashes. 
-var slashSplit = /\/+/ - -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) - } -} - -function ext (a, b) { - a = a || {} - b = b || {} - var t = {} - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - return t -} - -minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return minimatch - - var orig = minimatch - - var m = function minimatch (p, pattern, options) { - return orig.minimatch(p, pattern, ext(def, options)) - } - - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) - } - - return m -} - -Minimatch.defaults = function (def) { - if (!def || !Object.keys(def).length) return Minimatch - return minimatch.defaults(def).Minimatch -} - -function minimatch (p, pattern, options) { - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } - - if (!options) options = {} - - // shortcut: comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } - - // "" only matches "" - if (pattern.trim() === '') return p === '' - - return new Minimatch(pattern, options).match(p) -} - -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) - } - - if (typeof pattern !== 'string') { - throw new TypeError('glob pattern string required') - } - - if (!options) options = {} - pattern = pattern.trim() - - // windows support: need to use /, not \ - if (path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } - - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - - // make the set of regexps etc. - this.make() -} - -Minimatch.prototype.debug = function () {} - -Minimatch.prototype.make = make -function make () { - // don't do it more than once. - if (this._made) return - - var pattern = this.pattern - var options = this.options - - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } - - // step 1: figure out negation, etc. - this.parseNegate() - - // step 2: expand braces - var set = this.globSet = this.braceExpand() - - if (options.debug) this.debug = console.error - - this.debug(this.pattern, set) - - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) - - this.debug(this.pattern, set) - - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) - - this.debug(this.pattern, set) - - // filter out everything that didn't compile properly. 
- set = set.filter(function (s) { - return s.indexOf(false) === -1 - }) - - this.debug(this.pattern, set) - - this.set = set -} - -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 - - if (options.nonegate) return - - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' - ; i++) { - negate = !negate - negateOffset++ - } - - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate -} - -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return braceExpand(pattern, options) -} - -Minimatch.prototype.braceExpand = braceExpand - -function braceExpand (pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options - } else { - options = {} - } - } - - pattern = typeof pattern === 'undefined' - ? this.pattern : pattern - - if (typeof pattern === 'undefined') { - throw new TypeError('undefined pattern') - } - - if (options.nobrace || - !pattern.match(/\{.*\}/)) { - // shortcut. no need to expand. - return [pattern] - } - - return expand(pattern) -} - -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - if (pattern.length > 1024 * 64) { - throw new TypeError('pattern is too long') - } - - var options = this.options - - // shortcuts - if (!options.noglobstar && pattern === '**') return GLOBSTAR - if (pattern === '') return '' - - var re = '' - var hasMagic = !!options.nocase - var escaping = false - // ? => one single character - var patternListStack = [] - var negativeLists = [] - var stateChar - var inClass = false - var reClassStart = -1 - var classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - var patternStart = pattern.charAt(0) === '.' ? '' // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' - : '(?!\\.)' - var self = this - - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - self.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } - - for (var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) - - // skip over any that are escaped. 
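// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] The backslash
// handling here turns a would-be wildcard into a literal character.
// Assuming the usual minimatch API:
//
//   minimatch('aXb', 'a*b')      // => true
//   minimatch('a*b', 'a\\*b')    // => true  (escaped '*' matches only '*')
//   minimatch('aXb', 'a\\*b')    // => false (no wildcard left to match 'X')
// --------------------------------------------------------------------------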
- if (escaping && reSpecials[c]) { - re += '\\' + c - escaping = false - continue - } - - switch (c) { - case '/': - // completely not allowed, even escaped. - // Should already be path-split by now. - return false - - case '\\': - clearStateChar() - escaping = true - continue - - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) - - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } - - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - self.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. - if (options.noext) clearStateChar() - continue - - case '(': - if (inClass) { - re += '(' - continue - } - - if (!stateChar) { - re += '\\(' - continue - } - - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }) - // negation is (?:(?!js)[^/]*) - re += stateChar === '!' ? '(?:(?!(?:' : '(?:' - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue - - case ')': - if (inClass || !patternListStack.length) { - re += '\\)' - continue - } - - clearStateChar() - hasMagic = true - var pl = patternListStack.pop() - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(pl) - } - pl.reEnd = re.length - continue - - case '|': - if (inClass || !patternListStack.length || escaping) { - re += '\\|' - escaping = false - continue - } - - clearStateChar() - re += '|' - continue - - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() - - if (inClass) { - re += '\\' + c - continue - } - - inClass = true - classStart = i - reClassStart = re.length - re += c - continue - - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - escaping = false - continue - } - - // handle the case where we left a class open. - // "[z-a]" is valid, equivalent to "\[z-a\]" - if (inClass) { - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - var cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + cs + ']') - } catch (er) { - // not a valid class! - var sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' - hasMagic = hasMagic || sp[1] - inClass = false - continue - } - } - - // finish up the class. 
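// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] The stateChar and
// patternListStack machinery above implements bash-style "extglob" groups.
// Assuming the usual minimatch behaviour:
//
//   minimatch('a.js', '*.+(js|md)')    // => true   +(...) = one or more
//   minimatch('a.txt', '*.+(js|md)')   // => false
//   minimatch('a.md', '*.!(js)')       // => true   !(...) = negated group
//   minimatch('a.js', '*.!(js)')       // => false
//   // pass { noext: true } to treat these sequences literally instead
// --------------------------------------------------------------------------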
- hasMagic = true - inClass = false - re += c - continue - - default: - // swallow any state char that wasn't consumed - clearStateChar() - - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === '^' && inClass)) { - re += '\\' - } - - re += c - - } // switch - } // for - - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.substr(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } - - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. - for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } - - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) - - this.debug('tail=%j\n %s', tail, tail, pl, re) - var t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type - - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } - - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } - - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - var addPatternStart = false - switch (re.charAt(0)) { - case '.': - case '[': - case '(': addPatternStart = true - } - - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n] - - var nlBefore = re.slice(0, nl.reStart) - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) - var nlAfter = re.slice(nl.reEnd) - - nlLast += nlAfter - - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. 
- var openParensBefore = nlBefore.split('(').length - 1 - var cleanAfter = nlAfter - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter - - var dollar = '' - if (nlAfter === '' && isSub !== SUBPARSE) { - dollar = '$' - } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast - re = newRe - } - - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } - - if (addPatternStart) { - re = patternStart + re - } - - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } - - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } - - var flags = options.nocase ? 'i' : '' - try { - var regExp = new RegExp('^' + re + '$', flags) - } catch (er) { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. - return new RegExp('$.') - } - - regExp._glob = pattern - regExp._src = re - - return regExp -} - -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() -} - -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set - - if (!set.length) { - this.regexp = false - return this.regexp - } - var options = this.options - - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - var flags = options.nocase ? 'i' : '' - - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === 'string') ? regExpEscape(p) - : p._src - }).join('\\\/') - }).join('|') - - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' - - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' - - try { - this.regexp = new RegExp(re, flags) - } catch (ex) { - this.regexp = false - } - return this.regexp -} - -minimatch.match = function (list, pattern, options) { - options = options || {} - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list -} - -Minimatch.prototype.match = match -function match (f, partial) { - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true - - var options = this.options - - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } - - // treat the test path as a set of pathparts. 
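// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] match() works
// segment-by-segment: both the pattern and the tested path are split on '/'
// and compared part-wise (see matchOne below). Assuming the usual API:
//
//   minimatch('/usr/lib/node', '/usr/*/node')                // => true
//   minimatch('/a/b/bar.foo', '*.foo')                       // => false
//   minimatch('/a/b/bar.foo', '*.foo', { matchBase: true })  // => true
// --------------------------------------------------------------------------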
- f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) - - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. - - var set = this.set - this.debug(this.pattern, 'set', set) - - // Find the basename of the path by looking for the last non-empty segment - var filename - var i - for (i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } - - for (i = 0; i < set.length; i++) { - var pattern = set[i] - var file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - var hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } - } - - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate -} - -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. -Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options - - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) - - this.debug('matchOne', file.length, pattern.length) - - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] - - this.debug(pattern, p, f) - - // should be impossible. - // some invalid regexp stuff in the set. - if (p === false) return false - - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) - - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' || - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } - - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] - - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) - - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' 
|| - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } - - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } - - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } - - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - if (options.nocase) { - hit = f.toLowerCase() === p.toLowerCase() - } else { - hit = f === p - } - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } - - if (!hit) return false - } - - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') - return emptyFileEnd - } - - // should be unreachable. 
- throw new Error('wtf?') -} - -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, '$1') -} - -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') -} - - -/***/ }), -/* 94 */, -/* 95 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(598) -const path = __webpack_require__(622) -const copySync = __webpack_require__(43).copySync -const removeSync = __webpack_require__(723).removeSync -const mkdirpSync = __webpack_require__(884).mkdirpSync -const stat = __webpack_require__(127) - -function moveSync (src, dest, opts) { - opts = opts || {} - const overwrite = opts.overwrite || opts.clobber || false - - const { srcStat } = stat.checkPathsSync(src, dest, 'move') - stat.checkParentPathsSync(src, srcStat, dest, 'move') - mkdirpSync(path.dirname(dest)) - return doRename(src, dest, overwrite) -} - -function doRename (src, dest, overwrite) { - if (overwrite) { - removeSync(dest) - return rename(src, dest, overwrite) - } - if (fs.existsSync(dest)) throw new Error('dest already exists.') - return rename(src, dest, overwrite) -} - -function rename (src, dest, overwrite) { - try { - fs.renameSync(src, dest) - } catch (err) { - if (err.code !== 'EXDEV') throw err - return moveAcrossDevice(src, dest, overwrite) - } -} - -function moveAcrossDevice (src, dest, overwrite) { - const opts = { - overwrite, - errorOnExist: true - } - copySync(src, dest, opts) - return removeSync(src) -} - -module.exports = moveSync - - -/***/ }), -/* 96 */, -/* 97 */, -/* 98 */, -/* 99 */, -/* 100 */, -/* 101 */, -/* 102 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -// For internal use, subject to change. -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -// We use any as a valid input type -/* eslint-disable @typescript-eslint/no-explicit-any */ -const fs = __importStar(__webpack_require__(747)); -const os = __importStar(__webpack_require__(87)); -const utils_1 = __webpack_require__(82); -function issueCommand(command, message) { - const filePath = process.env[`GITHUB_${command}`]; - if (!filePath) { - throw new Error(`Unable to find environment variable for file command ${command}`); - } - if (!fs.existsSync(filePath)) { - throw new Error(`Missing file at path: ${filePath}`); - } - fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { - encoding: 'utf8' - }); -} -exports.issueCommand = issueCommand; -//# sourceMappingURL=file-command.js.map - -/***/ }), -/* 103 */, -/* 104 */, -/* 105 */, -/* 106 */, -/* 107 */, -/* 108 */, -/* 109 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - - -const { ClientError, logger } = __webpack_require__(79); -const { - executeFromEvent: pullRequestEventFlow -} = __webpack_require__(701); -const { - executeFromEvent: fdbEventFlow -} = __webpack_require__(771); -const { - executeFromEvent: singleEventFlow -} = __webpack_require__(698); -const { - isPullRequestFlowType, - isFDFlowType, - isSingleFlowType, - getFlowType -} = __webpack_require__(933); -const { createOctokitInstance, getProcessEnvVariable } = __webpack_require__(867); -__webpack_require__(63).config(); - -async function main() { - 
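// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] This entry point
// reads GITHUB_TOKEN from the environment and dispatches to exactly one of
// the three flows (pull-request, full-downstream, single) selected by the
// flow-type helpers required above. A hedged usage sketch; the token value
// is a placeholder, and the throw-on-missing behaviour of
// getProcessEnvVariable is an assumption:
//
//   process.env.GITHUB_TOKEN = '<token>'  // presumably required up front
//   await main()                          // runs one flow, or rejects with
//                                         // "flow type ... is not supported"
// --------------------------------------------------------------------------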
const token = getProcessEnvVariable("GITHUB_TOKEN"); - const octokit = createOctokitInstance(token); - if (isPullRequestFlowType()) { - await pullRequestEventFlow(token, octokit, process.env); - } else if (isFDFlowType()) { - await fdbEventFlow(token, octokit, process.env); - } else if (isSingleFlowType()) { - await singleEventFlow(token, octokit, process.env); - } else { - throw new Error( - `flow type input value '${getFlowType()}' is not supported. Please check documentation.` - ); - } -} - -if (require.main === require.cache[eval('__filename')]) { - main().catch(e => { - if (e instanceof ClientError) { - process.exitCode = 2; - logger.error(e); - } else { - process.exitCode = 1; - logger.error(e); - } - }); -} - -module.exports = { main }; - - -/***/ }), -/* 110 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(598) -const path = __webpack_require__(622) -const mkdirsSync = __webpack_require__(884).mkdirsSync -const utimesMillisSync = __webpack_require__(946).utimesMillisSync -const stat = __webpack_require__(127) - -function copySync (src, dest, opts) { - if (typeof opts === 'function') { - opts = { filter: opts } - } - - opts = opts || {} - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n - see https://github.com/jprichardson/node-fs-extra/issues/269`) - } - - const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy') - stat.checkParentPathsSync(src, srcStat, dest, 'copy') - return handleFilterAndCopy(destStat, src, dest, opts) -} - -function handleFilterAndCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - const destParent = path.dirname(dest) - if (!fs.existsSync(destParent)) mkdirsSync(destParent) - return startCopy(destStat, src, dest, opts) -} - -function startCopy (destStat, src, dest, opts) { - if (opts.filter && !opts.filter(src, dest)) return - return getStats(destStat, src, dest, opts) -} - -function getStats (destStat, src, dest, opts) { - const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync - const srcStat = statSync(src) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) -} - -function onFile (srcStat, destStat, src, dest, opts) { - if (!destStat) return copyFile(srcStat, src, dest, opts) - return mayCopyFile(srcStat, src, dest, opts) -} - -function mayCopyFile (srcStat, src, dest, opts) { - if (opts.overwrite) { - fs.unlinkSync(dest) - return copyFile(srcStat, src, dest, opts) - } else if (opts.errorOnExist) { - throw new Error(`'${dest}' already exists`) - } -} - -function copyFile (srcStat, src, dest, opts) { - fs.copyFileSync(src, dest) - if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) - return setDestMode(dest, srcStat.mode) -} - -function handleTimestamps (srcMode, src, dest) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) - return setDestTimestamps(src, dest) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode) { - return setDestMode(dest, srcMode | 0o200) -} - -function setDestMode (dest, srcMode) { - return fs.chmodSync(dest, srcMode) -} - -function setDestTimestamps (src, dest) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - const updatedSrcStat = fs.statSync(src) - return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) -} - -function onDir (srcStat, destStat, src, dest, opts) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) - if (destStat && !destStat.isDirectory()) { - throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) - } - return copyDir(src, dest, opts) -} - -function mkDirAndCopy (srcMode, src, dest, opts) { - fs.mkdirSync(dest) - copyDir(src, dest, opts) - return setDestMode(dest, srcMode) -} - -function copyDir (src, dest, opts) { - fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) -} - -function copyDirItem (item, src, dest, opts) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy') - return startCopy(destStat, srcItem, destItem, opts) -} - -function onLink (destStat, src, dest, opts) { - let resolvedSrc = fs.readlinkSync(src) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlinkSync(resolvedSrc, dest) - } else { - let resolvedDest - try { - resolvedDest = fs.readlinkSync(dest) - } catch (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. 
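// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] copySync honours the
// overwrite / errorOnExist / preserveTimestamps options handled above.
// Assuming this bundled copy behaves like the published fs-extra package
// (the paths are placeholders):
//
//   const fse = require('fs-extra')
//   fse.copySync('src-dir', 'dest-dir', { overwrite: false, errorOnExist: true })
//   fse.copySync('a.txt', 'b.txt', { preserveTimestamps: true })
// --------------------------------------------------------------------------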
- if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) - throw err - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) - } - - // prevent copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. - if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) - } - return copyLink(resolvedSrc, dest) - } -} - -function copyLink (resolvedSrc, dest) { - fs.unlinkSync(dest) - return fs.symlinkSync(resolvedSrc, dest) -} - -module.exports = copySync - - -/***/ }), -/* 111 */, -/* 112 */, -/* 113 */, -/* 114 */, -/* 115 */, -/* 116 */, -/* 117 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var pathModule = __webpack_require__(622); -var isWindows = process.platform === 'win32'; -var fs = __webpack_require__(747); - -// JavaScript implementation of realpath, ported from node pre-v6 - -var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); - -function rethrow() { - // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and - // is fairly slow to generate. - var callback; - if (DEBUG) { - var backtrace = new Error; - callback = debugCallback; - } else - callback = missingCallback; - - return callback; - - function debugCallback(err) { - if (err) { - backtrace.message = err.message; - err = backtrace; - missingCallback(err); - } - } - - function missingCallback(err) { - if (err) { - if (process.throwDeprecation) - throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs - else if (!process.noDeprecation) { - var msg = 'fs: missing callback ' + (err.stack || err.message); - if (process.traceDeprecation) - console.trace(msg); - else - console.error(msg); - } - } - } -} - -function maybeCallback(cb) { - return typeof cb === 'function' ? cb : rethrow(); -} - -var normalize = pathModule.normalize; - -// Regexp that finds the next partion of a (partial) path -// result is [base_with_slash, base], e.g. 
['somedir/', 'somedir'] -if (isWindows) { - var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; -} else { - var nextPartRe = /(.*?)(?:[\/]+|$)/g; -} - -// Regex to find the device root, including trailing slash. E.g. 'c:\\'. -if (isWindows) { - var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; -} else { - var splitRootRe = /^[\/]*/; -} - -exports.realpathSync = function realpathSync(p, cache) { - // make p is absolute - p = pathModule.resolve(p); - - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return cache[p]; - } - - var original = p, - seenLinks = {}, - knownHard = {}; - - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; - - start(); - - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; - - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstatSync(base); - knownHard[base] = true; - } - } - - // walk down the path, swapping out linked pathparts for their real - // values - // NB: p.length changes. - while (pos < p.length) { - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - continue; - } - - var resolvedLink; - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // some known symbolic link. no need to stat again. - resolvedLink = cache[base]; - } else { - var stat = fs.lstatSync(base); - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - continue; - } - - // read the link if it wasn't read before - // dev/ino always return 0 on windows, so skip the check. - var linkTarget = null; - if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - linkTarget = seenLinks[id]; - } - } - if (linkTarget === null) { - fs.statSync(base); - linkTarget = fs.readlinkSync(base); - } - resolvedLink = pathModule.resolve(previous, linkTarget); - // track this, if given a cache. 
- if (cache) cache[base] = resolvedLink; - if (!isWindows) seenLinks[id] = linkTarget; - } - - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } - - if (cache) cache[original] = p; - - return p; -}; - - -exports.realpath = function realpath(p, cache, cb) { - if (typeof cb !== 'function') { - cb = maybeCallback(cache); - cache = null; - } - - // make p is absolute - p = pathModule.resolve(p); - - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return process.nextTick(cb.bind(null, null, cache[p])); - } - - var original = p, - seenLinks = {}, - knownHard = {}; - - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; - - start(); - - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; - - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstat(base, function(err) { - if (err) return cb(err); - knownHard[base] = true; - LOOP(); - }); - } else { - process.nextTick(LOOP); - } - } - - // walk down the path, swapping out linked pathparts for their real - // values - function LOOP() { - // stop if scanned past end of path - if (pos >= p.length) { - if (cache) cache[original] = p; - return cb(null, p); - } - - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - return process.nextTick(LOOP); - } - - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // known symbolic link. no need to stat again. - return gotResolvedLink(cache[base]); - } - - return fs.lstat(base, gotStat); - } - - function gotStat(err, stat) { - if (err) return cb(err); - - // if not a symlink, skip to the next path part - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - return process.nextTick(LOOP); - } - - // stat & read the link if not read before - // call gotTarget as soon as the link target is known - // dev/ino always return 0 on windows, so skip the check. 
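// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] Both realpathSync
// and realpath accept an optional cache object: every resolved prefix gets
// recorded in it, so repeated lookups skip the lstat/readlink round trips.
// The path below is a placeholder:
//
//   const cache = Object.create(null)
//   const real = exports.realpathSync('/some/symlinked/path', cache)
//   exports.realpathSync('/some/symlinked/path', cache)  // served from cache
// --------------------------------------------------------------------------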
- if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - return gotTarget(null, seenLinks[id], base); - } - } - fs.stat(base, function(err) { - if (err) return cb(err); - - fs.readlink(base, function(err, target) { - if (!isWindows) seenLinks[id] = target; - gotTarget(err, target); - }); - }); - } - - function gotTarget(err, target, base) { - if (err) return cb(err); - - var resolvedLink = pathModule.resolve(previous, target); - if (cache) cache[base] = resolvedLink; - gotResolvedLink(resolvedLink); - } - - function gotResolvedLink(resolvedLink) { - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } -}; - - -/***/ }), -/* 118 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const os = __webpack_require__(87); - -const nameMap = new Map([ - [20, ['Big Sur', '11']], - [19, ['Catalina', '10.15']], - [18, ['Mojave', '10.14']], - [17, ['High Sierra', '10.13']], - [16, ['Sierra', '10.12']], - [15, ['El Capitan', '10.11']], - [14, ['Yosemite', '10.10']], - [13, ['Mavericks', '10.9']], - [12, ['Mountain Lion', '10.8']], - [11, ['Lion', '10.7']], - [10, ['Snow Leopard', '10.6']], - [9, ['Leopard', '10.5']], - [8, ['Tiger', '10.4']], - [7, ['Panther', '10.3']], - [6, ['Jaguar', '10.2']], - [5, ['Puma', '10.1']] -]); - -const macosRelease = release => { - release = Number((release || os.release()).split('.')[0]); - - const [name, version] = nameMap.get(release); - - return { - name, - version - }; -}; - -module.exports = macosRelease; -// TODO: remove this in the next major version -module.exports.default = macosRelease; - - -/***/ }), -/* 119 */, -/* 120 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Approach: -// -// 1. Get the minimatch set -// 2. For each pattern in the set, PROCESS(pattern, false) -// 3. Store matches per-set, then uniq them -// -// PROCESS(pattern, inGlobStar) -// Get the first [n] items from pattern that are all strings -// Join these together. This is PREFIX. -// If there is no more remaining, then stat(PREFIX) and -// add to matches if it succeeds. END. -// -// If inGlobStar and PREFIX is symlink and points to dir -// set ENTRIES = [] -// else readdir(PREFIX) as ENTRIES -// If fail, END -// -// with ENTRIES -// If pattern[n] is GLOBSTAR -// // handle the case where the globstar match is empty -// // by pruning it out, and testing the resulting pattern -// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) -// // handle other cases. -// for ENTRY in ENTRIES (not dotfiles) -// // attach globstar + tail onto the entry -// // Mark that this entry is a globstar match -// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) -// -// else // not globstar -// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) -// Test ENTRY against pattern[n] -// If fails, continue -// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) -// -// Caveat: -// Cache all stats and readdirs results to minimize syscall. Since all -// we ever care about is existence and directory-ness, we can just keep -// `true` for files, and [children,...] for directories, or `false` for -// things that don't exist. 
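// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] The PROCESS scheme
// described above is what a plain glob call drives; stats and readdirs are
// cached for the lifetime of one walk, as noted in the caveat. Assuming the
// usual glob API:
//
//   const glob = require('glob')
//   glob('src/**/*.js', { nodir: true }, (er, files) => {
//     if (er) throw er
//     console.log(files)                      // unique matches
//   })
//   const files = glob.sync('src/**/*.js')    // same walk, synchronously
// --------------------------------------------------------------------------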
- -module.exports = glob - -var fs = __webpack_require__(747) -var rp = __webpack_require__(302) -var minimatch = __webpack_require__(93) -var Minimatch = minimatch.Minimatch -var inherits = __webpack_require__(689) -var EE = __webpack_require__(614).EventEmitter -var path = __webpack_require__(622) -var assert = __webpack_require__(357) -var isAbsolute = __webpack_require__(681) -var globSync = __webpack_require__(245) -var common = __webpack_require__(856) -var alphasort = common.alphasort -var alphasorti = common.alphasorti -var setopts = common.setopts -var ownProp = common.ownProp -var inflight = __webpack_require__(674) -var util = __webpack_require__(669) -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -var once = __webpack_require__(49) - -function glob (pattern, options, cb) { - if (typeof options === 'function') cb = options, options = {} - if (!options) options = {} - - if (options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return globSync(pattern, options) - } - - return new Glob(pattern, options, cb) -} - -glob.sync = globSync -var GlobSync = glob.GlobSync = globSync.GlobSync - -// old api surface -glob.glob = glob - -function extend (origin, add) { - if (add === null || typeof add !== 'object') { - return origin - } - - var keys = Object.keys(add) - var i = keys.length - while (i--) { - origin[keys[i]] = add[keys[i]] - } - return origin -} - -glob.hasMagic = function (pattern, options_) { - var options = extend({}, options_) - options.noprocess = true - - var g = new Glob(pattern, options) - var set = g.minimatch.set - - if (!pattern) - return false - - if (set.length > 1) - return true - - for (var j = 0; j < set[0].length; j++) { - if (typeof set[0][j] !== 'string') - return true - } - - return false -} - -glob.Glob = Glob -inherits(Glob, EE) -function Glob (pattern, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - - if (options && options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return new GlobSync(pattern, options) - } - - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb) - - setopts(this, pattern, options) - this._didRealPath = false - - // process each pattern in the minimatch set - var n = this.minimatch.set.length - - // The matches are stored as {: true,...} so that - // duplicates are automagically pruned. - // Later, we do an Object.keys() on these. - // Keep them as a list so we can fill in when nonull is set. 
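// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] Because matches are
// collected as object keys, duplicates across brace expansions collapse for
// free, and nonull can fall back to the pattern itself, bash-style:
//
//   glob.sync('{*.js,*.js}')                       // each file reported once
//   glob.sync('no/such/file-*', { nonull: true })
//   // => ['no/such/file-*']  (the pattern itself, since nothing matched)
// --------------------------------------------------------------------------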
- this.matches = new Array(n) - - if (typeof cb === 'function') { - cb = once(cb) - this.on('error', cb) - this.on('end', function (matches) { - cb(null, matches) - }) - } - - var self = this - this._processing = 0 - - this._emitQueue = [] - this._processQueue = [] - this.paused = false - - if (this.noprocess) - return this - - if (n === 0) - return done() - - var sync = true - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false, done) - } - sync = false - - function done () { - --self._processing - if (self._processing <= 0) { - if (sync) { - process.nextTick(function () { - self._finish() - }) - } else { - self._finish() - } - } - } -} - -Glob.prototype._finish = function () { - assert(this instanceof Glob) - if (this.aborted) - return - - if (this.realpath && !this._didRealpath) - return this._realpath() - - common.finish(this) - this.emit('end', this.found) -} - -Glob.prototype._realpath = function () { - if (this._didRealpath) - return - - this._didRealpath = true - - var n = this.matches.length - if (n === 0) - return this._finish() - - var self = this - for (var i = 0; i < this.matches.length; i++) - this._realpathSet(i, next) - - function next () { - if (--n === 0) - self._finish() - } -} - -Glob.prototype._realpathSet = function (index, cb) { - var matchset = this.matches[index] - if (!matchset) - return cb() - - var found = Object.keys(matchset) - var self = this - var n = found.length - - if (n === 0) - return cb() - - var set = this.matches[index] = Object.create(null) - found.forEach(function (p, i) { - // If there's a problem with the stat, then it means that - // one or more of the links in the realpath couldn't be - // resolved. just return the abs value in that case. - p = self._makeAbs(p) - rp.realpath(p, self.realpathCache, function (er, real) { - if (!er) - set[real] = true - else if (er.syscall === 'stat') - set[p] = true - else - self.emit('error', er) // srsly wtf right here - - if (--n === 0) { - self.matches[index] = set - cb() - } - }) - }) -} - -Glob.prototype._mark = function (p) { - return common.mark(this, p) -} - -Glob.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} - -Glob.prototype.abort = function () { - this.aborted = true - this.emit('abort') -} - -Glob.prototype.pause = function () { - if (!this.paused) { - this.paused = true - this.emit('pause') - } -} - -Glob.prototype.resume = function () { - if (this.paused) { - this.emit('resume') - this.paused = false - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0) - this._emitQueue.length = 0 - for (var i = 0; i < eq.length; i ++) { - var e = eq[i] - this._emitMatch(e[0], e[1]) - } - } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0) - this._processQueue.length = 0 - for (var i = 0; i < pq.length; i ++) { - var p = pq[i] - this._processing-- - this._process(p[0], p[1], p[2], p[3]) - } - } - } -} - -Glob.prototype._process = function (pattern, index, inGlobStar, cb) { - assert(this instanceof Glob) - assert(typeof cb === 'function') - - if (this.aborted) - return - - this._processing++ - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]) - return - } - - //console.error('PROCESS %d', this._processing, pattern) - - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. 
- - // see if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index, cb) - return - - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break - - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } - - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix - - var abs = this._makeAbs(read) - - //if ignored, skip _processing - if (childrenIgnored(this, read)) - return cb() - - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) -} - -Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - -Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return cb() - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) - - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return cb() - - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. - - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return cb() - } - - // now test all matched entries as stand-ins for that part - // of the pattern. 
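// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] Note the dotOk rule
// above: readdir entries starting with '.' only match when the pattern
// segment itself starts with '.' or dot:true is passed. Assuming the usual
// glob API (the file names are placeholders):
//
//   glob.sync('*')                 // skips .eslintrc and friends
//   glob.sync('*', { dot: true })  // includes them
//   glob.sync('.*')                // a leading '.' in the segment also works
// --------------------------------------------------------------------------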
- remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - this._process([e].concat(remain), index, inGlobStar, cb) - } - cb() -} - -Glob.prototype._emitMatch = function (index, e) { - if (this.aborted) - return - - if (isIgnored(this, e)) - return - - if (this.paused) { - this._emitQueue.push([index, e]) - return - } - - var abs = isAbsolute(e) ? e : this._makeAbs(e) - - if (this.mark) - e = this._mark(e) - - if (this.absolute) - e = abs - - if (this.matches[index][e]) - return - - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } - - this.matches[index][e] = true - - var st = this.statCache[abs] - if (st) - this.emit('stat', e, st) - - this.emit('match', e) -} - -Glob.prototype._readdirInGlobStar = function (abs, cb) { - if (this.aborted) - return - - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false, cb) - - var lstatkey = 'lstat\0' + abs - var self = this - var lstatcb = inflight(lstatkey, lstatcb_) - - if (lstatcb) - fs.lstat(abs, lstatcb) - - function lstatcb_ (er, lstat) { - if (er && er.code === 'ENOENT') - return cb() - - var isSym = lstat && lstat.isSymbolicLink() - self.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) { - self.cache[abs] = 'FILE' - cb() - } else - self._readdir(abs, false, cb) - } -} - -Glob.prototype._readdir = function (abs, inGlobStar, cb) { - if (this.aborted) - return - - cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) - if (!cb) - return - - //console.error('RD %j %j', +inGlobStar, abs) - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return cb() - - if (Array.isArray(c)) - return cb(null, c) - } - - var self = this - fs.readdir(abs, readdirCb(this, abs, cb)) -} - -function readdirCb (self, abs, cb) { - return function (er, entries) { - if (er) - self._readdirError(abs, er, cb) - else - self._readdirEntries(abs, entries, cb) - } -} - -Glob.prototype._readdirEntries = function (abs, entries, cb) { - if (this.aborted) - return - - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } - } - - this.cache[abs] = entries - return cb(null, entries) -} - -Glob.prototype._readdirError = function (f, er, cb) { - if (this.aborted) - return - - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. 
- var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - this.emit('error', error) - this.abort() - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) { - this.emit('error', er) - // If the error is handled, then we abort - // if not, we threw out of here - this.abort() - } - if (!this.silent) - console.error('glob error', er) - break - } - - return cb() -} - -Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} - - -Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - //console.error('pgs2', prefix, remain[0], entries) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return cb() - - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false, cb) - - var isSym = this.symlinks[abs] - var len = entries.length - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return cb() - - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue - - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true, cb) - - var below = gspref.concat(entries[i], remain) - this._process(below, index, true, cb) - } - - cb() -} - -Glob.prototype._processSimple = function (prefix, index, cb) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? 
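// --------------------------------------------------------------------------
// [Illustrative sketch, not part of the bundled source] _processSimple is
// the no-magic path: a literal pattern is resolved with a single stat
// instead of a directory walk. Assuming the usual glob API:
//
//   glob.sync('package.json')                  // [] or ['package.json']
//   glob.sync('package.json/')                 // trailing '/' requires a dir
//   glob.sync('node_modules', { mark: true })  // => ['node_modules/'] if a dir
// --------------------------------------------------------------------------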
- var self = this - this._stat(prefix, function (er, exists) { - self._processSimple2(prefix, index, er, exists, cb) - }) -} -Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { - - //console.error('ps2', prefix, exists) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return cb() - - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } - - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) - cb() -} - -// Returns either 'DIR', 'FILE', or false -Glob.prototype._stat = function (f, cb) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return cb() - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return cb(null, c) - - if (needDir && c === 'FILE') - return cb() - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (stat !== undefined) { - if (stat === false) - return cb(null, stat) - else { - var type = stat.isDirectory() ? 'DIR' : 'FILE' - if (needDir && type === 'FILE') - return cb() - else - return cb(null, type, stat) - } - } - - var self = this - var statcb = inflight('stat\0' + abs, lstatcb_) - if (statcb) - fs.lstat(abs, statcb) - - function lstatcb_ (er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - // If it's a symlink, then treat it as the target, unless - // the target does not exist, then treat it as a file. - return fs.stat(abs, function (er, stat) { - if (er) - self._stat2(f, abs, null, lstat, cb) - else - self._stat2(f, abs, er, stat, cb) - }) - } else { - self._stat2(f, abs, er, lstat, cb) - } - } -} - -Glob.prototype._stat2 = function (f, abs, er, stat, cb) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return cb() - } - - var needDir = f.slice(-1) === '/' - this.statCache[abs] = stat - - if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) - return cb(null, false, stat) - - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return cb() - - return cb(null, c, stat) -} - - -/***/ }), -/* 121 */, -/* 122 */, -/* 123 */, -/* 124 */, -/* 125 */, -/* 126 */, -/* 127 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(869) -const path = __webpack_require__(622) -const util = __webpack_require__(669) -const atLeastNode = __webpack_require__(159) - -const nodeSupportsBigInt = atLeastNode('10.5.0') -const stat = (file) => nodeSupportsBigInt ? fs.stat(file, { bigint: true }) : fs.stat(file) -const statSync = (file) => nodeSupportsBigInt ? 
fs.statSync(file, { bigint: true }) : fs.statSync(file) - -function getStats (src, dest) { - return Promise.all([ - stat(src), - stat(dest).catch(err => { - if (err.code === 'ENOENT') return null - throw err - }) - ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) -} - -function getStatsSync (src, dest) { - let destStat - const srcStat = statSync(src) - try { - destStat = statSync(dest) - } catch (err) { - if (err.code === 'ENOENT') return { srcStat, destStat: null } - throw err - } - return { srcStat, destStat } -} - -function checkPaths (src, dest, funcName, cb) { - util.callbackify(getStats)(src, dest, (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - if (destStat && areIdentical(srcStat, destStat)) { - return cb(new Error('Source and destination must not be the same.')) - } - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return cb(null, { srcStat, destStat }) - }) -} - -function checkPathsSync (src, dest, funcName) { - const { srcStat, destStat } = getStatsSync(src, dest) - if (destStat && areIdentical(srcStat, destStat)) { - throw new Error('Source and destination must not be the same.') - } - if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { - throw new Error(errMsg(src, dest, funcName)) - } - return { srcStat, destStat } -} - -// recursively check if dest parent is a subdirectory of src. -// It works for all file types including symlinks since it -// checks the src and dest inodes. It starts from the deepest -// parent and stops once it reaches the src parent or the root path. -function checkParentPaths (src, srcStat, dest, funcName, cb) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() - const callback = (err, destStat) => { - if (err) { - if (err.code === 'ENOENT') return cb() - return cb(err) - } - if (areIdentical(srcStat, destStat)) { - return cb(new Error(errMsg(src, dest, funcName))) - } - return checkParentPaths(src, srcStat, destParent, funcName, cb) - } - if (nodeSupportsBigInt) fs.stat(destParent, { bigint: true }, callback) - else fs.stat(destParent, callback) -} - -function checkParentPathsSync (src, srcStat, dest, funcName) { - const srcParent = path.resolve(path.dirname(src)) - const destParent = path.resolve(path.dirname(dest)) - if (destParent === srcParent || destParent === path.parse(destParent).root) return - let destStat - try { - destStat = statSync(destParent) - } catch (err) { - if (err.code === 'ENOENT') return - throw err - } - if (areIdentical(srcStat, destStat)) { - throw new Error(errMsg(src, dest, funcName)) - } - return checkParentPathsSync(src, srcStat, destParent, funcName) -} - -function areIdentical (srcStat, destStat) { - if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { - if (nodeSupportsBigInt || destStat.ino < Number.MAX_SAFE_INTEGER) { - // definitive answer - return true - } - // Use additional heuristics if we can't use 'bigint'. 
- // Different 'ino' could be represented the same if they are >= Number.MAX_SAFE_INTEGER - // See issue 657 - if (destStat.size === srcStat.size && - destStat.mode === srcStat.mode && - destStat.nlink === srcStat.nlink && - destStat.atimeMs === srcStat.atimeMs && - destStat.mtimeMs === srcStat.mtimeMs && - destStat.ctimeMs === srcStat.ctimeMs && - destStat.birthtimeMs === srcStat.birthtimeMs) { - // heuristic answer - return true - } - } - return false -} - -// return true if dest is a subdir of src, otherwise false. -// It only checks the path strings. -function isSrcSubdir (src, dest) { - const srcArr = path.resolve(src).split(path.sep).filter(i => i) - const destArr = path.resolve(dest).split(path.sep).filter(i => i) - return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) -} - -function errMsg (src, dest, funcName) { - return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` -} - -module.exports = { - checkPaths, - checkPathsSync, - checkParentPaths, - checkParentPathsSync, - isSrcSubdir -} - - -/***/ }), -/* 128 */, -/* 129 */ -/***/ (function(module) { - -module.exports = require("child_process"); - -/***/ }), -/* 130 */, -/* 131 */, -/* 132 */, -/* 133 */, -/* 134 */, -/* 135 */, -/* 136 */, -/* 137 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { - checkoutDefinitionTree, - getPlaceHolders -} = __webpack_require__(330); -const { executeBuild } = __webpack_require__(11); -const { - getOrderedListForProject -} = __webpack_require__(594); - -const { printCheckoutInformation } = __webpack_require__(656); -const { logger } = __webpack_require__(79); -const core = __webpack_require__(470); -const { - archiveArtifacts -} = __webpack_require__(503); - -async function start(context, isArchiveArtifacts = true) { - core.startGroup( - `[Full Downstream Flow] Checking out ${context.config.github.groupProject} and its dependencies` - ); - const nodeChain = await getOrderedListForProject( - context.config.github.inputs.definitionFile, - context.config.github.repository, - await getPlaceHolders(context, context.config.github.inputs.definitionFile) - ); - - logger.info( - `Tree for project ${ - context.config.github.repository - }. Chain: ${nodeChain.map(node => "\n" + node.project)}` - ); - const checkoutInfo = await checkoutDefinitionTree(context, nodeChain); - core.endGroup(); - - core.startGroup(`[Full Downstream Flow] Checkout Summary...`); - printCheckoutInformation(checkoutInfo); - core.endGroup(); - - const executionResult = await executeBuild( - context.config.rootFolder, - nodeChain, - context.config.github.repository - ) - .then(() => true) - .catch(e => e); - - if (isArchiveArtifacts) { - core.startGroup(`[Full Downstream Flow] Archiving artifacts...`); - await archiveArtifacts( - nodeChain.find(node => node.project === context.config.github.repository), - nodeChain, - executionResult === true ? 
["success", "always"] : ["failure", "always"] - ); - core.endGroup(); - } else { - logger.info("Archive artifact won't be executed"); - } - - if (executionResult !== true) { - logger.error(executionResult); - throw new Error( - `Command executions have failed, please review latest execution ${executionResult}` - ); - } -} - -module.exports = { - start -}; - - -/***/ }), -/* 138 */, -/* 139 */, -/* 140 */, -/* 141 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -var net = __webpack_require__(631); -var tls = __webpack_require__(16); -var http = __webpack_require__(605); -var https = __webpack_require__(211); -var events = __webpack_require__(614); -var assert = __webpack_require__(357); -var util = __webpack_require__(669); - - -exports.httpOverHttp = httpOverHttp; -exports.httpsOverHttp = httpsOverHttp; -exports.httpOverHttps = httpOverHttps; -exports.httpsOverHttps = httpsOverHttps; - - -function httpOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - return agent; -} - -function httpsOverHttp(options) { - var agent = new TunnelingAgent(options); - agent.request = http.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - -function httpOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - return agent; -} - -function httpsOverHttps(options) { - var agent = new TunnelingAgent(options); - agent.request = https.request; - agent.createSocket = createSecureSocket; - agent.defaultPort = 443; - return agent; -} - - -function TunnelingAgent(options) { - var self = this; - self.options = options || {}; - self.proxyOptions = self.options.proxy || {}; - self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; - self.requests = []; - self.sockets = []; - - self.on('free', function onFree(socket, host, port, localAddress) { - var options = toOptions(host, port, localAddress); - for (var i = 0, len = self.requests.length; i < len; ++i) { - var pending = self.requests[i]; - if (pending.host === options.host && pending.port === options.port) { - // Detect the request to connect same origin server, - // reuse the connection. - self.requests.splice(i, 1); - pending.request.onSocket(socket); - return; - } - } - socket.destroy(); - self.removeSocket(socket); - }); -} -util.inherits(TunnelingAgent, events.EventEmitter); - -TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { - var self = this; - var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); - - if (self.sockets.length >= this.maxSockets) { - // We are over limit so we'll add it to the queue. - self.requests.push(options); - return; - } - - // If we are under maxSockets create a new one. 
- self.createSocket(options, function(socket) { - socket.on('free', onFree); - socket.on('close', onCloseOrRemove); - socket.on('agentRemove', onCloseOrRemove); - req.onSocket(socket); - - function onFree() { - self.emit('free', socket, options); - } - - function onCloseOrRemove(err) { - self.removeSocket(socket); - socket.removeListener('free', onFree); - socket.removeListener('close', onCloseOrRemove); - socket.removeListener('agentRemove', onCloseOrRemove); - } - }); -}; - -TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { - var self = this; - var placeholder = {}; - self.sockets.push(placeholder); - - var connectOptions = mergeOptions({}, self.proxyOptions, { - method: 'CONNECT', - path: options.host + ':' + options.port, - agent: false, - headers: { - host: options.host + ':' + options.port - } - }); - if (options.localAddress) { - connectOptions.localAddress = options.localAddress; - } - if (connectOptions.proxyAuth) { - connectOptions.headers = connectOptions.headers || {}; - connectOptions.headers['Proxy-Authorization'] = 'Basic ' + - new Buffer(connectOptions.proxyAuth).toString('base64'); - } - - debug('making CONNECT request'); - var connectReq = self.request(connectOptions); - connectReq.useChunkedEncodingByDefault = false; // for v0.6 - connectReq.once('response', onResponse); // for v0.6 - connectReq.once('upgrade', onUpgrade); // for v0.6 - connectReq.once('connect', onConnect); // for v0.7 or later - connectReq.once('error', onError); - connectReq.end(); - - function onResponse(res) { - // Very hacky. This is necessary to avoid http-parser leaks. - res.upgrade = true; - } - - function onUpgrade(res, socket, head) { - // Hacky. - process.nextTick(function() { - onConnect(res, socket, head); - }); - } - - function onConnect(res, socket, head) { - connectReq.removeAllListeners(); - socket.removeAllListeners(); - - if (res.statusCode !== 200) { - debug('tunneling socket could not be established, statusCode=%d', - res.statusCode); - socket.destroy(); - var error = new Error('tunneling socket could not be established, ' + - 'statusCode=' + res.statusCode); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - if (head.length > 0) { - debug('got illegal response body from proxy'); - socket.destroy(); - var error = new Error('got illegal response body from proxy'); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - return; - } - debug('tunneling connection has established'); - self.sockets[self.sockets.indexOf(placeholder)] = socket; - return cb(socket); - } - - function onError(cause) { - connectReq.removeAllListeners(); - - debug('tunneling socket could not be established, cause=%s\n', - cause.message, cause.stack); - var error = new Error('tunneling socket could not be established, ' + - 'cause=' + cause.message); - error.code = 'ECONNRESET'; - options.request.emit('error', error); - self.removeSocket(placeholder); - } -}; - -TunnelingAgent.prototype.removeSocket = function removeSocket(socket) { - var pos = this.sockets.indexOf(socket) - if (pos === -1) { - return; - } - this.sockets.splice(pos, 1); - - var pending = this.requests.shift(); - if (pending) { - // If we have pending requests and a socket gets closed a new one - // needs to be created to take over in the pool for the one that closed. 
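// So closing a pooled socket never starves queued work: the oldest
// pending request is dequeued above and handed a brand-new tunnel
// socket below.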
- this.createSocket(pending, function(socket) { - pending.request.onSocket(socket); - }); - } -}; - -function createSecureSocket(options, cb) { - var self = this; - TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { - var hostHeader = options.request.getHeader('host'); - var tlsOptions = mergeOptions({}, self.options, { - socket: socket, - servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host - }); - - // 0 is dummy port for v0.6 - var secureSocket = tls.connect(0, tlsOptions); - self.sockets[self.sockets.indexOf(socket)] = secureSocket; - cb(secureSocket); - }); -} - - -function toOptions(host, port, localAddress) { - if (typeof host === 'string') { // since v0.10 - return { - host: host, - port: port, - localAddress: localAddress - }; - } - return host; // for v0.11 or later -} - -function mergeOptions(target) { - for (var i = 1, len = arguments.length; i < len; ++i) { - var overrides = arguments[i]; - if (typeof overrides === 'object') { - var keys = Object.keys(overrides); - for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { - var k = keys[j]; - if (overrides[k] !== undefined) { - target[k] = overrides[k]; - } - } - } - } - return target; -} - - -var debug; -if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { - debug = function() { - var args = Array.prototype.slice.call(arguments); - if (typeof args[0] === 'string') { - args[0] = 'TUNNEL: ' + args[0]; - } else { - args.unshift('TUNNEL:'); - } - console.error.apply(console, args); - } -} else { - debug = function() {}; -} -exports.debug = debug; // for test - - -/***/ }), -/* 142 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -function resolveYamlBoolean(data) { - if (data === null) return false; - - var max = data.length; - - return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) || - (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE')); -} - -function constructYamlBoolean(data) { - return data === 'true' || - data === 'True' || - data === 'TRUE'; -} - -function isBoolean(object) { - return Object.prototype.toString.call(object) === '[object Boolean]'; -} - -module.exports = new Type('tag:yaml.org,2002:bool', { - kind: 'scalar', - resolve: resolveYamlBoolean, - construct: constructYamlBoolean, - predicate: isBoolean, - represent: { - lowercase: function (object) { return object ? 'true' : 'false'; }, - uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; }, - camelcase: function (object) { return object ? 
'True' : 'False'; } - }, - defaultStyle: 'lowercase' -}); - - -/***/ }), -/* 143 */, -/* 144 */, -/* 145 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const pump = __webpack_require__(453); -const bufferStream = __webpack_require__(966); - -class MaxBufferError extends Error { - constructor() { - super('maxBuffer exceeded'); - this.name = 'MaxBufferError'; - } -} - -function getStream(inputStream, options) { - if (!inputStream) { - return Promise.reject(new Error('Expected a stream')); - } - - options = Object.assign({maxBuffer: Infinity}, options); - - const {maxBuffer} = options; - - let stream; - return new Promise((resolve, reject) => { - const rejectPromise = error => { - if (error) { // A null check - error.bufferedData = stream.getBufferedValue(); - } - reject(error); - }; - - stream = pump(inputStream, bufferStream(options), error => { - if (error) { - rejectPromise(error); - return; - } - - resolve(); - }); - - stream.on('data', () => { - if (stream.getBufferedLength() > maxBuffer) { - rejectPromise(new MaxBufferError()); - } - }); - }).then(() => stream.getBufferedValue()); -} - -module.exports = getStream; -module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); -module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); -module.exports.MaxBufferError = MaxBufferError; - - -/***/ }), -/* 146 */, -/* 147 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var common = __webpack_require__(436); -var Type = __webpack_require__(547); - -function isHexCode(c) { - return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || - ((0x41/* A */ <= c) && (c <= 0x46/* F */)) || - ((0x61/* a */ <= c) && (c <= 0x66/* f */)); -} - -function isOctCode(c) { - return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */)); -} - -function isDecCode(c) { - return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)); -} - -function resolveYamlInteger(data) { - if (data === null) return false; - - var max = data.length, - index = 0, - hasDigits = false, - ch; - - if (!max) return false; - - ch = data[index]; - - // sign - if (ch === '-' || ch === '+') { - ch = data[++index]; - } - - if (ch === '0') { - // 0 - if (index + 1 === max) return true; - ch = data[++index]; - - // base 2, base 8, base 16 - - if (ch === 'b') { - // base 2 - index++; - - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (ch !== '0' && ch !== '1') return false; - hasDigits = true; - } - return hasDigits && ch !== '_'; - } - - - if (ch === 'x') { - // base 16 - index++; - - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (!isHexCode(data.charCodeAt(index))) return false; - hasDigits = true; - } - return hasDigits && ch !== '_'; - } - - // base 8 - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (!isOctCode(data.charCodeAt(index))) return false; - hasDigits = true; - } - return hasDigits && ch !== '_'; - } - - // base 10 (except 0) or base 60 - - // value should not start with `_`; - if (ch === '_') return false; - - for (; index < max; index++) { - ch = data[index]; - if (ch === '_') continue; - if (ch === ':') break; - if (!isDecCode(data.charCodeAt(index))) { - return false; - } - hasDigits = true; - } - - // Should have digits and should not end with `_` - if (!hasDigits || ch === '_') return false; - - // if !base60 - done; - if (ch !== ':') return true; - - // base60 
almost not used, no needs to optimize - return /^(:[0-5]?[0-9])+$/.test(data.slice(index)); -} - -function constructYamlInteger(data) { - var value = data, sign = 1, ch, base, digits = []; - - if (value.indexOf('_') !== -1) { - value = value.replace(/_/g, ''); - } - - ch = value[0]; - - if (ch === '-' || ch === '+') { - if (ch === '-') sign = -1; - value = value.slice(1); - ch = value[0]; - } - - if (value === '0') return 0; - - if (ch === '0') { - if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); - if (value[1] === 'x') return sign * parseInt(value, 16); - return sign * parseInt(value, 8); - } - - if (value.indexOf(':') !== -1) { - value.split(':').forEach(function (v) { - digits.unshift(parseInt(v, 10)); - }); - - value = 0; - base = 1; - - digits.forEach(function (d) { - value += (d * base); - base *= 60; - }); - - return sign * value; - - } - - return sign * parseInt(value, 10); -} - -function isInteger(object) { - return (Object.prototype.toString.call(object)) === '[object Number]' && - (object % 1 === 0 && !common.isNegativeZero(object)); -} - -module.exports = new Type('tag:yaml.org,2002:int', { - kind: 'scalar', - resolve: resolveYamlInteger, - construct: constructYamlInteger, - predicate: isInteger, - represent: { - binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, - octal: function (obj) { return obj >= 0 ? '0' + obj.toString(8) : '-0' + obj.toString(8).slice(1); }, - decimal: function (obj) { return obj.toString(10); }, - /* eslint-disable max-len */ - hexadecimal: function (obj) { return obj >= 0 ? '0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } - }, - defaultStyle: 'decimal', - styleAliases: { - binary: [ 2, 'bin' ], - octal: [ 8, 'oct' ], - decimal: [ 10, 'dec' ], - hexadecimal: [ 16, 'hex' ] - } -}); - - -/***/ }), -/* 148 */, -/* 149 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const u = __webpack_require__(0).fromCallback -const path = __webpack_require__(622) -const fs = __webpack_require__(598) -const mkdir = __webpack_require__(884) - -function createFile (file, callback) { - function makeFile () { - fs.writeFile(file, '', err => { - if (err) return callback(err) - callback() - }) - } - - fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err - if (!err && stats.isFile()) return callback() - const dir = path.dirname(file) - fs.stat(dir, (err, stats) => { - if (err) { - // if the directory doesn't exist, make it - if (err.code === 'ENOENT') { - return mkdir.mkdirs(dir, err => { - if (err) return callback(err) - makeFile() - }) - } - return callback(err) - } - - if (stats.isDirectory()) makeFile() - else { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdir(dir, err => { - if (err) return callback(err) - }) - } - }) - }) -} - -function createFileSync (file) { - let stats - try { - stats = fs.statSync(file) - } catch {} - if (stats && stats.isFile()) return - - const dir = path.dirname(file) - try { - if (!fs.statSync(dir).isDirectory()) { - // parent is not a directory - // This is just to cause an internal ENOTDIR error to be thrown - fs.readdirSync(dir) - } - } catch (err) { - // If the stat call above failed because the directory doesn't exist, create it - if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) - else throw err - } - - fs.writeFileSync(file, '') -} - -module.exports = { - createFile: u(createFile), - createFileSync -} - 
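// A minimal usage sketch for the ensure-file helpers above, assuming they
// are consumed through the fs-extra facade bundled below (module 232); the
// /tmp paths are illustrative only.
const fse = require('fs-extra')

// createFile is wrapped with fromCallback, so both styles work; missing
// parent directories are created via mkdirs before the empty file is
// written.
fse.createFile('/tmp/example/a.txt', err => {
  if (err) throw err
})
fse.createFile('/tmp/example/b.txt').then(() => {
  // promise style, same semantics
})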
- -/***/ }), -/* 150 */, -/* 151 */, -/* 152 */, -/* 153 */, -/* 154 */, -/* 155 */, -/* 156 */, -/* 157 */, -/* 158 */, -/* 159 */ -/***/ (function(module) { - -module.exports = r => { - const n = process.versions.node.split('.').map(x => parseInt(x, 10)) - r = r.split('.').map(x => parseInt(x, 10)) - return n[0] > r[0] || (n[0] === r[0] && (n[1] > r[1] || (n[1] === r[1] && n[2] >= r[2]))) -} - - -/***/ }), -/* 160 */, -/* 161 */, -/* 162 */, -/* 163 */, -/* 164 */, -/* 165 */, -/* 166 */, -/* 167 */, -/* 168 */ -/***/ (function(module) { - -"use strict"; - -const alias = ['stdin', 'stdout', 'stderr']; - -const hasAlias = opts => alias.some(x => Boolean(opts[x])); - -module.exports = opts => { - if (!opts) { - return null; - } - - if (opts.stdio && hasAlias(opts)) { - throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); - } - - if (typeof opts.stdio === 'string') { - return opts.stdio; - } - - const stdio = opts.stdio || []; - - if (!Array.isArray(stdio)) { - throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); - } - - const result = []; - const len = Math.max(stdio.length, alias.length); - - for (let i = 0; i < len; i++) { - let value = null; - - if (stdio[i] !== undefined) { - value = stdio[i]; - } else if (opts[alias[i]] !== undefined) { - value = opts[alias[i]]; - } - - result[i] = value; - } - - return result; -}; - - -/***/ }), -/* 169 */, -/* 170 */, -/* 171 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const u = __webpack_require__(0).fromPromise -const jsonFile = __webpack_require__(469) - -jsonFile.outputJson = u(__webpack_require__(695)) -jsonFile.outputJsonSync = __webpack_require__(628) -// aliases -jsonFile.outputJSON = jsonFile.outputJson -jsonFile.outputJSONSync = jsonFile.outputJsonSync -jsonFile.writeJSON = jsonFile.writeJson -jsonFile.writeJSONSync = jsonFile.writeJsonSync -jsonFile.readJSON = jsonFile.readJson -jsonFile.readJSONSync = jsonFile.readJsonSync - -module.exports = jsonFile - - -/***/ }), -/* 172 */, -/* 173 */, -/* 174 */, -/* 175 */, -/* 176 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const core_1 = __webpack_require__(470); -/** - * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded - * - * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable - * The total status of the upload/download gets displayed according to this value - * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function - */ -class StatusReporter { - constructor(displayFrequencyInMilliseconds) { - this.totalNumberOfFilesToProcess = 0; - this.processedCount = 0; - this.largeFiles = new Map(); - this.totalFileStatus = undefined; - this.largeFileStatus = undefined; - this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; - } - setTotalNumberOfFilesToProcess(fileTotal) { - this.totalNumberOfFilesToProcess = fileTotal; - } - start() { - // displays information about the total upload/download status - this.totalFileStatus = setInterval(() => { - // display 1 decimal place without any rounding - const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); - 
core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); - }, this.displayFrequencyInMilliseconds); - // displays extra information about any large files that take a significant amount of time to upload or download every 1 second - this.largeFileStatus = setInterval(() => { - for (const value of Array.from(this.largeFiles.values())) { - core_1.info(value); - } - // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added - this.largeFiles.clear(); - }, 1000); - } - // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload - updateLargeFileStatus(fileName, numerator, denominator) { - // display 1 decimal place without any rounding - const percentage = this.formatPercentage(numerator, denominator); - const displayInformation = `Uploading ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`; - // any previously added display information should be overwritten for the specific large file because a map is being used - this.largeFiles.set(fileName, displayInformation); - } - stop() { - if (this.totalFileStatus) { - clearInterval(this.totalFileStatus); - } - if (this.largeFileStatus) { - clearInterval(this.largeFileStatus); - } - } - incrementProcessedCount() { - this.processedCount++; - } - formatPercentage(numerator, denominator) { - // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed - return ((numerator / denominator) * 100).toFixed(4).toString(); - } -} -exports.StatusReporter = StatusReporter; -//# sourceMappingURL=status-reporter.js.map - -/***/ }), -/* 177 */, -/* 178 */, -/* 179 */, -/* 180 */, -/* 181 */, -/* 182 */, -/* 183 */, -/* 184 */, -/* 185 */, -/* 186 */, -/* 187 */, -/* 188 */, -/* 189 */, -/* 190 */, -/* 191 */, -/* 192 */, -/* 193 */, -/* 194 */, -/* 195 */, -/* 196 */, -/* 197 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = isexe -isexe.sync = sync - -var fs = __webpack_require__(747) - -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, options)) - }) -} - -function sync (path, options) { - return checkStat(fs.statSync(path), options) -} - -function checkStat (stat, options) { - return stat.isFile() && checkMode(stat, options) -} - -function checkMode (stat, options) { - var mod = stat.mode - var uid = stat.uid - var gid = stat.gid - - var myUid = options.uid !== undefined ? - options.uid : process.getuid && process.getuid() - var myGid = options.gid !== undefined ? - options.gid : process.getgid && process.getgid() - - var u = parseInt('100', 8) - var g = parseInt('010', 8) - var o = parseInt('001', 8) - var ug = u | g - - var ret = (mod & o) || - (mod & g) && gid === myGid || - (mod & u) && uid === myUid || - (mod & ug) && myUid === 0 - - return ret -} - - -/***/ }), -/* 198 */, -/* 199 */, -/* 200 */, -/* 201 */, -/* 202 */, -/* 203 */, -/* 204 */, -/* 205 */, -/* 206 */, -/* 207 */, -/* 208 */ -/***/ (function(module) { - -// This is not the set of all possible signals. -// -// It IS, however, the set of all signals that trigger -// an exit on either Linux or BSD systems. Linux is a -// superset of the signal names supported on BSD, and -// the unknown signals just fail to register, so we can -// catch that easily enough. 
-// -// Don't bother with SIGKILL. It's uncatchable, which -// means that we can't fire any callbacks anyway. -// -// If a user does happen to register a handler on a non- -// fatal signal like SIGWINCH or something, and then -// exit, it'll end up firing `process.emit('exit')`, so -// the handler will be fired anyway. -// -// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised -// artificially, inherently leave the process in a -// state from which it is not safe to try and enter JS -// listeners. -module.exports = [ - 'SIGABRT', - 'SIGALRM', - 'SIGHUP', - 'SIGINT', - 'SIGTERM' -] - -if (process.platform !== 'win32') { - module.exports.push( - 'SIGVTALRM', - 'SIGXCPU', - 'SIGXFSZ', - 'SIGUSR2', - 'SIGTRAP', - 'SIGSYS', - 'SIGQUIT', - 'SIGIOT' - // should detect profiler and enable/disable accordingly. - // see #21 - // 'SIGPROF' - ) -} - -if (process.platform === 'linux') { - module.exports.push( - 'SIGIO', - 'SIGPOLL', - 'SIGPWR', - 'SIGSTKFLT', - 'SIGUNUSED' - ) -} - - -/***/ }), -/* 209 */, -/* 210 */, -/* 211 */ -/***/ (function(module) { - -module.exports = require("https"); - -/***/ }), -/* 212 */, -/* 213 */, -/* 214 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const artifact_client_1 = __webpack_require__(359); -/** - * Constructs an ArtifactClient - */ -function create() { - return artifact_client_1.DefaultArtifactClient.create(); -} -exports.create = create; -//# sourceMappingURL=artifact-client.js.map - -/***/ }), -/* 215 */, -/* 216 */, -/* 217 */, -/* 218 */, -/* 219 */, -/* 220 */, -/* 221 */, -/* 222 */, -/* 223 */, -/* 224 */, -/* 225 */, -/* 226 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -class BasicCredentialHandler { - constructor(username, password) { - this.username = username; - this.password = password; - } - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + - Buffer.from(this.username + ':' + this.password).toString('base64'); - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.BasicCredentialHandler = BasicCredentialHandler; -class BearerCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = 'Bearer ' + this.token; - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.BearerCredentialHandler = BearerCredentialHandler; -class PersonalAccessTokenCredentialHandler { - constructor(token) { - this.token = token; - } - // currently implements pre-authorization - // TODO: support preAuth = false where it hooks on 401 - prepareRequest(options) { - options.headers['Authorization'] = - 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); - } - // This handler cannot handle 401 - canHandleAuthentication(response) { - return false; - } - handleAuthentication(httpClient, requestInfo, objs) { - return null; - } -} -exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; - - -/***/ }), -/* 227 */, -/* 228 */, -/* 229 */, -/* 230 */, -/* 231 */, -/* 232 */ -/***/ (function(module, 
__unusedexports, __webpack_require__) { - -"use strict"; - - -module.exports = { - // Export promiseified graceful-fs: - ...__webpack_require__(869), - // Export extra methods: - ...__webpack_require__(43), - ...__webpack_require__(774), - ...__webpack_require__(615), - ...__webpack_require__(472), - ...__webpack_require__(171), - ...__webpack_require__(884), - ...__webpack_require__(959), - ...__webpack_require__(353), - ...__webpack_require__(517), - ...__webpack_require__(322), - ...__webpack_require__(723) -} - -// Export fs.promises as a getter property so that we don't trigger -// ExperimentalWarning before fs.promises is actually accessed. -const fs = __webpack_require__(747) -if (Object.getOwnPropertyDescriptor(fs, 'promises')) { - Object.defineProperty(module.exports, 'promises', { - get () { return fs.promises } - }) -} - - -/***/ }), -/* 233 */, -/* 234 */, -/* 235 */, -/* 236 */, -/* 237 */, -/* 238 */, -/* 239 */, -/* 240 */, -/* 241 */, -/* 242 */, -/* 243 */, -/* 244 */, -/* 245 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = globSync -globSync.GlobSync = GlobSync - -var fs = __webpack_require__(747) -var rp = __webpack_require__(302) -var minimatch = __webpack_require__(93) -var Minimatch = minimatch.Minimatch -var Glob = __webpack_require__(120).Glob -var util = __webpack_require__(669) -var path = __webpack_require__(622) -var assert = __webpack_require__(357) -var isAbsolute = __webpack_require__(681) -var common = __webpack_require__(856) -var alphasort = common.alphasort -var alphasorti = common.alphasorti -var setopts = common.setopts -var ownProp = common.ownProp -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -function globSync (pattern, options) { - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - return new GlobSync(pattern, options).found -} - -function GlobSync (pattern, options) { - if (!pattern) - throw new Error('must provide pattern') - - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') - - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options) - - setopts(this, pattern, options) - - if (this.noprocess) - return this - - var n = this.minimatch.set.length - this.matches = new Array(n) - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false) - } - this._finish() -} - -GlobSync.prototype._finish = function () { - assert(this instanceof GlobSync) - if (this.realpath) { - var self = this - this.matches.forEach(function (matchset, index) { - var set = self.matches[index] = Object.create(null) - for (var p in matchset) { - try { - p = self._makeAbs(p) - var real = rp.realpathSync(p, self.realpathCache) - set[real] = true - } catch (er) { - if (er.syscall === 'stat') - set[self._makeAbs(p)] = true - else - throw er - } - } - }) - } - common.finish(this) -} - - -GlobSync.prototype._process = function (pattern, index, inGlobStar) { - assert(this instanceof GlobSync) - - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. 
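// Worked example: for the parsed pattern ['src', '**', '*.js'] only the
// first element is still a plain string (the other two compiled to magic
// matchers), so n === 1, prefix becomes 'src', remain becomes
// ['**', '*.js'], and the walk below starts reading from 'src'.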
- - // See if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index) - return - - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break - - default: - // pattern has some string bits in the front. - // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } - - var remain = pattern.slice(n) - - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix - - var abs = this._makeAbs(read) - - //if ignored, skip processing - if (childrenIgnored(this, read)) - return - - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar) -} - - -GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar) - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return - - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. - - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix.slice(-1) !== '/') - e = prefix + '/' + e - else - e = prefix + e - } - - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return - } - - // now test all matched entries as stand-ins for that part - // of the pattern. 
- remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) - newPattern = [prefix, e] - else - newPattern = [e] - this._process(newPattern.concat(remain), index, inGlobStar) - } -} - - -GlobSync.prototype._emitMatch = function (index, e) { - if (isIgnored(this, e)) - return - - var abs = this._makeAbs(e) - - if (this.mark) - e = this._mark(e) - - if (this.absolute) { - e = abs - } - - if (this.matches[index][e]) - return - - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } - - this.matches[index][e] = true - - if (this.stat) - this._stat(e) -} - - -GlobSync.prototype._readdirInGlobStar = function (abs) { - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false) - - var entries - var lstat - var stat - try { - lstat = fs.lstatSync(abs) - } catch (er) { - if (er.code === 'ENOENT') { - // lstat failed, doesn't exist - return null - } - } - - var isSym = lstat && lstat.isSymbolicLink() - this.symlinks[abs] = isSym - - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. - if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = 'FILE' - else - entries = this._readdir(abs, false) - - return entries -} - -GlobSync.prototype._readdir = function (abs, inGlobStar) { - var entries - - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs) - - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return null - - if (Array.isArray(c)) - return c - } - - try { - return this._readdirEntries(abs, fs.readdirSync(abs)) - } catch (er) { - this._readdirError(abs, er) - return null - } -} - -GlobSync.prototype._readdirEntries = function (abs, entries) { - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true - } - } - - this.cache[abs] = entries - - // mark and cache dir-ness - return entries -} - -GlobSync.prototype._readdirError = function (f, er) { - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. - var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - throw error - } - break - - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break - - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) - throw er - if (!this.silent) - console.error('glob error', er) - break - } -} - -GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { - - var entries = this._readdir(abs, inGlobStar) - - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return - - // test without the globstar, and with every child both below - // and replacing the globstar. 
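// Worked example: with prefix 'a', remain ['**', 'b'] and a single
// directory entry 'c', the calls below queue three sub-patterns:
//   a/b       (the globstar matches zero segments)
//   a/c/b     ('instead': c consumes the globstar)
//   a/c/**/b  ('below': the globstar keeps descending under c)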
- var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) - - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false) - - var len = entries.length - var isSym = this.symlinks[abs] - - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return - - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue - - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true) - - var below = gspref.concat(entries[i], remain) - this._process(below, index, true) - } -} - -GlobSync.prototype._processSimple = function (prefix, index) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var exists = this._stat(prefix) - - if (!this.matches[index]) - this.matches[index] = Object.create(null) - - // If it doesn't exist, then just mark the lack of results - if (!exists) - return - - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } - - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') - - // Mark this as a match - this._emitMatch(index, prefix) -} - -// Returns either 'DIR', 'FILE', or false -GlobSync.prototype._stat = function (f) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' - - if (f.length > this.maxLength) - return false - - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] - - if (Array.isArray(c)) - c = 'DIR' - - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return c - - if (needDir && c === 'FILE') - return false - - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } - - var exists - var stat = this.statCache[abs] - if (!stat) { - var lstat - try { - lstat = fs.lstatSync(abs) - } catch (er) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return false - } - } - - if (lstat && lstat.isSymbolicLink()) { - try { - stat = fs.statSync(abs) - } catch (er) { - stat = lstat - } - } else { - stat = lstat - } - } - - this.statCache[abs] = stat - - var c = true - if (stat) - c = stat.isDirectory() ? 
'DIR' : 'FILE' - - this.cache[abs] = this.cache[abs] || c - - if (needDir && c === 'FILE') - return false - - return c -} - -GlobSync.prototype._mark = function (p) { - return common.mark(this, p) -} - -GlobSync.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} - - -/***/ }), -/* 246 */, -/* 247 */, -/* 248 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -function resolveJavascriptRegExp(data) { - if (data === null) return false; - if (data.length === 0) return false; - - var regexp = data, - tail = /\/([gim]*)$/.exec(data), - modifiers = ''; - - // if regexp starts with '/' it can have modifiers and must be properly closed - // `/foo/gim` - modifiers tail can be maximum 3 chars - if (regexp[0] === '/') { - if (tail) modifiers = tail[1]; - - if (modifiers.length > 3) return false; - // if expression starts with /, is should be properly terminated - if (regexp[regexp.length - modifiers.length - 1] !== '/') return false; - } - - return true; -} - -function constructJavascriptRegExp(data) { - var regexp = data, - tail = /\/([gim]*)$/.exec(data), - modifiers = ''; - - // `/foo/gim` - tail can be maximum 4 chars - if (regexp[0] === '/') { - if (tail) modifiers = tail[1]; - regexp = regexp.slice(1, regexp.length - modifiers.length - 1); - } - - return new RegExp(regexp, modifiers); -} - -function representJavascriptRegExp(object /*, style*/) { - var result = '/' + object.source + '/'; - - if (object.global) result += 'g'; - if (object.multiline) result += 'm'; - if (object.ignoreCase) result += 'i'; - - return result; -} - -function isRegExp(object) { - return Object.prototype.toString.call(object) === '[object RegExp]'; -} - -module.exports = new Type('tag:yaml.org,2002:js/regexp', { - kind: 'scalar', - resolve: resolveJavascriptRegExp, - construct: constructJavascriptRegExp, - predicate: isRegExp, - represent: representJavascriptRegExp -}); - - -/***/ }), -/* 249 */, -/* 250 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var constants = __webpack_require__(619) - -var origCwd = process.cwd -var cwd = null - -var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform - -process.cwd = function() { - if (!cwd) - cwd = origCwd.call(process) - return cwd -} -try { - process.cwd() -} catch (er) {} - -var chdir = process.chdir -process.chdir = function(d) { - cwd = null - chdir.call(process, d) -} - -module.exports = patch - -function patch (fs) { - // (re-)implement some things that are known busted or missing. - - // lchmod, broken prior to 0.6.2 - // back-port the fix here. - if (constants.hasOwnProperty('O_SYMLINK') && - process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { - patchLchmod(fs) - } - - // lutimes implementation, or no-op - if (!fs.lutimes) { - patchLutimes(fs) - } - - // https://github.com/isaacs/node-graceful-fs/issues/4 - // Chown should not fail on einval or eperm if non-root. - // It should not fail on enosys ever, as this just indicates - // that a fs doesn't support the intended operation. 
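// The chmod/chown wrappers installed below route failures through
// chownErOk (defined near the end of this function), so EINVAL/EPERM for
// non-root callers and ENOSYS are swallowed; the stat wrappers instead
// normalize the negative uid/gid values returned by older Node releases.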
- - fs.chown = chownFix(fs.chown) - fs.fchown = chownFix(fs.fchown) - fs.lchown = chownFix(fs.lchown) - - fs.chmod = chmodFix(fs.chmod) - fs.fchmod = chmodFix(fs.fchmod) - fs.lchmod = chmodFix(fs.lchmod) - - fs.chownSync = chownFixSync(fs.chownSync) - fs.fchownSync = chownFixSync(fs.fchownSync) - fs.lchownSync = chownFixSync(fs.lchownSync) - - fs.chmodSync = chmodFixSync(fs.chmodSync) - fs.fchmodSync = chmodFixSync(fs.fchmodSync) - fs.lchmodSync = chmodFixSync(fs.lchmodSync) - - fs.stat = statFix(fs.stat) - fs.fstat = statFix(fs.fstat) - fs.lstat = statFix(fs.lstat) - - fs.statSync = statFixSync(fs.statSync) - fs.fstatSync = statFixSync(fs.fstatSync) - fs.lstatSync = statFixSync(fs.lstatSync) - - // if lchmod/lchown do not exist, then make them no-ops - if (!fs.lchmod) { - fs.lchmod = function (path, mode, cb) { - if (cb) process.nextTick(cb) - } - fs.lchmodSync = function () {} - } - if (!fs.lchown) { - fs.lchown = function (path, uid, gid, cb) { - if (cb) process.nextTick(cb) - } - fs.lchownSync = function () {} - } - - // on Windows, A/V software can lock the directory, causing this - // to fail with an EACCES or EPERM if the directory contains newly - // created files. Try again on failure, for up to 60 seconds. - - // Set the timeout this long because some Windows Anti-Virus, such as Parity - // bit9, may lock files for up to a minute, causing npm package install - // failures. Also, take care to yield the scheduler. Windows scheduling gives - // CPU to a busy looping process, which can cause the program causing the lock - // contention to be starved of CPU by node, so the contention doesn't resolve. - if (platform === "win32") { - fs.rename = (function (fs$rename) { return function (from, to, cb) { - var start = Date.now() - var backoff = 0; - fs$rename(from, to, function CB (er) { - if (er - && (er.code === "EACCES" || er.code === "EPERM") - && Date.now() - start < 60000) { - setTimeout(function() { - fs.stat(to, function (stater, st) { - if (stater && stater.code === "ENOENT") - fs$rename(from, to, CB); - else - cb(er) - }) - }, backoff) - if (backoff < 100) - backoff += 10; - return; - } - if (cb) cb(er) - }) - }})(fs.rename) - } - - // if read() returns EAGAIN, then just try it again. - fs.read = (function (fs$read) { - function read (fd, buffer, offset, length, position, callback_) { - var callback - if (callback_ && typeof callback_ === 'function') { - var eagCounter = 0 - callback = function (er, _, __) { - if (er && er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - callback_.apply(this, arguments) - } - } - return fs$read.call(fs, fd, buffer, offset, length, position, callback) - } - - // This ensures `util.promisify` works as it does for native `fs.read`. 
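// Making the native function the wrapper's prototype lets property
// lookups fall through to it, presumably including the
// util.promisify.custom symbol Node attaches to fs.read, so promisified
// reads still resolve to { bytesRead, buffer }.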
- read.__proto__ = fs$read - return read - })(fs.read) - - fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { - var eagCounter = 0 - while (true) { - try { - return fs$readSync.call(fs, fd, buffer, offset, length, position) - } catch (er) { - if (er.code === 'EAGAIN' && eagCounter < 10) { - eagCounter ++ - continue - } - throw er - } - } - }})(fs.readSync) - - function patchLchmod (fs) { - fs.lchmod = function (path, mode, callback) { - fs.open( path - , constants.O_WRONLY | constants.O_SYMLINK - , mode - , function (err, fd) { - if (err) { - if (callback) callback(err) - return - } - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - fs.fchmod(fd, mode, function (err) { - fs.close(fd, function(err2) { - if (callback) callback(err || err2) - }) - }) - }) - } - - fs.lchmodSync = function (path, mode) { - var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) - - // prefer to return the chmod error, if one occurs, - // but still try to close, and report closing errors if they occur. - var threw = true - var ret - try { - ret = fs.fchmodSync(fd, mode) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - } - - function patchLutimes (fs) { - if (constants.hasOwnProperty("O_SYMLINK")) { - fs.lutimes = function (path, at, mt, cb) { - fs.open(path, constants.O_SYMLINK, function (er, fd) { - if (er) { - if (cb) cb(er) - return - } - fs.futimes(fd, at, mt, function (er) { - fs.close(fd, function (er2) { - if (cb) cb(er || er2) - }) - }) - }) - } - - fs.lutimesSync = function (path, at, mt) { - var fd = fs.openSync(path, constants.O_SYMLINK) - var ret - var threw = true - try { - ret = fs.futimesSync(fd, at, mt) - threw = false - } finally { - if (threw) { - try { - fs.closeSync(fd) - } catch (er) {} - } else { - fs.closeSync(fd) - } - } - return ret - } - - } else { - fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } - fs.lutimesSync = function () {} - } - } - - function chmodFix (orig) { - if (!orig) return orig - return function (target, mode, cb) { - return orig.call(fs, target, mode, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chmodFixSync (orig) { - if (!orig) return orig - return function (target, mode) { - try { - return orig.call(fs, target, mode) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - - function chownFix (orig) { - if (!orig) return orig - return function (target, uid, gid, cb) { - return orig.call(fs, target, uid, gid, function (er) { - if (chownErOk(er)) er = null - if (cb) cb.apply(this, arguments) - }) - } - } - - function chownFixSync (orig) { - if (!orig) return orig - return function (target, uid, gid) { - try { - return orig.call(fs, target, uid, gid) - } catch (er) { - if (!chownErOk(er)) throw er - } - } - } - - function statFix (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - function callback (er, stats) { - if (stats) { - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - } - if (cb) cb.apply(this, arguments) - } - return options ? 
orig.call(fs, target, options, callback) - : orig.call(fs, target, callback) - } - } - - function statFixSync (orig) { - if (!orig) return orig - // Older versions of Node erroneously returned signed integers for - // uid + gid. - return function (target, options) { - var stats = options ? orig.call(fs, target, options) - : orig.call(fs, target) - if (stats.uid < 0) stats.uid += 0x100000000 - if (stats.gid < 0) stats.gid += 0x100000000 - return stats; - } - } - - // ENOSYS means that the fs doesn't support the op. Just ignore - // that, because it doesn't matter. - // - // if there's no getuid, or if getuid() is something other - // than 0, and the error is EINVAL or EPERM, then just ignore - // it. - // - // This specific case is a silent failure in cp, install, tar, - // and most other unix tools that manage permissions. - // - // When running as root, or if other types of errors are - // encountered, then it's strict. - function chownErOk (er) { - if (!er) - return true - - if (er.code === "ENOSYS") - return true - - var nonroot = !process.getuid || process.getuid() !== 0 - if (nonroot) { - if (er.code === "EINVAL" || er.code === "EPERM") - return true - } - - return false - } -} - - -/***/ }), -/* 251 */, -/* 252 */, -/* 253 */, -/* 254 */, -/* 255 */, -/* 256 */, -/* 257 */, -/* 258 */, -/* 259 */, -/* 260 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -// Note: since nyc uses this module to output coverage, any lines -// that are in the direct sync flow of nyc's outputCoverage are -// ignored, since we can never get coverage for them. -var assert = __webpack_require__(357) -var signals = __webpack_require__(208) -var isWin = /^win/i.test(process.platform) - -var EE = __webpack_require__(614) -/* istanbul ignore if */ -if (typeof EE !== 'function') { - EE = EE.EventEmitter -} - -var emitter -if (process.__signal_exit_emitter__) { - emitter = process.__signal_exit_emitter__ -} else { - emitter = process.__signal_exit_emitter__ = new EE() - emitter.count = 0 - emitter.emitted = {} -} - -// Because this emitter is a global, we have to check to see if a -// previous version of this library failed to enable infinite listeners. -// I know what you're about to say. But literally everything about -// signal-exit is a compromise with evil. Get used to it. -if (!emitter.infinite) { - emitter.setMaxListeners(Infinity) - emitter.infinite = true -} - -module.exports = function (cb, opts) { - assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') - - if (loaded === false) { - load() - } - - var ev = 'exit' - if (opts && opts.alwaysLast) { - ev = 'afterexit' - } - - var remove = function () { - emitter.removeListener(ev, cb) - if (emitter.listeners('exit').length === 0 && - emitter.listeners('afterexit').length === 0) { - unload() - } - } - emitter.on(ev, cb) - - return remove -} - -module.exports.unload = unload -function unload () { - if (!loaded) { - return - } - loaded = false - - signals.forEach(function (sig) { - try { - process.removeListener(sig, sigListeners[sig]) - } catch (er) {} - }) - process.emit = originalProcessEmit - process.reallyExit = originalProcessReallyExit - emitter.count -= 1 -} - -function emit (event, code, signal) { - if (emitter.emitted[event]) { - return - } - emitter.emitted[event] = true - emitter.emit(event, code, signal) -} - -// { : , ... } -var sigListeners = {} -signals.forEach(function (sig) { - sigListeners[sig] = function listener () { - // If there are no other listeners, an exit is coming! 
- // Simplest way: remove us and then re-send the signal. - // We know that this will kill the process, so we can - // safely emit now. - var listeners = process.listeners(sig) - if (listeners.length === emitter.count) { - unload() - emit('exit', null, sig) - /* istanbul ignore next */ - emit('afterexit', null, sig) - /* istanbul ignore next */ - if (isWin && sig === 'SIGHUP') { - // "SIGHUP" throws an `ENOSYS` error on Windows, - // so use a supported signal instead - sig = 'SIGINT' - } - process.kill(process.pid, sig) - } - } -}) - -module.exports.signals = function () { - return signals -} - -module.exports.load = load - -var loaded = false - -function load () { - if (loaded) { - return - } - loaded = true - - // This is the number of onSignalExit's that are in play. - // It's important so that we can count the correct number of - // listeners on signals, and don't wait for the other one to - // handle it instead of us. - emitter.count += 1 - - signals = signals.filter(function (sig) { - try { - process.on(sig, sigListeners[sig]) - return true - } catch (er) { - return false - } - }) - - process.emit = processEmit - process.reallyExit = processReallyExit -} - -var originalProcessReallyExit = process.reallyExit -function processReallyExit (code) { - process.exitCode = code || 0 - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - /* istanbul ignore next */ - originalProcessReallyExit.call(process, process.exitCode) -} - -var originalProcessEmit = process.emit -function processEmit (ev, arg) { - if (ev === 'exit') { - if (arg !== undefined) { - process.exitCode = arg - } - var ret = originalProcessEmit.apply(this, arguments) - emit('exit', process.exitCode, null) - /* istanbul ignore next */ - emit('afterexit', process.exitCode, null) - return ret - } else { - return originalProcessEmit.apply(this, arguments) - } -} - - -/***/ }), -/* 261 */, -/* 262 */, -/* 263 */, -/* 264 */, -/* 265 */, -/* 266 */, -/* 267 */, -/* 268 */, -/* 269 */, -/* 270 */, -/* 271 */, -/* 272 */, -/* 273 */, -/* 274 */, -/* 275 */, -/* 276 */, -/* 277 */, -/* 278 */, -/* 279 */, -/* 280 */ -/***/ (function(module, exports) { - -exports = module.exports = SemVer - -var debug -/* istanbul ignore next */ -if (typeof process === 'object' && - process.env && - process.env.NODE_DEBUG && - /\bsemver\b/i.test(process.env.NODE_DEBUG)) { - debug = function () { - var args = Array.prototype.slice.call(arguments, 0) - args.unshift('SEMVER') - console.log.apply(console, args) - } -} else { - debug = function () {} -} - -// Note: this is the semver.org version of the spec that it implements -// Not necessarily the package version of this code. -exports.SEMVER_SPEC_VERSION = '2.0.0' - -var MAX_LENGTH = 256 -var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || - /* istanbul ignore next */ 9007199254740991 - -// Max safe segment length for coercion. -var MAX_SAFE_COMPONENT_LENGTH = 16 - -// The actual regexps go on exports.re -var re = exports.re = [] -var src = exports.src = [] -var R = 0 - -// The following Regular Expressions can be used for tokenizing, -// validating, and parsing SemVer version strings. - -// ## Numeric Identifier -// A single `0`, or a non-zero digit followed by zero or more digits. 
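// A minimal sketch of what these identifier rules mean in practice,
// assuming this bundle is the published `semver` package whose exports
// appear below. Strict parsing follows the spec regexes; { loose: true }
// accepts the sloppier forms handled by the *LOOSE variants further down:
const semver = require('semver')
console.log(semver.valid('1.2.3'))          // '1.2.3'
console.log(semver.valid('01.2.3'))         // null (leading zero is not a numeric identifier)
console.log(semver.valid('=v1.2.3'))        // null in strict mode
console.log(semver.valid('=v1.2.3', true))  // '1.2.3' (loose mode tolerates the '=v' prefix)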
- -var NUMERICIDENTIFIER = R++ -src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' -var NUMERICIDENTIFIERLOOSE = R++ -src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' - -// ## Non-numeric Identifier -// Zero or more digits, followed by a letter or hyphen, and then zero or -// more letters, digits, or hyphens. - -var NONNUMERICIDENTIFIER = R++ -src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - -// ## Main Version -// Three dot-separated numeric identifiers. - -var MAINVERSION = R++ -src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')\\.' + - '(' + src[NUMERICIDENTIFIER] + ')' - -var MAINVERSIONLOOSE = R++ -src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + - '(' + src[NUMERICIDENTIFIERLOOSE] + ')' - -// ## Pre-release Version Identifier -// A numeric identifier, or a non-numeric identifier. - -var PRERELEASEIDENTIFIER = R++ -src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -var PRERELEASEIDENTIFIERLOOSE = R++ -src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + - '|' + src[NONNUMERICIDENTIFIER] + ')' - -// ## Pre-release Version -// Hyphen, followed by one or more dot-separated pre-release version -// identifiers. - -var PRERELEASE = R++ -src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + - '(?:\\.' + src[PRERELEASEIDENTIFIER] + ')*))' - -var PRERELEASELOOSE = R++ -src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + - '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - -// ## Build Metadata Identifier -// Any combination of digits, letters, or hyphens. - -var BUILDIDENTIFIER = R++ -src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' - -// ## Build Metadata -// Plus sign, followed by one or more period-separated build metadata -// identifiers. - -var BUILD = R++ -src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + - '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' - -// ## Full Version String -// A main version, followed optionally by a pre-release version and -// build metadata. - -// Note that the only major, minor, patch, and pre-release sections of -// the version string are capturing groups. The build metadata is not a -// capturing group, because it should not ever be used in version -// comparison. - -var FULL = R++ -var FULLPLAIN = 'v?' + src[MAINVERSION] + - src[PRERELEASE] + '?' + - src[BUILD] + '?' - -src[FULL] = '^' + FULLPLAIN + '$' - -// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. -// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty -// common in the npm registry. -var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + - src[PRERELEASELOOSE] + '?' + - src[BUILD] + '?' - -var LOOSE = R++ -src[LOOSE] = '^' + LOOSEPLAIN + '$' - -var GTLT = R++ -src[GTLT] = '((?:<|>)?=?)' - -// Something like "2.*" or "1.2.x". -// Note that "x.x" is a valid xRange identifer, meaning "any version" -// Only the first item is strictly required. -var XRANGEIDENTIFIERLOOSE = R++ -src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' -var XRANGEIDENTIFIER = R++ -src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' - -var XRANGEPLAIN = R++ -src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + - '(?:' + src[PRERELEASE] + ')?' + - src[BUILD] + '?' + - ')?)?' 
- -var XRANGEPLAINLOOSE = R++ -src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + - '(?:' + src[PRERELEASELOOSE] + ')?' + - src[BUILD] + '?' + - ')?)?' - -var XRANGE = R++ -src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' -var XRANGELOOSE = R++ -src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' - -// Coercion. -// Extract anything that could conceivably be a part of a valid semver -var COERCE = R++ -src[COERCE] = '(?:^|[^\\d])' + - '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + - '(?:$|[^\\d])' - -// Tilde ranges. -// Meaning is "reasonably at or greater than" -var LONETILDE = R++ -src[LONETILDE] = '(?:~>?)' - -var TILDETRIM = R++ -src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' -re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') -var tildeTrimReplace = '$1~' - -var TILDE = R++ -src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' -var TILDELOOSE = R++ -src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' - -// Caret ranges. -// Meaning is "at least and backwards compatible with" -var LONECARET = R++ -src[LONECARET] = '(?:\\^)' - -var CARETTRIM = R++ -src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' -re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') -var caretTrimReplace = '$1^' - -var CARET = R++ -src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' -var CARETLOOSE = R++ -src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' - -// A simple gt/lt/eq thing, or just "" to indicate "any version" -var COMPARATORLOOSE = R++ -src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' -var COMPARATOR = R++ -src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' - -// An expression to strip any whitespace between the gtlt and the thing -// it modifies, so that `> 1.2.3` ==> `>1.2.3` -var COMPARATORTRIM = R++ -src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + - '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' - -// this one has to use the /g flag -re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') -var comparatorTrimReplace = '$1$2$3' - -// Something like `1.2.3 - 1.2.4` -// Note that these all use the loose form, because they'll be -// checked against either the strict or loose comparator form -// later. -var HYPHENRANGE = R++ -src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAIN] + ')' + - '\\s*$' - -var HYPHENRANGELOOSE = R++ -src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s+-\\s+' + - '(' + src[XRANGEPLAINLOOSE] + ')' + - '\\s*$' - -// Star ranges basically just allow anything at all. -var STAR = R++ -src[STAR] = '(<|>)?=?\\s*\\*' - -// Compile to actual regexp objects. -// All are flag-free, unless they were created above with a flag. -for (var i = 0; i < R; i++) { - debug(i, src[i]) - if (!re[i]) { - re[i] = new RegExp(src[i]) - } -} - -exports.parse = parse -function parse (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - if (version.length > MAX_LENGTH) { - return null - } - - var r = options.loose ? 
re[LOOSE] : re[FULL] - if (!r.test(version)) { - return null - } - - try { - return new SemVer(version, options) - } catch (er) { - return null - } -} - -exports.valid = valid -function valid (version, options) { - var v = parse(version, options) - return v ? v.version : null -} - -exports.clean = clean -function clean (version, options) { - var s = parse(version.trim().replace(/^[=v]+/, ''), options) - return s ? s.version : null -} - -exports.SemVer = SemVer - -function SemVer (version, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - if (version instanceof SemVer) { - if (version.loose === options.loose) { - return version - } else { - version = version.version - } - } else if (typeof version !== 'string') { - throw new TypeError('Invalid Version: ' + version) - } - - if (version.length > MAX_LENGTH) { - throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') - } - - if (!(this instanceof SemVer)) { - return new SemVer(version, options) - } - - debug('SemVer', version, options) - this.options = options - this.loose = !!options.loose - - var m = version.trim().match(options.loose ? re[LOOSE] : re[FULL]) - - if (!m) { - throw new TypeError('Invalid Version: ' + version) - } - - this.raw = version - - // these are actually numbers - this.major = +m[1] - this.minor = +m[2] - this.patch = +m[3] - - if (this.major > MAX_SAFE_INTEGER || this.major < 0) { - throw new TypeError('Invalid major version') - } - - if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { - throw new TypeError('Invalid minor version') - } - - if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { - throw new TypeError('Invalid patch version') - } - - // numberify any prerelease numeric ids - if (!m[4]) { - this.prerelease = [] - } else { - this.prerelease = m[4].split('.').map(function (id) { - if (/^[0-9]+$/.test(id)) { - var num = +id - if (num >= 0 && num < MAX_SAFE_INTEGER) { - return num - } - } - return id - }) - } - - this.build = m[5] ? m[5].split('.') : [] - this.format() -} - -SemVer.prototype.format = function () { - this.version = this.major + '.' + this.minor + '.' 
+ this.patch - if (this.prerelease.length) { - this.version += '-' + this.prerelease.join('.') - } - return this.version -} - -SemVer.prototype.toString = function () { - return this.version -} - -SemVer.prototype.compare = function (other) { - debug('SemVer.compare', this.version, this.options, other) - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return this.compareMain(other) || this.comparePre(other) -} - -SemVer.prototype.compareMain = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - return compareIdentifiers(this.major, other.major) || - compareIdentifiers(this.minor, other.minor) || - compareIdentifiers(this.patch, other.patch) -} - -SemVer.prototype.comparePre = function (other) { - if (!(other instanceof SemVer)) { - other = new SemVer(other, this.options) - } - - // NOT having a prerelease is > having one - if (this.prerelease.length && !other.prerelease.length) { - return -1 - } else if (!this.prerelease.length && other.prerelease.length) { - return 1 - } else if (!this.prerelease.length && !other.prerelease.length) { - return 0 - } - - var i = 0 - do { - var a = this.prerelease[i] - var b = other.prerelease[i] - debug('prerelease compare', i, a, b) - if (a === undefined && b === undefined) { - return 0 - } else if (b === undefined) { - return 1 - } else if (a === undefined) { - return -1 - } else if (a === b) { - continue - } else { - return compareIdentifiers(a, b) - } - } while (++i) -} - -// preminor will bump the version up to the next minor release, and immediately -// down to pre-release. premajor and prepatch work the same way. -SemVer.prototype.inc = function (release, identifier) { - switch (release) { - case 'premajor': - this.prerelease.length = 0 - this.patch = 0 - this.minor = 0 - this.major++ - this.inc('pre', identifier) - break - case 'preminor': - this.prerelease.length = 0 - this.patch = 0 - this.minor++ - this.inc('pre', identifier) - break - case 'prepatch': - // If this is already a prerelease, it will bump to the next version - // drop any prereleases that might already exist, since they are not - // relevant at this point. - this.prerelease.length = 0 - this.inc('patch', identifier) - this.inc('pre', identifier) - break - // If the input is a non-prerelease version, this acts the same as - // prepatch. - case 'prerelease': - if (this.prerelease.length === 0) { - this.inc('patch', identifier) - } - this.inc('pre', identifier) - break - - case 'major': - // If this is a pre-major version, bump up to the same major version. - // Otherwise increment major. - // 1.0.0-5 bumps to 1.0.0 - // 1.1.0 bumps to 2.0.0 - if (this.minor !== 0 || - this.patch !== 0 || - this.prerelease.length === 0) { - this.major++ - } - this.minor = 0 - this.patch = 0 - this.prerelease = [] - break - case 'minor': - // If this is a pre-minor version, bump up to the same minor version. - // Otherwise increment minor. - // 1.2.0-5 bumps to 1.2.0 - // 1.2.1 bumps to 1.3.0 - if (this.patch !== 0 || this.prerelease.length === 0) { - this.minor++ - } - this.patch = 0 - this.prerelease = [] - break - case 'patch': - // If this is not a pre-release version, it will increment the patch. - // If it is a pre-release it will bump up to the same patch version. - // 1.2.0-5 patches to 1.2.0 - // 1.2.0 patches to 1.2.1 - if (this.prerelease.length === 0) { - this.patch++ - } - this.prerelease = [] - break - // This probably shouldn't be used publicly. 
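// A short sketch of the increment rules implemented by SemVer.prototype.inc,
// assuming the standard `semver` exports:
const semver = require('semver')
console.log(semver.inc('1.2.3', 'minor'))               // '1.3.0'
console.log(semver.inc('1.2.3', 'premajor'))            // '2.0.0-0'
console.log(semver.inc('1.2.3', 'prerelease', 'beta'))  // '1.2.4-beta.0'
console.log(semver.inc('1.2.4-beta.0', 'prerelease'))   // '1.2.4-beta.1'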
- // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. - case 'pre': - if (this.prerelease.length === 0) { - this.prerelease = [0] - } else { - var i = this.prerelease.length - while (--i >= 0) { - if (typeof this.prerelease[i] === 'number') { - this.prerelease[i]++ - i = -2 - } - } - if (i === -1) { - // didn't increment anything - this.prerelease.push(0) - } - } - if (identifier) { - // 1.2.0-beta.1 bumps to 1.2.0-beta.2, - // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 - if (this.prerelease[0] === identifier) { - if (isNaN(this.prerelease[1])) { - this.prerelease = [identifier, 0] - } - } else { - this.prerelease = [identifier, 0] - } - } - break - - default: - throw new Error('invalid increment argument: ' + release) - } - this.format() - this.raw = this.version - return this -} - -exports.inc = inc -function inc (version, release, loose, identifier) { - if (typeof (loose) === 'string') { - identifier = loose - loose = undefined - } - - try { - return new SemVer(version, loose).inc(release, identifier).version - } catch (er) { - return null - } -} - -exports.diff = diff -function diff (version1, version2) { - if (eq(version1, version2)) { - return null - } else { - var v1 = parse(version1) - var v2 = parse(version2) - var prefix = '' - if (v1.prerelease.length || v2.prerelease.length) { - prefix = 'pre' - var defaultResult = 'prerelease' - } - for (var key in v1) { - if (key === 'major' || key === 'minor' || key === 'patch') { - if (v1[key] !== v2[key]) { - return prefix + key - } - } - } - return defaultResult // may be undefined - } -} - -exports.compareIdentifiers = compareIdentifiers - -var numeric = /^[0-9]+$/ -function compareIdentifiers (a, b) { - var anum = numeric.test(a) - var bnum = numeric.test(b) - - if (anum && bnum) { - a = +a - b = +b - } - - return a === b ? 0 - : (anum && !bnum) ? -1 - : (bnum && !anum) ? 1 - : a < b ? 
-1 - : 1 -} - -exports.rcompareIdentifiers = rcompareIdentifiers -function rcompareIdentifiers (a, b) { - return compareIdentifiers(b, a) -} - -exports.major = major -function major (a, loose) { - return new SemVer(a, loose).major -} - -exports.minor = minor -function minor (a, loose) { - return new SemVer(a, loose).minor -} - -exports.patch = patch -function patch (a, loose) { - return new SemVer(a, loose).patch -} - -exports.compare = compare -function compare (a, b, loose) { - return new SemVer(a, loose).compare(new SemVer(b, loose)) -} - -exports.compareLoose = compareLoose -function compareLoose (a, b) { - return compare(a, b, true) -} - -exports.rcompare = rcompare -function rcompare (a, b, loose) { - return compare(b, a, loose) -} - -exports.sort = sort -function sort (list, loose) { - return list.sort(function (a, b) { - return exports.compare(a, b, loose) - }) -} - -exports.rsort = rsort -function rsort (list, loose) { - return list.sort(function (a, b) { - return exports.rcompare(a, b, loose) - }) -} - -exports.gt = gt -function gt (a, b, loose) { - return compare(a, b, loose) > 0 -} - -exports.lt = lt -function lt (a, b, loose) { - return compare(a, b, loose) < 0 -} - -exports.eq = eq -function eq (a, b, loose) { - return compare(a, b, loose) === 0 -} - -exports.neq = neq -function neq (a, b, loose) { - return compare(a, b, loose) !== 0 -} - -exports.gte = gte -function gte (a, b, loose) { - return compare(a, b, loose) >= 0 -} - -exports.lte = lte -function lte (a, b, loose) { - return compare(a, b, loose) <= 0 -} - -exports.cmp = cmp -function cmp (a, op, b, loose) { - switch (op) { - case '===': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a === b - - case '!==': - if (typeof a === 'object') - a = a.version - if (typeof b === 'object') - b = b.version - return a !== b - - case '': - case '=': - case '==': - return eq(a, b, loose) - - case '!=': - return neq(a, b, loose) - - case '>': - return gt(a, b, loose) - - case '>=': - return gte(a, b, loose) - - case '<': - return lt(a, b, loose) - - case '<=': - return lte(a, b, loose) - - default: - throw new TypeError('Invalid operator: ' + op) - } -} - -exports.Comparator = Comparator -function Comparator (comp, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (comp instanceof Comparator) { - if (comp.loose === !!options.loose) { - return comp - } else { - comp = comp.value - } - } - - if (!(this instanceof Comparator)) { - return new Comparator(comp, options) - } - - debug('comparator', comp, options) - this.options = options - this.loose = !!options.loose - this.parse(comp) - - if (this.semver === ANY) { - this.value = '' - } else { - this.value = this.operator + this.semver.version - } - - debug('comp', this) -} - -var ANY = {} -Comparator.prototype.parse = function (comp) { - var r = this.options.loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var m = comp.match(r) - - if (!m) { - throw new TypeError('Invalid comparator: ' + comp) - } - - this.operator = m[1] - if (this.operator === '=') { - this.operator = '' - } - - // if it literally is just '>' or '' then allow anything. 
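// A brief sketch of the comparison semantics implemented above, assuming
// the standard `semver` exports: a release sorts higher than any of its
// prereleases, and numeric prerelease identifiers sort below alphanumeric
// ones:
const semver = require('semver')
console.log(semver.gt('1.2.3', '1.2.3-beta.1'))             // true
console.log(semver.lt('1.0.0-alpha.1', '1.0.0-alpha.beta')) // true
console.log(semver.sort(['1.0.0', '1.0.0-rc.1', '0.9.9']))  // ['0.9.9', '1.0.0-rc.1', '1.0.0']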
- if (!m[2]) { - this.semver = ANY - } else { - this.semver = new SemVer(m[2], this.options.loose) - } -} - -Comparator.prototype.toString = function () { - return this.value -} - -Comparator.prototype.test = function (version) { - debug('Comparator.test', version, this.options.loose) - - if (this.semver === ANY) { - return true - } - - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } - - return cmp(version, this.operator, this.semver, this.options) -} - -Comparator.prototype.intersects = function (comp, options) { - if (!(comp instanceof Comparator)) { - throw new TypeError('a Comparator is required') - } - - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - var rangeTmp - - if (this.operator === '') { - rangeTmp = new Range(comp.value, options) - return satisfies(this.value, rangeTmp, options) - } else if (comp.operator === '') { - rangeTmp = new Range(this.value, options) - return satisfies(comp.semver, rangeTmp, options) - } - - var sameDirectionIncreasing = - (this.operator === '>=' || this.operator === '>') && - (comp.operator === '>=' || comp.operator === '>') - var sameDirectionDecreasing = - (this.operator === '<=' || this.operator === '<') && - (comp.operator === '<=' || comp.operator === '<') - var sameSemVer = this.semver.version === comp.semver.version - var differentDirectionsInclusive = - (this.operator === '>=' || this.operator === '<=') && - (comp.operator === '>=' || comp.operator === '<=') - var oppositeDirectionsLessThan = - cmp(this.semver, '<', comp.semver, options) && - ((this.operator === '>=' || this.operator === '>') && - (comp.operator === '<=' || comp.operator === '<')) - var oppositeDirectionsGreaterThan = - cmp(this.semver, '>', comp.semver, options) && - ((this.operator === '<=' || this.operator === '<') && - (comp.operator === '>=' || comp.operator === '>')) - - return sameDirectionIncreasing || sameDirectionDecreasing || - (sameSemVer && differentDirectionsInclusive) || - oppositeDirectionsLessThan || oppositeDirectionsGreaterThan -} - -exports.Range = Range -function Range (range, options) { - if (!options || typeof options !== 'object') { - options = { - loose: !!options, - includePrerelease: false - } - } - - if (range instanceof Range) { - if (range.loose === !!options.loose && - range.includePrerelease === !!options.includePrerelease) { - return range - } else { - return new Range(range.raw, options) - } - } - - if (range instanceof Comparator) { - return new Range(range.value, options) - } - - if (!(this instanceof Range)) { - return new Range(range, options) - } - - this.options = options - this.loose = !!options.loose - this.includePrerelease = !!options.includePrerelease - - // First, split based on boolean or || - this.raw = range - this.set = range.split(/\s*\|\|\s*/).map(function (range) { - return this.parseRange(range.trim()) - }, this).filter(function (c) { - // throw out any that are not relevant for whatever reason - return c.length - }) - - if (!this.set.length) { - throw new TypeError('Invalid SemVer Range: ' + range) - } - - this.format() -} - -Range.prototype.format = function () { - this.range = this.set.map(function (comps) { - return comps.join(' ').trim() - }).join('||').trim() - return this.range -} - -Range.prototype.toString = function () { - return this.range -} - -Range.prototype.parseRange = function (range) { - var loose = this.options.loose - range = range.trim() - // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` - var hr = loose 
? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] - range = range.replace(hr, hyphenReplace) - debug('hyphen replace', range) - // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` - range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) - debug('comparator trim', range, re[COMPARATORTRIM]) - - // `~ 1.2.3` => `~1.2.3` - range = range.replace(re[TILDETRIM], tildeTrimReplace) - - // `^ 1.2.3` => `^1.2.3` - range = range.replace(re[CARETTRIM], caretTrimReplace) - - // normalize spaces - range = range.split(/\s+/).join(' ') - - // At this point, the range is completely trimmed and - // ready to be split into comparators. - - var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] - var set = range.split(' ').map(function (comp) { - return parseComparator(comp, this.options) - }, this).join(' ').split(/\s+/) - if (this.options.loose) { - // in loose mode, throw out any that are not valid comparators - set = set.filter(function (comp) { - return !!comp.match(compRe) - }) - } - set = set.map(function (comp) { - return new Comparator(comp, this.options) - }, this) - - return set -} - -Range.prototype.intersects = function (range, options) { - if (!(range instanceof Range)) { - throw new TypeError('a Range is required') - } - - return this.set.some(function (thisComparators) { - return thisComparators.every(function (thisComparator) { - return range.set.some(function (rangeComparators) { - return rangeComparators.every(function (rangeComparator) { - return thisComparator.intersects(rangeComparator, options) - }) - }) - }) - }) -} - -// Mostly just for testing and legacy API reasons -exports.toComparators = toComparators -function toComparators (range, options) { - return new Range(range, options).set.map(function (comp) { - return comp.map(function (c) { - return c.value - }).join(' ').trim().split(' ') - }) -} - -// comprised of xranges, tildes, stars, and gtlt's at this point. -// already replaced the hyphen ranges -// turn into a set of JUST comparators. -function parseComparator (comp, options) { - debug('comp', comp, options) - comp = replaceCarets(comp, options) - debug('caret', comp) - comp = replaceTildes(comp, options) - debug('tildes', comp) - comp = replaceXRanges(comp, options) - debug('xrange', comp) - comp = replaceStars(comp, options) - debug('stars', comp) - return comp -} - -function isX (id) { - return !id || id.toLowerCase() === 'x' || id === '*' -} - -// ~, ~> --> * (any, kinda silly) -// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 -// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 -// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 -// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 -// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 -function replaceTildes (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceTilde(comp, options) - }).join(' ') -} - -function replaceTilde (comp, options) { - var r = options.loose ? re[TILDELOOSE] : re[TILDE] - return comp.replace(r, function (_, M, m, p, pr) { - debug('tilde', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - // ~1.2 == >=1.2.0 <1.3.0 - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else if (pr) { - debug('replaceTilde pr', pr) - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } else { - // ~1.2.3 == >=1.2.3 <1.3.0 - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' 
+ (+m + 1) + '.0' - } - - debug('tilde return', ret) - return ret - }) -} - -// ^ --> * (any, kinda silly) -// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 -// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 -// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 -// ^1.2.3 --> >=1.2.3 <2.0.0 -// ^1.2.0 --> >=1.2.0 <2.0.0 -function replaceCarets (comp, options) { - return comp.trim().split(/\s+/).map(function (comp) { - return replaceCaret(comp, options) - }).join(' ') -} - -function replaceCaret (comp, options) { - debug('caret', comp, options) - var r = options.loose ? re[CARETLOOSE] : re[CARET] - return comp.replace(r, function (_, M, m, p, pr) { - debug('caret', comp, _, M, m, p, pr) - var ret - - if (isX(M)) { - ret = '' - } else if (isX(m)) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (isX(p)) { - if (M === '0') { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } else { - ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' - } - } else if (pr) { - debug('replaceCaret pr', pr) - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + '-' + pr + - ' <' + (+M + 1) + '.0.0' - } - } else { - debug('no pr') - if (M === '0') { - if (m === '0') { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + m + '.' + (+p + 1) - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + M + '.' + (+m + 1) + '.0' - } - } else { - ret = '>=' + M + '.' + m + '.' + p + - ' <' + (+M + 1) + '.0.0' - } - } - - debug('caret return', ret) - return ret - }) -} - -function replaceXRanges (comp, options) { - debug('replaceXRanges', comp, options) - return comp.split(/\s+/).map(function (comp) { - return replaceXRange(comp, options) - }).join(' ') -} - -function replaceXRange (comp, options) { - comp = comp.trim() - var r = options.loose ? re[XRANGELOOSE] : re[XRANGE] - return comp.replace(r, function (ret, gtlt, M, m, p, pr) { - debug('xRange', comp, ret, gtlt, M, m, p, pr) - var xM = isX(M) - var xm = xM || isX(m) - var xp = xm || isX(p) - var anyX = xp - - if (gtlt === '=' && anyX) { - gtlt = '' - } - - if (xM) { - if (gtlt === '>' || gtlt === '<') { - // nothing is allowed - ret = '<0.0.0' - } else { - // nothing is forbidden - ret = '*' - } - } else if (gtlt && anyX) { - // we know patch is an x, because we have any x at all. - // replace X with 0 - if (xm) { - m = 0 - } - p = 0 - - if (gtlt === '>') { - // >1 => >=2.0.0 - // >1.2 => >=1.3.0 - // >1.2.3 => >= 1.2.4 - gtlt = '>=' - if (xm) { - M = +M + 1 - m = 0 - p = 0 - } else { - m = +m + 1 - p = 0 - } - } else if (gtlt === '<=') { - // <=0.7.x is actually <0.8.0, since any 0.7.x should - // pass. Similarly, <=7.x is actually <8.0.0, etc. - gtlt = '<' - if (xm) { - M = +M + 1 - } else { - m = +m + 1 - } - } - - ret = gtlt + M + '.' + m + '.' + p - } else if (xm) { - ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' - } else if (xp) { - ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' - } - - debug('xRange return', ret) - - return ret - }) -} - -// Because * is AND-ed with everything else in the comparator, -// and '' means "any version", just remove the *s entirely. -function replaceStars (comp, options) { - debug('replaceStars', comp, options) - // Looseness is ignored here. star is always as loose as it gets! 
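// A short sketch of how the replace* helpers desugar shorthand ranges into
// plain comparators, assuming the standard `semver` exports:
const semver = require('semver')
console.log(semver.validRange('~1.2.3'))  // '>=1.2.3 <1.3.0'
console.log(semver.validRange('^1.2.3'))  // '>=1.2.3 <2.0.0'
console.log(semver.validRange('^0.2.3'))  // '>=0.2.3 <0.3.0' (caret is narrower below 1.0.0)
console.log(semver.validRange('1.2.x'))   // '>=1.2.0 <1.3.0'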
- return comp.trim().replace(re[STAR], '') -} - -// This function is passed to string.replace(re[HYPHENRANGE]) -// M, m, patch, prerelease, build -// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 -// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do -// 1.2 - 3.4 => >=1.2.0 <3.5.0 -function hyphenReplace ($0, - from, fM, fm, fp, fpr, fb, - to, tM, tm, tp, tpr, tb) { - if (isX(fM)) { - from = '' - } else if (isX(fm)) { - from = '>=' + fM + '.0.0' - } else if (isX(fp)) { - from = '>=' + fM + '.' + fm + '.0' - } else { - from = '>=' + from - } - - if (isX(tM)) { - to = '' - } else if (isX(tm)) { - to = '<' + (+tM + 1) + '.0.0' - } else if (isX(tp)) { - to = '<' + tM + '.' + (+tm + 1) + '.0' - } else if (tpr) { - to = '<=' + tM + '.' + tm + '.' + tp + '-' + tpr - } else { - to = '<=' + to - } - - return (from + ' ' + to).trim() -} - -// if ANY of the sets match ALL of its comparators, then pass -Range.prototype.test = function (version) { - if (!version) { - return false - } - - if (typeof version === 'string') { - version = new SemVer(version, this.options) - } - - for (var i = 0; i < this.set.length; i++) { - if (testSet(this.set[i], version, this.options)) { - return true - } - } - return false -} - -function testSet (set, version, options) { - for (var i = 0; i < set.length; i++) { - if (!set[i].test(version)) { - return false - } - } - - if (version.prerelease.length && !options.includePrerelease) { - // Find the set of versions that are allowed to have prereleases - // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 - // That should allow `1.2.3-pr.2` to pass. - // However, `1.2.4-alpha.notready` should NOT be allowed, - // even though it's within the range set by the comparators. - for (i = 0; i < set.length; i++) { - debug(set[i].semver) - if (set[i].semver === ANY) { - continue - } - - if (set[i].semver.prerelease.length > 0) { - var allowed = set[i].semver - if (allowed.major === version.major && - allowed.minor === version.minor && - allowed.patch === version.patch) { - return true - } - } - } - - // Version has a -pre, but it's not one of the ones we like. 
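// A compact sketch of the prerelease gating implemented by testSet,
// assuming the standard `semver` exports: a prerelease only satisfies a
// range when the range itself mentions a prerelease on the same
// [major, minor, patch] tuple, or when includePrerelease is set:
const semver = require('semver')
console.log(semver.satisfies('1.2.4-alpha.1', '^1.2.3'))     // false
console.log(semver.satisfies('1.2.3-pr.2', '>=1.2.3-pr.1'))  // true
console.log(semver.satisfies('1.2.4-alpha.1', '^1.2.3', { includePrerelease: true })) // true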
- return false - } - - return true -} - -exports.satisfies = satisfies -function satisfies (version, range, options) { - try { - range = new Range(range, options) - } catch (er) { - return false - } - return range.test(version) -} - -exports.maxSatisfying = maxSatisfying -function maxSatisfying (versions, range, options) { - var max = null - var maxSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!max || maxSV.compare(v) === -1) { - // compare(max, v, true) - max = v - maxSV = new SemVer(max, options) - } - } - }) - return max -} - -exports.minSatisfying = minSatisfying -function minSatisfying (versions, range, options) { - var min = null - var minSV = null - try { - var rangeObj = new Range(range, options) - } catch (er) { - return null - } - versions.forEach(function (v) { - if (rangeObj.test(v)) { - // satisfies(v, range, options) - if (!min || minSV.compare(v) === 1) { - // compare(min, v, true) - min = v - minSV = new SemVer(min, options) - } - } - }) - return min -} - -exports.minVersion = minVersion -function minVersion (range, loose) { - range = new Range(range, loose) - - var minver = new SemVer('0.0.0') - if (range.test(minver)) { - return minver - } - - minver = new SemVer('0.0.0-0') - if (range.test(minver)) { - return minver - } - - minver = null - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - comparators.forEach(function (comparator) { - // Clone to avoid manipulating the comparator's semver object. - var compver = new SemVer(comparator.semver.version) - switch (comparator.operator) { - case '>': - if (compver.prerelease.length === 0) { - compver.patch++ - } else { - compver.prerelease.push(0) - } - compver.raw = compver.format() - /* fallthrough */ - case '': - case '>=': - if (!minver || gt(minver, compver)) { - minver = compver - } - break - case '<': - case '<=': - /* Ignore maximum versions */ - break - /* istanbul ignore next */ - default: - throw new Error('Unexpected operation: ' + comparator.operator) - } - }) - } - - if (minver && range.test(minver)) { - return minver - } - - return null -} - -exports.validRange = validRange -function validRange (range, options) { - try { - // Return '*' instead of '' so that truthiness works. - // This will throw if it's invalid anyway - return new Range(range, options).range || '*' - } catch (er) { - return null - } -} - -// Determine if version is less than all the versions possible in the range -exports.ltr = ltr -function ltr (version, range, options) { - return outside(version, range, '<', options) -} - -// Determine if version is greater than all the versions possible in the range. -exports.gtr = gtr -function gtr (version, range, options) { - return outside(version, range, '>', options) -} - -exports.outside = outside -function outside (version, range, hilo, options) { - version = new SemVer(version, options) - range = new Range(range, options) - - var gtfn, ltefn, ltfn, comp, ecomp - switch (hilo) { - case '>': - gtfn = gt - ltefn = lte - ltfn = lt - comp = '>' - ecomp = '>=' - break - case '<': - gtfn = lt - ltefn = gte - ltfn = gt - comp = '<' - ecomp = '<=' - break - default: - throw new TypeError('Must provide a hilo val of "<" or ">"') - } - - // If it satisifes the range it is not outside - if (satisfies(version, range, options)) { - return false - } - - // From now on, variable terms are as if we're in "gtr" mode. 
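// A small sketch of the range utilities above, assuming the standard
// `semver` exports:
const semver = require('semver')
console.log(semver.maxSatisfying(['1.2.3', '1.2.4', '1.3.0'], '~1.2.0')) // '1.2.4'
console.log(semver.minVersion('^1.2.3').version)                         // '1.2.3'
console.log(semver.gtr('2.0.0', '^1.2.3'))  // true: 2.0.0 is above every version in the range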
- // but note that everything is flipped for the "ltr" function. - - for (var i = 0; i < range.set.length; ++i) { - var comparators = range.set[i] - - var high = null - var low = null - - comparators.forEach(function (comparator) { - if (comparator.semver === ANY) { - comparator = new Comparator('>=0.0.0') - } - high = high || comparator - low = low || comparator - if (gtfn(comparator.semver, high.semver, options)) { - high = comparator - } else if (ltfn(comparator.semver, low.semver, options)) { - low = comparator - } - }) - - // If the edge version comparator has a operator then our version - // isn't outside it - if (high.operator === comp || high.operator === ecomp) { - return false - } - - // If the lowest version comparator has an operator and our version - // is less than it then it isn't higher than the range - if ((!low.operator || low.operator === comp) && - ltefn(version, low.semver)) { - return false - } else if (low.operator === ecomp && ltfn(version, low.semver)) { - return false - } - } - return true -} - -exports.prerelease = prerelease -function prerelease (version, options) { - var parsed = parse(version, options) - return (parsed && parsed.prerelease.length) ? parsed.prerelease : null -} - -exports.intersects = intersects -function intersects (r1, r2, options) { - r1 = new Range(r1, options) - r2 = new Range(r2, options) - return r1.intersects(r2) -} - -exports.coerce = coerce -function coerce (version) { - if (version instanceof SemVer) { - return version - } - - if (typeof version !== 'string') { - return null - } - - var match = version.match(re[COERCE]) - - if (match == null) { - return null - } - - return parse(match[1] + - '.' + (match[2] || '0') + - '.' + (match[3] || '0')) -} - - -/***/ }), -/* 281 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const internal_globber_1 = __webpack_require__(297); -/** - * Constructs a globber - * - * @param patterns Patterns separated by newlines - * @param options Glob options - */ -function create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - return yield internal_globber_1.DefaultGlobber.create(patterns, options); - }); -} -exports.create = create; -//# sourceMappingURL=glob.js.map - -/***/ }), -/* 282 */, -/* 283 */, -/* 284 */, -/* 285 */, -/* 286 */, -/* 287 */, -/* 288 */, -/* 289 */, -/* 290 */, -/* 291 */, -/* 292 */, -/* 293 */ -/***/ (function(module) { - -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. 
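// A minimal usage sketch for wrappy, assuming this is the published
// `wrappy` package: the wrapper function decorates a callback while keeping
// any properties attached to the original, as the comment above describes:
const wrappy = require('wrappy')
const logged = wrappy(function (cb) {
  return function () {
    console.log('about to call')
    return cb.apply(this, arguments)
  }
})
const fn = logged(function () { return 42 })
fn() // logs 'about to call', then returns 42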
-module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) - - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') - - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) - - return wrapper - - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) - } - return ret - } -} - - -/***/ }), -/* 294 */, -/* 295 */, -/* 296 */, -/* 297 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __await = (this && this.__await) || function (v) { return this instanceof __await ? (this.v = v, this) : new __await(v); } -var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var g = generator.apply(thisArg, _arguments || []), i, q = []; - return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; - function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } - function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } - function step(r) { r.value instanceof __await ? 
Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } - function fulfill(value) { resume("next", value); } - function reject(value) { resume("throw", value); } - function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __webpack_require__(470); -const fs = __webpack_require__(747); -const globOptionsHelper = __webpack_require__(601); -const path = __webpack_require__(622); -const patternHelper = __webpack_require__(597); -const internal_match_kind_1 = __webpack_require__(327); -const internal_pattern_1 = __webpack_require__(923); -const internal_search_state_1 = __webpack_require__(728); -const IS_WINDOWS = process.platform === 'win32'; -class DefaultGlobber { - constructor(options) { - this.patterns = []; - this.searchPaths = []; - this.options = globOptionsHelper.getOptions(options); - } - getSearchPaths() { - // Return a copy - return this.searchPaths.slice(); - } - glob() { - var e_1, _a; - return __awaiter(this, void 0, void 0, function* () { - const result = []; - try { - for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { - const itemPath = _c.value; - result.push(itemPath); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); - } - finally { if (e_1) throw e_1.error; } - } - return result; - }); - } - globGenerator() { - return __asyncGenerator(this, arguments, function* globGenerator_1() { - // Fill in defaults options - const options = globOptionsHelper.getOptions(this.options); - // Implicit descendants? - const patterns = []; - for (const pattern of this.patterns) { - patterns.push(pattern); - if (options.implicitDescendants && - (pattern.trailingSeparator || - pattern.segments[pattern.segments.length - 1] !== '**')) { - patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**'))); - } - } - // Push the search paths - const stack = []; - for (const searchPath of patternHelper.getSearchPaths(patterns)) { - core.debug(`Search path '${searchPath}'`); - // Exists? - try { - // Intentionally using lstat. Detection for broken symlink - // will be performed later (if following symlinks). - yield __await(fs.promises.lstat(searchPath)); - } - catch (err) { - if (err.code === 'ENOENT') { - continue; - } - throw err; - } - stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); - } - // Search - const traversalChain = []; // used to detect cycles - while (stack.length) { - // Pop - const item = stack.pop(); - // Match? - const match = patternHelper.match(patterns, item.path); - const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); - if (!match && !partialMatch) { - continue; - } - // Stat - const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) - // Broken symlink, or symlink cycle detected, or no longer exists - ); - // Broken symlink, or symlink cycle detected, or no longer exists - if (!stats) { - continue; - } - // Directory - if (stats.isDirectory()) { - // Matched - if (match & internal_match_kind_1.MatchKind.Directory) { - yield yield __await(item.path); - } - // Descend? 
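// A minimal usage sketch for this globber, assuming it is the published
// `@actions/glob` package whose `create` export appeared earlier: patterns
// are newline-separated, '#' lines are comments, '!' negates a pattern, and
// options such as followSymbolicLinks feed the stat logic below:
const glob = require('@actions/glob')
async function listMarkdown () {
  const globber = await glob.create('**/*.md\n!**/node_modules/**', {
    followSymbolicLinks: false
  })
  return globber.glob() // resolves to an array of matching paths
}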
- else if (!partialMatch) { - continue; - } - // Push the child items in reverse - const childLevel = item.level + 1; - const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); - stack.push(...childItems.reverse()); - } - // File - else if (match & internal_match_kind_1.MatchKind.File) { - yield yield __await(item.path); - } - } - }); - } - /** - * Constructs a DefaultGlobber - */ - static create(patterns, options) { - return __awaiter(this, void 0, void 0, function* () { - const result = new DefaultGlobber(options); - if (IS_WINDOWS) { - patterns = patterns.replace(/\r\n/g, '\n'); - patterns = patterns.replace(/\r/g, '\n'); - } - const lines = patterns.split('\n').map(x => x.trim()); - for (const line of lines) { - // Empty or comment - if (!line || line.startsWith('#')) { - continue; - } - // Pattern - else { - result.patterns.push(new internal_pattern_1.Pattern(line)); - } - } - result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); - return result; - }); - } - static stat(item, options, traversalChain) { - return __awaiter(this, void 0, void 0, function* () { - // Note: - // `stat` returns info about the target of a symlink (or symlink chain) - // `lstat` returns info about a symlink itself - let stats; - if (options.followSymbolicLinks) { - try { - // Use `stat` (following symlinks) - stats = yield fs.promises.stat(item.path); - } - catch (err) { - if (err.code === 'ENOENT') { - if (options.omitBrokenSymbolicLinks) { - core.debug(`Broken symlink '${item.path}'`); - return undefined; - } - throw new Error(`No information found for the path '${item.path}'. This may indicate a broken symbolic link.`); - } - throw err; - } - } - else { - // Use `lstat` (not following symlinks) - stats = yield fs.promises.lstat(item.path); - } - // Note, isDirectory() returns false for the lstat of a symlink - if (stats.isDirectory() && options.followSymbolicLinks) { - // Get the realpath - const realPath = yield fs.promises.realpath(item.path); - // Fixup the traversal chain to match the item level - while (traversalChain.length >= item.level) { - traversalChain.pop(); - } - // Test for a cycle - if (traversalChain.some((x) => x === realPath)) { - core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); - return undefined; - } - // Update the traversal chain - traversalChain.push(realPath); - } - return stats; - }); - } -} -exports.DefaultGlobber = DefaultGlobber; -//# sourceMappingURL=internal-globber.js.map - -/***/ }), -/* 298 */, -/* 299 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -const VERSION = "2.4.0"; - -/** - * Some “list” response that can be paginated have a different response structure - * - * They have a `total_count` key in the response (search also has `incomplete_results`, - * /installation/repositories also has `repository_selection`), as well as a key with - * the list of the items which name varies from endpoint to endpoint. - * - * Octokit normalizes these responses so that paginated results are always returned following - * the same structure. One challenge is that if the list response has only one page, no Link - * header is provided, so this header alone is not sufficient to check wether a response is - * paginated or not. 
- * - * We check if a "total_count" key is present in the response data, but also make sure that - * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would - * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref - */ -function normalizePaginatedListResponse(response) { - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way - // to retrieve the same information. - - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - - response.data.total_count = totalCount; - return response; -} - -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - next() { - if (!url) { - return Promise.resolve({ - done: true - }); - } - - return requestMethod({ - method, - url, - headers - }).then(normalizePaginatedListResponse).then(response => { - // `response.headers.link` format: - // '; rel="next", ; rel="last"' - // sets `url` to undefined if "next" URL is not present or `link` header is not set - url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; - return { - value: response - }; - }); - } - - }) - }; -} - -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = undefined; - } - - return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); -} - -function gather(octokit, results, iterator, mapFn) { - return iterator.next().then(result => { - if (result.done) { - return results; - } - - let earlyExit = false; - - function done() { - earlyExit = true; - } - - results = results.concat(mapFn ? 
mapFn(result.value, done) : result.value.data); - - if (earlyExit) { - return results; - } - - return gather(octokit, results, iterator, mapFn); - }); -} - -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ - -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; - -exports.paginateRest = paginateRest; -//# sourceMappingURL=index.js.map - - -/***/ }), -/* 300 */, -/* 301 */, -/* 302 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = realpath -realpath.realpath = realpath -realpath.sync = realpathSync -realpath.realpathSync = realpathSync -realpath.monkeypatch = monkeypatch -realpath.unmonkeypatch = unmonkeypatch - -var fs = __webpack_require__(747) -var origRealpath = fs.realpath -var origRealpathSync = fs.realpathSync - -var version = process.version -var ok = /^v[0-5]\./.test(version) -var old = __webpack_require__(117) - -function newError (er) { - return er && er.syscall === 'realpath' && ( - er.code === 'ELOOP' || - er.code === 'ENOMEM' || - er.code === 'ENAMETOOLONG' - ) -} - -function realpath (p, cache, cb) { - if (ok) { - return origRealpath(p, cache, cb) - } - - if (typeof cache === 'function') { - cb = cache - cache = null - } - origRealpath(p, cache, function (er, result) { - if (newError(er)) { - old.realpath(p, cache, cb) - } else { - cb(er, result) - } - }) -} - -function realpathSync (p, cache) { - if (ok) { - return origRealpathSync(p, cache) - } - - try { - return origRealpathSync(p, cache) - } catch (er) { - if (newError(er)) { - return old.realpathSync(p, cache) - } else { - throw er - } - } -} - -function monkeypatch () { - fs.realpath = realpath - fs.realpathSync = realpathSync -} - -function unmonkeypatch () { - fs.realpath = origRealpath - fs.realpathSync = origRealpathSync -} - - -/***/ }), -/* 303 */, -/* 304 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; -// Standard YAML's JSON schema. -// http://www.yaml.org/spec/1.2/spec.html#id2803231 -// -// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. -// So, this schema is not such strict as defined in the YAML specification. -// It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. - - - - - -var Schema = __webpack_require__(917); - - -module.exports = new Schema({ - include: [ - __webpack_require__(497) - ], - implicit: [ - __webpack_require__(339), - __webpack_require__(142), - __webpack_require__(147), - __webpack_require__(77) - ] -}); - - -/***/ }), -/* 305 */, -/* 306 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var concatMap = __webpack_require__(896); -var balanced = __webpack_require__(621); - -module.exports = expandTop; - -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; - -function numeric(str) { - return parseInt(str, 10) == str - ? 
parseInt(str, 10) - : str.charCodeAt(0); -} - -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} - -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} - - -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; - - var parts = []; - var m = balanced('{', '}', str); - - if (!m) - return str.split(','); - - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); - - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } - - parts.push.apply(parts, p); - - return parts; -} - -function expandTop(str) { - if (!str) - return []; - - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. - // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } - - return expand(escapeBraces(str), true).map(unescapeBraces); -} - -function identity(e) { - return e; -} - -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} - -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} - -function expand(str, isTop) { - var expansions = []; - - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; - - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } - - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } - - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. - - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; - - var N; - - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? 
Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); - - N = []; - - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } - - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } - - return expansions; -} - - - -/***/ }), -/* 307 */, -/* 308 */, -/* 309 */, -/* 310 */, -/* 311 */, -/* 312 */, -/* 313 */, -/* 314 */, -/* 315 */ -/***/ (function(module) { - -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) - } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } - } -} - - -/***/ }), -/* 316 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - - -var common = __webpack_require__(436); - - -function Mark(name, buffer, position, line, column) { - this.name = name; - this.buffer = buffer; - this.position = position; - this.line = line; - this.column = column; -} - - -Mark.prototype.getSnippet = function getSnippet(indent, maxLength) { - var head, start, tail, end, snippet; - - if (!this.buffer) return null; - - indent = indent || 4; - maxLength = maxLength || 75; - - head = ''; - start = this.position; - - while (start > 0 && '\x00\r\n\x85\u2028\u2029'.indexOf(this.buffer.charAt(start - 1)) === -1) { - start -= 1; - if (this.position - start > (maxLength / 2 - 1)) { - head = ' ... '; - start += 5; - break; - } - } - - tail = ''; - end = this.position; - - while (end < this.buffer.length && '\x00\r\n\x85\u2028\u2029'.indexOf(this.buffer.charAt(end)) === -1) { - end += 1; - if (end - this.position > (maxLength / 2 - 1)) { - tail = ' ... 
'; - end -= 5; - break; - } - } - - snippet = this.buffer.slice(start, end); - - return common.repeat(' ', indent) + head + snippet + tail + '\n' + - common.repeat(' ', indent + this.position - start + head.length) + '^'; -}; - - -Mark.prototype.toString = function toString(compact) { - var snippet, where = ''; - - if (this.name) { - where += 'in "' + this.name + '" '; - } - - where += 'at line ' + (this.line + 1) + ', column ' + (this.column + 1); - - if (!compact) { - snippet = this.getSnippet(); - - if (snippet) { - where += ':\n' + snippet; - } - } - - return where; -}; - - -module.exports = Mark; - - -/***/ }), -/* 317 */, -/* 318 */, -/* 319 */, -/* 320 */, -/* 321 */, -/* 322 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const u = __webpack_require__(0).fromPromise -const fs = __webpack_require__(869) - -function pathExists (path) { - return fs.access(path).then(() => true).catch(() => false) -} - -module.exports = { - pathExists: u(pathExists), - pathExistsSync: fs.existsSync -} - - -/***/ }), -/* 323 */ -/***/ (function(module) { - -"use strict"; - - -var isStream = module.exports = function (stream) { - return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; -}; - -isStream.writable = function (stream) { - return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; -}; - -isStream.readable = function (stream) { - return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; -}; - -isStream.duplex = function (stream) { - return isStream.writable(stream) && isStream.readable(stream); -}; - -isStream.transform = function (stream) { - return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; -}; - - -/***/ }), -/* 324 */, -/* 325 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -module.exports = new Type('tag:yaml.org,2002:map', { - kind: 'mapping', - construct: function (data) { return data !== null ? 
data : {}; } -}); - - -/***/ }), -/* 326 */, -/* 327 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -/** - * Indicates whether a pattern matches a path - */ -var MatchKind; -(function (MatchKind) { - /** Not matched */ - MatchKind[MatchKind["None"] = 0] = "None"; - /** Matched if the path is a directory */ - MatchKind[MatchKind["Directory"] = 1] = "Directory"; - /** Matched if the path is a regular file */ - MatchKind[MatchKind["File"] = 2] = "File"; - /** Matched */ - MatchKind[MatchKind["All"] = 3] = "All"; -})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); -//# sourceMappingURL=internal-match-kind.js.map - -/***/ }), -/* 328 */, -/* 329 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -var _toString = Object.prototype.toString; - -function resolveYamlPairs(data) { - if (data === null) return true; - - var index, length, pair, keys, result, - object = data; - - result = new Array(object.length); - - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; - - if (_toString.call(pair) !== '[object Object]') return false; - - keys = Object.keys(pair); - - if (keys.length !== 1) return false; - - result[index] = [ keys[0], pair[keys[0]] ]; - } - - return true; -} - -function constructYamlPairs(data) { - if (data === null) return []; - - var index, length, pair, keys, result, - object = data; - - result = new Array(object.length); - - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; - - keys = Object.keys(pair); - - result[index] = [ keys[0], pair[keys[0]] ]; - } - - return result; -} - -module.exports = new Type('tag:yaml.org,2002:pairs', { - kind: 'sequence', - resolve: resolveYamlPairs, - construct: constructYamlPairs -}); - - -/***/ }), -/* 330 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { - clone, - doesBranchExist, - merge: gitMerge, - hasPullRequest, - getForkedProject -} = __webpack_require__(484); -const { logger } = __webpack_require__(79); -const { treatUrl } = __webpack_require__(594); -const { checkUrlExist } = __webpack_require__(22); -const { getNodeTriggeringJob } = __webpack_require__(645); -const fs = __webpack_require__(747); - -async function checkoutDefinitionTree(context, nodeChain, flow = "pr") { - const nodeTriggeringTheJob = getNodeTriggeringJob(context, nodeChain); - return Promise.all( - nodeChain.map(async node => { - try { - const result = Promise.resolve({ - project: node.project, - checkoutInfo: - flow === "pr" - ? await checkoutProjectPullRequestFlow( - context, - node, - nodeTriggeringTheJob - ) - : await checkoutProjectBranchFlow( - context, - node, - nodeTriggeringTheJob - ) - }); - logger.info(`[${node.project}] Checked out.`); - return result; - } catch (err) { - throw { project: node.project, message: err }; - } - }) - ) - .then(result => { - return result.reduce((acc, curr) => { - acc[curr.project] = curr.checkoutInfo; - return acc; - }, {}); - }) - .catch(err => { - logger.error(`[${err.project}] Error checking it out. 
${err.message}`); - throw err.message; - }); -} - -async function checkoutProjectPullRequestFlow( - context, - node, - nodeTriggeringTheJob -) { - logger.info(`[${node.project}] Checking out project`); - const dir = getDir(context.config.rootFolder, node.project); - if (!fs.existsSync(dir)) { - const checkoutInfo = await getCheckoutInfo( - context, - node, - nodeTriggeringTheJob - ); - - if (checkoutInfo == undefined) { - const msg = `[${node.project}] Trying to check out ${node.project} into '${dir}'. It does not exist.`; - logger.error(msg); - throw new Error(msg); - } - if (checkoutInfo.merge) { - logger.info( - `[${node.project}] Merging ${context.config.github.serverUrl}/${node.project}:${checkoutInfo.targetBranch} into ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.branch}` - ); - try { - await clone( - `${context.config.github.serverUrl}/${node.project}`, - dir, - checkoutInfo.targetBranch - ); - } catch (err) { - logger.error( - `[${node.project}] Error checking out (before merging) ${context.config.github.serverUrl}/${node.repo.group}/${node.project}:${context.config.github.targetBranch}` - ); - throw err; - } - try { - await gitMerge( - dir, - checkoutInfo.group, - checkoutInfo.project, - checkoutInfo.branch - ); - } catch (err) { - logger.error( - `[${node.project}] Error merging ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.branch}. Please manually merge it and relaunch.` - ); - throw err; - } - } else { - try { - logger.info( - `[${node.project}] Checking out '${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.branch}' into '${dir}'` - ); - await clone( - `${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}`, - dir, - checkoutInfo.branch - ); - } catch (err) { - logger.error( - `[${node.project}] Error checking out ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}.` - ); - throw err; - } - } - return checkoutInfo; - } else { - logger.info( - `[${node.project}] folder ${dir} already exists, nothing to check out` - ); - return undefined; - } -} - -async function checkoutProjectBranchFlow(context, node, nodeTriggeringTheJob) { - logger.info(`[${node.project}] Checking out project`); - const dir = getDir(context.config.rootFolder, node.project); - if (!fs.existsSync(dir)) { - const checkoutInfo = await getCheckoutInfo( - context, - node, - nodeTriggeringTheJob - ); - - if (checkoutInfo == undefined) { - const msg = `Trying to check out ${node.project} into '${dir}'. 
It does not exist.`; - logger.error(msg); - throw new Error(msg); - } - try { - logger.info( - `Checking out '${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.targetBranch}' into '${dir}'` - ); - await clone( - `${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}`, - dir, - checkoutInfo.targetBranch - ); - } catch (err) { - logger.error( - `Error checking out ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}.` - ); - throw err; - } - return checkoutInfo; - } else { - logger.info(`folder ${dir} already exists, nothing to check out`); - return undefined; - } -} - -async function getCheckoutInfo(context, node, nodeTriggeringTheJob) { - const mapping = getMapping( - nodeTriggeringTheJob.project, - nodeTriggeringTheJob.mapping, - node.project, - node.mapping, - context.config.github.targetBranch - ); - const sourceGroup = context.config.github.sourceGroup; - const sourceBranch = context.config.github.sourceBranch; - const targetGroup = node.repo.group; - const targetProject = node.repo.name; - const targetBranch = mapping.target; - const forkedProjectName = await getForkedProjectName( - context.octokit, - targetGroup, - targetProject, - sourceGroup - ); - logger.info( - `[${targetGroup}/${targetProject}] Getting checkout info. sourceProject: ${forkedProjectName} sourceGroup: ${sourceGroup}. sourceBranch: ${sourceBranch}. targetGroup: ${targetGroup}. targetBranch: ${targetBranch}. Mapping: ${ - mapping.source - ? "source:" + mapping.source + " target:" + mapping.target - : "Not defined" - }` - ); - return (await doesBranchExist( - context.octokit, - sourceGroup, - forkedProjectName, - sourceBranch - )) - ? { - project: forkedProjectName, - group: sourceGroup, - branch: sourceBranch, - targetGroup, - targetBranch, - merge: await hasPullRequest( - context.octokit, - targetGroup, - targetProject, - sourceBranch, - context.config.github.author - ) - } - : (await doesBranchExist( - context.octokit, - targetGroup, - targetProject, - sourceBranch - )) - ? { - project: targetProject, - group: targetGroup, - branch: sourceBranch, - targetGroup, - targetBranch, - merge: await hasPullRequest( - context.octokit, - targetGroup, - targetProject, - sourceBranch, - context.config.github.author - ) - } - : (await doesBranchExist( - context.octokit, - targetGroup, - targetProject, - targetBranch - )) - ? { - project: targetProject, - group: targetGroup, - branch: targetBranch, - targetGroup, - targetBranch, - merge: false - } - : undefined; -} - -/** - * It returns a {source, target} object - * @param {String} projectTriggeringTheJob the project name of the project triggering the job - * @param {Object} projectTriggeringTheJobMapping the mapping object of the project triggering the job - * @param {String} currentProject the project name of the current project - * @param {Object} currentProjectMapping the mapping object of the current project - * @param {String} targetBranch the target branch - */ -function getMapping( - projectTriggeringTheJob, - projectTriggeringTheJobMapping, - currentProject, - currentProjectMapping, - targetBranch -) { - // If the current project is the project triggering the job there's no mapping - if (currentProject !== projectTriggeringTheJob) { - // If the current project has been excluded from the mapping, there's no mapping - if ( - projectTriggeringTheJobMapping && - (projectTriggeringTheJobMapping.exclude - ? 
!projectTriggeringTheJobMapping.exclude.includes(currentProject) - : true) - ) { - // The mapping is either taken from the project mapping or from the default one - const mapping = projectTriggeringTheJobMapping.dependencies[ - currentProject - ] - ? projectTriggeringTheJobMapping.dependencies[currentProject] - : projectTriggeringTheJobMapping.dependencies.default; - return { source: mapping.source, target: mapping.target }; - // If the current project has a mapping and the source matches the targetBranch then this mapping is taken - } else if ( - currentProjectMapping && - currentProjectMapping.source === targetBranch - ) { - return { - source: currentProjectMapping.source, - target: currentProjectMapping.target - }; - } else { - return { target: targetBranch }; - } - } else { - return { target: targetBranch }; - } -} - -function getDir(rootFolder, project) { - const folder = - rootFolder !== undefined && rootFolder !== "" ? rootFolder : "."; - return `${folder}/${project.replace(/ |-/g, "_")}`; -} - -async function getForkedProjectName(octokit, owner, project, wantedOwner) { - if (owner !== wantedOwner) { - const forkedProject = await getForkedProject( - octokit, - owner, - project, - wantedOwner - ); - return !forkedProject || !forkedProject.name ? project : forkedProject.name; - } else { - return project; - } -} - -/** - * It checks which is the proper URL to retrieve the definition from in case a URL with a ${} expression is defined. It will try sourceGroup/sourceBranch, then targetGroup/sourceBranch and then targetGroup/targetBranch, in this order. - * @param {Object} context the context information - * @param {String} definitionFile the definition file path or URL - */ -async function getPlaceHolders(context, definitionFile) { - if (definitionFile.startsWith("http") && definitionFile.includes("${")) { - const placeHolderSource = { - GROUP: context.config.github.sourceGroup, - PROJECT_NAME: context.config.github.project, - BRANCH: context.config.github.sourceBranch - }; - const sourceUrl = treatUrl(definitionFile, placeHolderSource); - if (!(await checkUrlExist(sourceUrl))) { - const placeHoldersTargetSource = { - GROUP: context.config.github.group, - PROJECT_NAME: context.config.github.project, - BRANCH: context.config.github.sourceBranch - }; - const targetSourceUrl = treatUrl( - definitionFile, - placeHoldersTargetSource - ); - if (!(await checkUrlExist(targetSourceUrl))) { - const placeHoldersTarget = { - GROUP: context.config.github.group, - PROJECT_NAME: context.config.github.project, - BRANCH: context.config.github.targetBranch - }; - const targetUrl = treatUrl(definitionFile, placeHoldersTarget); - if (!(await checkUrlExist(targetUrl))) { - throw new Error( - `Definition file ${definitionFile} does not exist for any of these cases: ${sourceUrl}, ${targetSourceUrl} or ${targetUrl}` - ); - } else { - return placeHoldersTarget; - } - } else { - return placeHoldersTargetSource; - } - } else { - return placeHolderSource; - } - } - return {}; -} - -module.exports = { - checkoutDefinitionTree, - getCheckoutInfo, - getDir, - getPlaceHolders, - getMapping -}; - - -/***/ }), -/* 331 */, -/* 332 */, -/* 333 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { readDefinitionFile } = __webpack_require__(846); -const { treatProject } = __webpack_require__(866); -const { validateNode } = __webpack_require__(755); - -/** - * It returns the definition tree plus dependencies as an object. - * - * @param {string} file - The definition file. 
It can be a URL or a path in the filesystem. - * @param {Object} urlPlaceHolders the URL placeholders to replace in the url. This is needed in case either the definition file or the dependencies file is loaded from a URL - */ -async function getTree(file, urlPlaceHolders = {}) { - const definition = await readDefinitionFile(file, urlPlaceHolders); - return dependencyListToTree(definition.dependencies, definition); -} - -/** - * It returns the definition tree plus dependencies as an object for a particular project. - * - * @param {string} file - The definition file. It can be a URL or a path in the filesystem. - * @param {string} project - The project name to look for. - * @param {Object} urlPlaceHolders the URL placeholders to replace in the url. This is needed in case either the definition file or the dependencies file is loaded from a URL - */ -async function getTreeForProject(file, project, urlPlaceHolders = {}) { - return lookForProject( - await getTree(file, urlPlaceHolders), - project, - undefined - ); -} - -/** - * It returns the leaf from the tree corresponding to the project name - * @param {Object} tree - * @param {String} project the project name - */ -function lookForProject(tree, project) { - if (tree && tree.length > 0) { - const leaf = tree.find(leaf => project === leaf.project); - return leaf - ? leaf - : tree - .map(leaf => lookForProject(leaf.children, project)) - .find(child => child); - } else { - return undefined; - } -} - -/** - * it treats the dependency list together with the build configuration to compose the build tree - * @param {Object} dependencyList the dependency list - * @param {Object} buildConfiguration the build configuration directly read from the yaml file - */ -function dependencyListToTree(dependencyList, buildConfiguration) { - const map = dependencyList.reduce((map, dependency, i) => { - map[dependency.project] = { index: i, node: undefined }; - dependency.children = []; - dependency.parents = []; - return map; - }, {}); - - return dependencyList.reduce((roots, node) => { - validateNode(node); - map[node.project].node = { - ...node, - repo: { - group: node.project.split("/")[0], - name: node.project.split("/")[1] - }, - ...treatProject(node.project, buildConfiguration) - }; - - if (node.dependencies && node.dependencies.length > 0) { - node.dependencies.forEach(dependency => { - dependencyList[map[dependency.project].index].children.push({ - ...map[node.project].node - }); - node.parents.push({ - ...map[dependency.project].node - }); - }); - } else { - roots.push(map[node.project].node); - } - return roots; - }, []); -} - -module.exports = { - getTree, - getTreeForProject, - dependencyListToTree -}; - - -/***/ }), -/* 334 */, -/* 335 */, -/* 336 */, -/* 337 */, -/* 338 */, -/* 339 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -function resolveYamlNull(data) { - if (data === null) return true; - - var max = data.length; - - return (max === 1 && data === '~') || - (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL')); -} - -function constructYamlNull() { - return null; -} - -function isNull(object) { - return object === null; -} - -module.exports = new Type('tag:yaml.org,2002:null', { - kind: 'scalar', - resolve: resolveYamlNull, - construct: constructYamlNull, - predicate: isNull, - represent: { - canonical: function () { return '~'; }, - lowercase: function () { return 'null'; }, - uppercase: function () { return 'NULL'; }, - camelcase: function () { return 
'Null'; } - }, - defaultStyle: 'lowercase' -}); - - -/***/ }), -/* 340 */, -/* 341 */, -/* 342 */, -/* 343 */, -/* 344 */, -/* 345 */, -/* 346 */, -/* 347 */, -/* 348 */, -/* 349 */, -/* 350 */, -/* 351 */, -/* 352 */, -/* 353 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const u = __webpack_require__(0).fromCallback -module.exports = { - move: u(__webpack_require__(92)) -} - - -/***/ }), -/* 354 */, -/* 355 */, -/* 356 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -/*! - * is-plain-object - * - * Copyright (c) 2014-2017, Jon Schlinkert. - * Released under the MIT License. - */ - -function isObject(o) { - return Object.prototype.toString.call(o) === '[object Object]'; -} - -function isPlainObject(o) { - var ctor,prot; - - if (isObject(o) === false) return false; - - // If has modified constructor - ctor = o.constructor; - if (ctor === undefined) return true; - - // If has modified prototype - prot = ctor.prototype; - if (isObject(prot) === false) return false; - - // If constructor does not have an Object-specific method - if (prot.hasOwnProperty('isPrototypeOf') === false) { - return false; - } - - // Most likely a plain Object - return true; -} - -exports.isPlainObject = isPlainObject; - - -/***/ }), -/* 357 */ -/***/ (function(module) { - -module.exports = require("assert"); - -/***/ }), -/* 358 */, -/* 359 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __importStar(__webpack_require__(470)); -const upload_specification_1 = __webpack_require__(590); -const upload_http_client_1 = __webpack_require__(608); -const utils_1 = __webpack_require__(870); -const download_http_client_1 = __webpack_require__(855); -const download_specification_1 = __webpack_require__(532); -const config_variables_1 = __webpack_require__(401); -const path_1 = __webpack_require__(622); -class DefaultArtifactClient { - /** - * Constructs a DefaultArtifactClient - */ - static create() { - return new DefaultArtifactClient(); - } - /** - * Uploads an artifact - */ - uploadArtifact(name, files, rootDirectory, options) { - return __awaiter(this, void 0, void 0, function* () { - utils_1.checkArtifactName(name); - // Get specification for the files being uploaded - const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files); - const uploadResponse = { - artifactName: name, - artifactItems: [], - size: 0, - failedItems: [] - }; - const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); - if (uploadSpecification.length === 0) { - core.warning(`No files found that can be uploaded`); - } - else { - // Create an entry for the artifact in the file container - const response = yield uploadHttpClient.createArtifactInFileContainer(name); - if (!response.fileContainerResourceUrl) { - core.debug(response.toString()); - throw new Error('No URL provided by the Artifact Service to upload an artifact to'); - } - core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); - // Upload each of the files that were found concurrently - const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); - // Update the size of the artifact to indicate we are done uploading - // The uncompressed size is used for display when downloading a zip of the artifact from the UI - yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); - core.info(`Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. 
There were ${uploadResult.failedItems.length} items that failed to upload`); - uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath); - uploadResponse.size = uploadResult.uploadSize; - uploadResponse.failedItems = uploadResult.failedItems; - } - return uploadResponse; - }); - } - downloadArtifact(name, path, options) { - return __awaiter(this, void 0, void 0, function* () { - const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); - const artifacts = yield downloadHttpClient.listArtifacts(); - if (artifacts.count === 0) { - throw new Error(`Unable to find any artifacts for the associated workflow`); - } - const artifactToDownload = artifacts.value.find(artifact => { - return artifact.name === name; - }); - if (!artifactToDownload) { - throw new Error(`Unable to find an artifact with the name: ${name}`); - } - const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); - if (!path) { - path = config_variables_1.getWorkSpaceDirectory(); - } - path = path_1.normalize(path); - path = path_1.resolve(path); - // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories - const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); - if (downloadSpecification.filesToDownload.length === 0) { - core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); - } - else { - // Create all necessary directories recursively before starting any download - yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); - core.info('Directory structure has been setup for the artifact'); - yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); - yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); - } - return { - artifactName: name, - downloadPath: downloadSpecification.rootDownloadLocation - }; - }); - } - downloadAllArtifacts(path) { - return __awaiter(this, void 0, void 0, function* () { - const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); - const response = []; - const artifacts = yield downloadHttpClient.listArtifacts(); - if (artifacts.count === 0) { - core.info('Unable to find any artifacts for the associated workflow'); - return response; - } - if (!path) { - path = config_variables_1.getWorkSpaceDirectory(); - } - path = path_1.normalize(path); - path = path_1.resolve(path); - let downloadedArtifacts = 0; - while (downloadedArtifacts < artifacts.count) { - const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; - downloadedArtifacts += 1; - // Get container entries for the specific artifact - const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true); - if (downloadSpecification.filesToDownload.length === 0) { - core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); - } - else { - yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); - yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); - 
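// Note: the surrounding while-loop downloads one artifact per iteration; the file-level concurrency from getDownloadFileConcurrency() (see config-variables below) only parallelizes files inside a single artifact. -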
yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); - } - response.push({ - artifactName: currentArtifactToDownload.name, - downloadPath: downloadSpecification.rootDownloadLocation - }); - } - return response; - }); - } -} -exports.DefaultArtifactClient = DefaultArtifactClient; -//# sourceMappingURL=artifact-client.js.map - -/***/ }), -/* 360 */, -/* 361 */, -/* 362 */, -/* 363 */ -/***/ (function(module) { - -module.exports = register - -function register (state, name, method, options) { - if (typeof method !== 'function') { - throw new Error('method for before hook must be a function') - } - - if (!options) { - options = {} - } - - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options) - }, method)() - } - - return Promise.resolve() - .then(function () { - if (!state.registry[name]) { - return method(options) - } - - return (state.registry[name]).reduce(function (method, registered) { - return registered.hook.bind(null, method, options) - }, method)() - }) -} - - -/***/ }), -/* 364 */, -/* 365 */, -/* 366 */, -/* 367 */, -/* 368 */, -/* 369 */, -/* 370 */, -/* 371 */, -/* 372 */, -/* 373 */, -/* 374 */, -/* 375 */ -/***/ (function(module) { - -function formatDate(date) { - return `${date.getFullYear()}${date.getMonth()}${date.getDate()}${date.getHours()}${date.getMinutes()}${date.getSeconds()}`; -} - -module.exports = { formatDate }; - - -/***/ }), -/* 376 */, -/* 377 */, -/* 378 */, -/* 379 */, -/* 380 */, -/* 381 */, -/* 382 */, -/* 383 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const assert = __webpack_require__(357); -const path = __webpack_require__(622); -const pathHelper = __webpack_require__(972); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Helper class for parsing paths into segments - */ -class Path { - /** - * Constructs a Path - * @param itemPath Path or array of segments - */ - constructor(itemPath) { - this.segments = []; - // String - if (typeof itemPath === 'string') { - assert(itemPath, `Parameter 'itemPath' must not be empty`); - // Normalize slashes and trim unnecessary trailing slash - itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); - // Not rooted - if (!pathHelper.hasRoot(itemPath)) { - this.segments = itemPath.split(path.sep); - } - // Rooted - else { - // Add all segments, while not at the root - let remaining = itemPath; - let dir = pathHelper.dirname(remaining); - while (dir !== remaining) { - // Add the segment - const basename = path.basename(remaining); - this.segments.unshift(basename); - // Truncate the last segment - remaining = dir; - dir = pathHelper.dirname(remaining); - } - // Remainder is the root - this.segments.unshift(remaining); - } - } - // Array - else { - // Must not be empty - assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); - // Each segment - for (let i = 0; i < itemPath.length; i++) { - let segment = itemPath[i]; - // Must not be empty - assert(segment, `Parameter 'itemPath' must not contain any empty segments`); - // Normalize slashes - segment = pathHelper.normalizeSeparators(itemPath[i]); - // Root segment - if (i === 0 && pathHelper.hasRoot(segment)) { - segment = pathHelper.safeTrimTrailingSeparator(segment); - assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); - 
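// A root segment such as '/' or 'C:\' is its own dirname, which is what the assert above verified, so it can be stored as a single segment. -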
this.segments.push(segment); - } - // All other segments - else { - // Must not contain slash - assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); - this.segments.push(segment); - } - } - } - } - /** - * Converts the path to it's string representation - */ - toString() { - // First segment - let result = this.segments[0]; - // All others - let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); - for (let i = 1; i < this.segments.length; i++) { - if (skipSlash) { - skipSlash = false; - } - else { - result += path.sep; - } - result += this.segments[i]; - } - return result; - } -} -exports.Path = Path; -//# sourceMappingURL=internal-path.js.map - -/***/ }), -/* 384 */, -/* 385 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -var isPlainObject = __webpack_require__(356); -var universalUserAgent = __webpack_require__(40); - -function lowercaseKeys(object) { - if (!object) { - return {}; - } - - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach(key => { - if (isPlainObject.isPlainObject(options[key])) { - if (!(key in defaults)) Object.assign(result, { - [key]: options[key] - });else result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { - [key]: options[key] - }); - } - }); - return result; -} - -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === undefined) { - delete obj[key]; - } - } - - return obj; -} - -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? { - method, - url - } : { - url: method - }, options); - } else { - options = Object.assign({}, route); - } // lowercase header names before merging with defaults to avoid duplicates - - - options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging - - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten - - if (defaults && defaults.mediaType.previews.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); - } - - mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); - return mergedOptions; -} - -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? 
"&" : "?"; - const names = Object.keys(parameters); - - if (names.length === 0) { - return url; - } - - return url + separator + names.map(name => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -const urlVariableRegex = /\{[^}]+\}/g; - -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} - -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - - if (!matches) { - return []; - } - - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -function omit(object, keysToOmit) { - return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { - obj[key] = object[key]; - return obj; - }, {}); -} - -// Based on https://github.com/bramstein/url-template, licensed under BSD -// TODO: create separate package. -// -// Copyright (c) 2012-2014, Bram Stein -// All rights reserved. -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions -// are met: -// 1. Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// 2. Redistributions in binary form must reproduce the above copyright -// notice, this list of conditions and the following disclaimer in the -// documentation and/or other materials provided with the distribution. -// 3. The name of the author may not be used to endorse or promote products -// derived from this software without specific prior written permission. -// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED -// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF -// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO -// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, -// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, -// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY -// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING -// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, -// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -/* istanbul ignore file */ -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - - return part; - }).join(""); -} - -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} - -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); - - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} - -function isDefined(value) { - return value !== undefined && value !== null; -} - -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} - -function getValues(context, operator, key, modifier) { - var value = context[key], - result = []; - - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function (value) { - tmp.push(encodeValue(operator, value)); - }); - } else { - Object.keys(value).forEach(function (k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - - return result; -} - -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} - -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - - expression.split(/,/g).forEach(function (variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - - if (operator && operator !== "+") { - var separator = ","; - - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - - return (values.length !== 0 ? 
operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - }); -} - -function parse(options) { - // https://fetch.spec.whatwg.org/#methods - let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible - - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later - - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - - const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - - if (!isBinaryRequest) { - if (options.mediaType.format) { - // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw - headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); - } - - if (options.mediaType.previews.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { - const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } // for GET/HEAD requests, set URL query parameters from remaining parameters - // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters - - - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } else { - headers["content-length"] = 0; - } - } - } // default content-type for JSON if body is set - - - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. - // fetch does not allow to set `content-length` header, but we can set body to an empty string - - - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } // Only return body/request keys if present - - - return Object.assign({ - method, - url, - headers - }, typeof body !== "undefined" ? { - body - } : null, options.request ? 
{ - request: options.request - } : null); -} - -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS = merge(oldDefaults, newDefaults); - const endpoint = endpointWithDefaults.bind(null, DEFAULTS); - return Object.assign(endpoint, { - DEFAULTS, - defaults: withDefaults.bind(null, DEFAULTS), - merge: merge.bind(null, DEFAULTS), - parse - }); -} - -const VERSION = "6.0.8"; - -const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. -// So we use RequestParameters and add method as additional required property. - -const DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "", - previews: [] - } -}; - -const endpoint = withDefaults(null, DEFAULTS); - -exports.endpoint = endpoint; -//# sourceMappingURL=index.js.map - - -/***/ }), -/* 386 */, -/* 387 */, -/* 388 */, -/* 389 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(747); -const shebangCommand = __webpack_require__(727); - -function readShebang(command) { - // Read the first 150 bytes from the file - const size = 150; - let buffer; - - if (Buffer.alloc) { - // Node.js v4.5+ / v5.10+ - buffer = Buffer.alloc(size); - } else { - // Old Node.js API - buffer = new Buffer(size); - buffer.fill(0); // zero-fill - } - - let fd; - - try { - fd = fs.openSync(command, 'r'); - fs.readSync(fd, buffer, 0, size, 0); - fs.closeSync(fd); - } catch (e) { /* Empty */ } - - // Attempt to extract shebang (null is returned if not a shebang) - return shebangCommand(buffer.toString()); -} - -module.exports = readShebang; - - -/***/ }), -/* 390 */, -/* 391 */, -/* 392 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - - if (typeof process === "object" && "version" in process) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - - return ""; -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - -/***/ }), -/* 393 */, -/* 394 */, -/* 395 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { create } = __webpack_require__(214); -const core = __webpack_require__(470); -const noFileOptions = __webpack_require__(787); -const { findFilesToUpload } = __webpack_require__(819); -const { logger } = __webpack_require__(79); -var assert = __webpack_require__(357); - -async function run(archiveArtifacts, on = ["success", "failure"]) { - assert(archiveArtifacts, "archiveArtifacts is not defined"); - assert(archiveArtifacts.paths, "archiveArtifacts.paths is not defined"); - assert(archiveArtifacts.name, "archiveArtifacts.name is not defined"); - const path = archiveArtifacts.paths - .filter(pathItem => pathItem && pathItem.path && on.includes(pathItem.on)) - .reduce((acc, pathItem) => acc.concat(pathItem.path, "\n"), "") - .trim(); - try { - logger.info(`Uploading artifacts for path [${path}]`); - const searchResult = await findFilesToUpload(path); - if (searchResult.filesToUpload.length === 0) { - switch (archiveArtifacts.ifNoFilesFound) { - case 
noFileOptions.error: { - core.setFailed( - `[ERROR] No files were found with the provided path: ${path}. No artifacts will be uploaded.` - ); - break; - } - case noFileOptions.ignore: { - core.info( - `[INFO] No files were found with the provided path: ${path}. No artifacts will be uploaded.` - ); - break; - } - case noFileOptions.warn: - default: { - core.warning( - `[WARNING] No files were found with the provided path: ${path}. No artifacts will be uploaded.` - ); - } - } - } else { - core.info( - `[INFO] With the provided path (${path}), there will be ${searchResult.filesToUpload.length} file(s) uploaded` - ); - core.debug( - `[DEBUG] Root artifact directory is ${searchResult.rootDirectory}` - ); - - const artifactClient = create(); - const options = { - continueOnError: false - }; - const uploadResponse = await artifactClient.uploadArtifact( - archiveArtifacts.name, - searchResult.filesToUpload, - searchResult.rootDirectory, - options - ); - if (uploadResponse.failedItems.length > 0) { - core.setFailed( - `[ERROR] An error was encountered when uploading ${uploadResponse.artifactName}. There were ${uploadResponse.failedItems.length} items that failed to upload.` - ); - } else { - core.info( - `[INFO] Artifact ${uploadResponse.artifactName} has been successfully uploaded!` - ); - } - return uploadResponse; - } - } catch (err) { - core.setFailed(err.message); - } - return undefined; -} -module.exports = { - run -}; - - -/***/ }), -/* 396 */, -/* 397 */, -/* 398 */, -/* 399 */, -/* 400 */, -/* 401 */ -/***/ (function(__unusedmodule, exports) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -// The number of concurrent uploads that happens at the same time -function getUploadFileConcurrency() { - return 2; -} -exports.getUploadFileConcurrency = getUploadFileConcurrency; -// When uploading large files that can't be uploaded with a single http call, this controls -// the chunk size that is used during upload -function getUploadChunkSize() { - return 8 * 1024 * 1024; // 8 MB Chunks -} -exports.getUploadChunkSize = getUploadChunkSize; -// The maximum number of retries that can be attempted before an upload or download fails -function getRetryLimit() { - return 5; -} -exports.getRetryLimit = getRetryLimit; -// With exponential backoff, the larger the retry count, the larger the wait time before another attempt -// The retry multiplier controls by how much the backOff time increases depending on the number of retries -function getRetryMultiplier() { - return 1.5; -} -exports.getRetryMultiplier = getRetryMultiplier; -// The initial wait time if an upload or download fails and a retry is being attempted for the first time -function getInitialRetryIntervalInMilliseconds() { - return 3000; -} -exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds; -// The number of concurrent downloads that happens at the same time -function getDownloadFileConcurrency() { - return 2; -} -exports.getDownloadFileConcurrency = getDownloadFileConcurrency; -function getRuntimeToken() { - const token = process.env['ACTIONS_RUNTIME_TOKEN']; - if (!token) { - throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); - } - return token; -} -exports.getRuntimeToken = getRuntimeToken; -function getRuntimeUrl() { - const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; - if (!runtimeUrl) { - throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); - } - return runtimeUrl; -} -exports.getRuntimeUrl = getRuntimeUrl; -function 
getWorkFlowRunId() { - const workFlowRunId = process.env['GITHUB_RUN_ID']; - if (!workFlowRunId) { - throw new Error('Unable to get GITHUB_RUN_ID env variable'); - } - return workFlowRunId; -} -exports.getWorkFlowRunId = getWorkFlowRunId; -function getWorkSpaceDirectory() { - const workspaceDirectory = process.env['GITHUB_WORKSPACE']; - if (!workspaceDirectory) { - throw new Error('Unable to get GITHUB_WORKSPACE env variable'); - } - return workspaceDirectory; -} -exports.getWorkSpaceDirectory = getWorkSpaceDirectory; -//# sourceMappingURL=config-variables.js.map - -/***/ }), -/* 402 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -/*! - * Tmp - * - * Copyright (c) 2011-2017 KARASZI Istvan - * - * MIT Licensed - */ - -/* - * Module dependencies. - */ -const fs = __webpack_require__(747); -const os = __webpack_require__(87); -const path = __webpack_require__(622); -const crypto = __webpack_require__(417); -const _c = fs.constants && os.constants ? - { fs: fs.constants, os: os.constants } : - process.binding('constants'); -const rimraf = __webpack_require__(569); - -/* - * The working inner variables. - */ -const - // the random characters to choose from - RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', - - TEMPLATE_PATTERN = /XXXXXX/, - - DEFAULT_TRIES = 3, - - CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), - - EBADF = _c.EBADF || _c.os.errno.EBADF, - ENOENT = _c.ENOENT || _c.os.errno.ENOENT, - - DIR_MODE = 448 /* 0o700 */, - FILE_MODE = 384 /* 0o600 */, - - EXIT = 'exit', - - SIGINT = 'SIGINT', - - // this will hold the objects need to be removed on exit - _removeObjects = []; - -var - _gracefulCleanup = false; - -/** - * Random name generator based on crypto. - * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript - * - * @param {number} howMany - * @returns {string} the generated random name - * @private - */ -function _randomChars(howMany) { - var - value = [], - rnd = null; - - // make sure that we do not fail because we ran out of entropy - try { - rnd = crypto.randomBytes(howMany); - } catch (e) { - rnd = crypto.pseudoRandomBytes(howMany); - } - - for (var i = 0; i < howMany; i++) { - value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); - } - - return value.join(''); -} - -/** - * Checks whether the `obj` parameter is defined or not. - * - * @param {Object} obj - * @returns {boolean} true if the object is undefined - * @private - */ -function _isUndefined(obj) { - return typeof obj === 'undefined'; -} - -/** - * Parses the function arguments. - * - * This function helps to have optional arguments. - * - * @param {(Options|Function)} options - * @param {Function} callback - * @returns {Array} parsed arguments - * @private - */ -function _parseArguments(options, callback) { - /* istanbul ignore else */ - if (typeof options === 'function') { - return [{}, options]; - } - - /* istanbul ignore else */ - if (_isUndefined(options)) { - return [{}, callback]; - } - - return [options, callback]; -} - -/** - * Generates a new temporary name. 
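- * When neither opts.name nor opts.template is given, the generated name combines a prefix ('tmp-' by default), the current process pid, twelve random characters and an optional postfix (see below).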
- * - * @param {Object} opts - * @returns {string} the new random name according to opts - * @private - */ -function _generateTmpName(opts) { - - const tmpDir = _getTmpDir(); - - // fail early on missing tmp dir - if (isBlank(opts.dir) && isBlank(tmpDir)) { - throw new Error('No tmp dir specified'); - } - - /* istanbul ignore else */ - if (!isBlank(opts.name)) { - return path.join(opts.dir || tmpDir, opts.name); - } - - // mkstemps like template - // opts.template has already been guarded in tmpName() below - /* istanbul ignore else */ - if (opts.template) { - var template = opts.template; - // make sure that we prepend the tmp path if none was given - /* istanbul ignore else */ - if (path.basename(template) === template) - template = path.join(opts.dir || tmpDir, template); - return template.replace(TEMPLATE_PATTERN, _randomChars(6)); - } - - // prefix and postfix - const name = [ - (isBlank(opts.prefix) ? 'tmp-' : opts.prefix), - process.pid, - _randomChars(12), - (opts.postfix ? opts.postfix : '') - ].join(''); - - return path.join(opts.dir || tmpDir, name); -} - -/** - * Gets a temporary file name. - * - * @param {(Options|tmpNameCallback)} options options or callback - * @param {?tmpNameCallback} callback the callback function - */ -function tmpName(options, callback) { - var - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1], - tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES; - - /* istanbul ignore else */ - if (isNaN(tries) || tries < 0) - return cb(new Error('Invalid tries')); - - /* istanbul ignore else */ - if (opts.template && !opts.template.match(TEMPLATE_PATTERN)) - return cb(new Error('Invalid template provided')); - - (function _getUniqueName() { - try { - const name = _generateTmpName(opts); - - // check whether the path exists then retry if needed - fs.stat(name, function (err) { - /* istanbul ignore else */ - if (!err) { - /* istanbul ignore else */ - if (tries-- > 0) return _getUniqueName(); - - return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); - } - - cb(null, name); - }); - } catch (err) { - cb(err); - } - }()); -} - -/** - * Synchronous version of tmpName. - * - * @param {Object} options - * @returns {string} the generated random name - * @throws {Error} if the options are invalid or could not generate a filename - */ -function tmpNameSync(options) { - var - args = _parseArguments(options), - opts = args[0], - tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES; - - /* istanbul ignore else */ - if (isNaN(tries) || tries < 0) - throw new Error('Invalid tries'); - - /* istanbul ignore else */ - if (opts.template && !opts.template.match(TEMPLATE_PATTERN)) - throw new Error('Invalid template provided'); - - do { - const name = _generateTmpName(opts); - try { - fs.statSync(name); - } catch (e) { - return name; - } - } while (tries-- > 0); - - throw new Error('Could not get a unique tmp filename, max tries reached'); -} - -/** - * Creates and opens a temporary file. 
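- * The file is opened with the O_CREAT | O_EXCL | O_RDWR flags (CREATE_FLAGS above), so the call fails if the path already exists instead of reusing it.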
- * - * @param {(Options|fileCallback)} options the config options or the callback function - * @param {?fileCallback} callback - */ -function file(options, callback) { - var - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; - - // gets a temporary filename - tmpName(opts, function _tmpNameCreated(err, name) { - /* istanbul ignore else */ - if (err) return cb(err); - - // create and open the file - fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { - /* istanbul ignore else */ - if (err) return cb(err); - - if (opts.discardDescriptor) { - return fs.close(fd, function _discardCallback(err) { - /* istanbul ignore else */ - if (err) { - // Low probability, and the file exists, so this could be - // ignored. If it isn't we certainly need to unlink the - // file, and if that fails too its error is more - // important. - try { - fs.unlinkSync(name); - } catch (e) { - if (!isENOENT(e)) { - err = e; - } - } - return cb(err); - } - cb(null, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts)); - }); - } - /* istanbul ignore else */ - if (opts.detachDescriptor) { - return cb(null, name, fd, _prepareTmpFileRemoveCallback(name, -1, opts)); - } - cb(null, name, fd, _prepareTmpFileRemoveCallback(name, fd, opts)); - }); - }); -} - -/** - * Synchronous version of file. - * - * @param {Options} options - * @returns {FileSyncObject} object consists of name, fd and removeCallback - * @throws {Error} if cannot create a file - */ -function fileSync(options) { - var - args = _parseArguments(options), - opts = args[0]; - - const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; - const name = tmpNameSync(opts); - var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); - /* istanbul ignore else */ - if (opts.discardDescriptor) { - fs.closeSync(fd); - fd = undefined; - } - - return { - name: name, - fd: fd, - removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts) - }; -} - -/** - * Creates a temporary directory. - * - * @param {(Options|dirCallback)} options the options or the callback function - * @param {?dirCallback} callback - */ -function dir(options, callback) { - var - args = _parseArguments(options, callback), - opts = args[0], - cb = args[1]; - - // gets a temporary filename - tmpName(opts, function _tmpNameCreated(err, name) { - /* istanbul ignore else */ - if (err) return cb(err); - - // create the directory - fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { - /* istanbul ignore else */ - if (err) return cb(err); - - cb(null, name, _prepareTmpDirRemoveCallback(name, opts)); - }); - }); -} - -/** - * Synchronous version of dir. - * - * @param {Options} options - * @returns {DirSyncObject} object consists of name and removeCallback - * @throws {Error} if it cannot create a directory - */ -function dirSync(options) { - var - args = _parseArguments(options), - opts = args[0]; - - const name = tmpNameSync(opts); - fs.mkdirSync(name, opts.mode || DIR_MODE); - - return { - name: name, - removeCallback: _prepareTmpDirRemoveCallback(name, opts) - }; -} - -/** - * Removes files asynchronously. 
- * - * @param {Object} fdPath - * @param {Function} next - * @private - */ -function _removeFileAsync(fdPath, next) { - const _handler = function (err) { - if (err && !isENOENT(err)) { - // reraise any unanticipated error - return next(err); - } - next(); - } - - if (0 <= fdPath[0]) - fs.close(fdPath[0], function (err) { - fs.unlink(fdPath[1], _handler); - }); - else fs.unlink(fdPath[1], _handler); -} - -/** - * Removes files synchronously. - * - * @param {Object} fdPath - * @private - */ -function _removeFileSync(fdPath) { - try { - if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); - } catch (e) { - // reraise any unanticipated error - if (!isEBADF(e) && !isENOENT(e)) throw e; - } finally { - try { - fs.unlinkSync(fdPath[1]); - } - catch (e) { - // reraise any unanticipated error - if (!isENOENT(e)) throw e; - } - } -} - -/** - * Prepares the callback for removal of the temporary file. - * - * @param {string} name the path of the file - * @param {number} fd file descriptor - * @param {Object} opts - * @returns {fileCallback} - * @private - */ -function _prepareTmpFileRemoveCallback(name, fd, opts) { - const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name]); - const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], removeCallbackSync); - - if (!opts.keep) _removeObjects.unshift(removeCallbackSync); - - return removeCallback; -} - -/** - * Simple wrapper for rimraf. - * - * @param {string} dirPath - * @param {Function} next - * @private - */ -function _rimrafRemoveDirWrapper(dirPath, next) { - rimraf(dirPath, next); -} - -/** - * Simple wrapper for rimraf.sync. - * - * @param {string} dirPath - * @private - */ -function _rimrafRemoveDirSyncWrapper(dirPath, next) { - try { - return next(null, rimraf.sync(dirPath)); - } catch (err) { - return next(err); - } -} - -/** - * Prepares the callback for removal of the temporary directory. - * - * @param {string} name - * @param {Object} opts - * @returns {Function} the callback - * @private - */ -function _prepareTmpDirRemoveCallback(name, opts) { - const removeFunction = opts.unsafeCleanup ? _rimrafRemoveDirWrapper : fs.rmdir.bind(fs); - const removeFunctionSync = opts.unsafeCleanup ? _rimrafRemoveDirSyncWrapper : fs.rmdirSync.bind(fs); - const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name); - const removeCallback = _prepareRemoveCallback(removeFunction, name, removeCallbackSync); - if (!opts.keep) _removeObjects.unshift(removeCallbackSync); - - return removeCallback; -} - -/** - * Creates a guarded function wrapping the removeFunction call. - * - * @param {Function} removeFunction - * @param {Object} arg - * @returns {Function} - * @private - */ -function _prepareRemoveCallback(removeFunction, arg, cleanupCallbackSync) { - var called = false; - - return function _cleanupCallback(next) { - next = next || function () {}; - if (!called) { - const toRemove = cleanupCallbackSync || _cleanupCallback; - const index = _removeObjects.indexOf(toRemove); - /* istanbul ignore else */ - if (index >= 0) _removeObjects.splice(index, 1); - - called = true; - // sync? - if (removeFunction.length === 1) { - try { - removeFunction(arg); - return next(null); - } - catch (err) { - // if no next is provided and since we are - // in silent cleanup mode on process exit, - // we will ignore the error - return next(err); - } - } else return removeFunction(arg, next); - } else return next(new Error('cleanup callback has already been called')); - }; -} - -/** - * The garbage collector. 
- * - * @private - */ -function _garbageCollector() { - /* istanbul ignore else */ - if (!_gracefulCleanup) return; - - // the function being called removes itself from _removeObjects, - // loop until _removeObjects is empty - while (_removeObjects.length) { - try { - _removeObjects[0](); - } catch (e) { - // already removed? - } - } -} - -/** - * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. - */ -function isEBADF(error) { - return isExpectedError(error, -EBADF, 'EBADF'); -} - -/** - * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. - */ -function isENOENT(error) { - return isExpectedError(error, -ENOENT, 'ENOENT'); -} - -/** - * Helper to determine whether the expected error code matches the actual code and errno, - * which will differ between the supported node versions. - * - * - Node >= 7.0: - * error.code {string} - * error.errno {string|number} any numerical value will be negated - * - * - Node >= 6.0 < 7.0: - * error.code {string} - * error.errno {number} negated - * - * - Node >= 4.0 < 6.0: introduces SystemError - * error.code {string} - * error.errno {number} negated - * - * - Node >= 0.10 < 4.0: - * error.code {number} negated - * error.errno n/a - */ -function isExpectedError(error, code, errno) { - return error.code === code || error.code === errno; -} - -/** - * Helper which determines whether a string s is blank, that is undefined, or empty or null. - * - * @private - * @param {string} s - * @returns {Boolean} true whether the string s is blank, false otherwise - */ -function isBlank(s) { - return s === null || s === undefined || !s.trim(); -} - -/** - * Sets the graceful cleanup. - */ -function setGracefulCleanup() { - _gracefulCleanup = true; -} - -/** - * Returns the currently configured tmp dir from os.tmpdir(). - * - * @private - * @returns {string} the currently configured tmp dir - */ -function _getTmpDir() { - return os.tmpdir(); -} - -/** - * If there are multiple different versions of tmp in place, make sure that - * we recognize the old listeners. - * - * @param {Function} listener - * @private - * @returns {Boolean} true whether listener is a legacy listener - */ -function _is_legacy_listener(listener) { - return (listener.name === '_exit' || listener.name === '_uncaughtExceptionThrown') - && listener.toString().indexOf('_garbageCollector();') > -1; -} - -/** - * Safely install SIGINT listener. - * - * NOTE: this will only work on OSX and Linux. - * - * @private - */ -function _safely_install_sigint_listener() { - - const listeners = process.listeners(SIGINT); - const existingListeners = []; - for (let i = 0, length = listeners.length; i < length; i++) { - const lstnr = listeners[i]; - /* istanbul ignore else */ - if (lstnr.name === '_tmp$sigint_listener') { - existingListeners.push(lstnr); - process.removeListener(SIGINT, lstnr); - } - } - process.on(SIGINT, function _tmp$sigint_listener(doExit) { - for (let i = 0, length = existingListeners.length; i < length; i++) { - // let the existing listener do the garbage collection (e.g. jest sandbox) - try { - existingListeners[i](false); - } catch (err) { - // ignore - } - } - try { - // force the garbage collector even it is called again in the exit listener - _garbageCollector(); - } finally { - if (!!doExit) { - process.exit(0); - } - } - }); -} - -/** - * Safely install process exit listener. 
- * - * @private - */ -function _safely_install_exit_listener() { - const listeners = process.listeners(EXIT); - - // collect any existing listeners - const existingListeners = []; - for (let i = 0, length = listeners.length; i < length; i++) { - const lstnr = listeners[i]; - /* istanbul ignore else */ - // TODO: remove support for legacy listeners once release 1.0.0 is out - if (lstnr.name === '_tmp$safe_listener' || _is_legacy_listener(lstnr)) { - // we must forget about the uncaughtException listener, hopefully it is ours - if (lstnr.name !== '_uncaughtExceptionThrown') { - existingListeners.push(lstnr); - } - process.removeListener(EXIT, lstnr); - } - } - // TODO: what was the data parameter good for? - process.addListener(EXIT, function _tmp$safe_listener(data) { - for (let i = 0, length = existingListeners.length; i < length; i++) { - // let the existing listener do the garbage collection (e.g. jest sandbox) - try { - existingListeners[i](data); - } catch (err) { - // ignore - } - } - _garbageCollector(); - }); -} - -_safely_install_exit_listener(); -_safely_install_sigint_listener(); - -/** - * Configuration options. - * - * @typedef {Object} Options - * @property {?number} tries the number of tries before give up the name generation - * @property {?string} template the "mkstemp" like filename template - * @property {?string} name fix name - * @property {?string} dir the tmp directory to use - * @property {?string} prefix prefix for the generated name - * @property {?string} postfix postfix for the generated name - * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty - */ - -/** - * @typedef {Object} FileSyncObject - * @property {string} name the name of the file - * @property {string} fd the file descriptor - * @property {fileCallback} removeCallback the callback function to remove the file - */ - -/** - * @typedef {Object} DirSyncObject - * @property {string} name the name of the directory - * @property {fileCallback} removeCallback the callback function to remove the directory - */ - -/** - * @callback tmpNameCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - */ - -/** - * @callback fileCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {number} fd the file descriptor - * @param {cleanupCallback} fn the cleanup callback function - */ - -/** - * @callback dirCallback - * @param {?Error} err the error object if anything goes wrong - * @param {string} name the temporary file name - * @param {cleanupCallback} fn the cleanup callback function - */ - -/** - * Removes the temporary created file or directory. - * - * @callback cleanupCallback - * @param {simpleCallback} [next] function to call after entry was removed - */ - -/** - * Callback function for function composition. 
- * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} - * - * @callback simpleCallback - */ - -// exporting all the needed methods - -// evaluate os.tmpdir() lazily, mainly for simplifying testing but it also will -// allow users to reconfigure the temporary directory -Object.defineProperty(module.exports, 'tmpdir', { - enumerable: true, - configurable: false, - get: function () { - return _getTmpDir(); - } -}); - -module.exports.dir = dir; -module.exports.dirSync = dirSync; - -module.exports.file = file; -module.exports.fileSync = fileSync; - -module.exports.tmpName = tmpName; -module.exports.tmpNameSync = tmpNameSync; - -module.exports.setGracefulCleanup = setGracefulCleanup; - - -/***/ }), -/* 403 */, -/* 404 */, -/* 405 */, -/* 406 */, -/* 407 */, -/* 408 */, -/* 409 */, -/* 410 */, -/* 411 */, -/* 412 */, -/* 413 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = __webpack_require__(141); - - -/***/ }), -/* 414 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -var _hasOwnProperty = Object.prototype.hasOwnProperty; - -function resolveYamlSet(data) { - if (data === null) return true; - - var key, object = data; - - for (key in object) { - if (_hasOwnProperty.call(object, key)) { - if (object[key] !== null) return false; - } - } - - return true; -} - -function constructYamlSet(data) { - return data !== null ? data : {}; -} - -module.exports = new Type('tag:yaml.org,2002:set', { - kind: 'mapping', - resolve: resolveYamlSet, - construct: constructYamlSet -}); - - -/***/ }), -/* 415 */, -/* 416 */, -/* 417 */ -/***/ (function(module) { - -module.exports = require("crypto"); - -/***/ }), -/* 418 */, -/* 419 */, -/* 420 */, -/* 421 */, -/* 422 */, -/* 423 */, -/* 424 */, -/* 425 */, -/* 426 */, -/* 427 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -// Older verions of Node.js might not have `util.getSystemErrorName()`. -// In that case, fall back to a deprecated internal. 
-const util = __webpack_require__(669); - -let uv; - -if (typeof util.getSystemErrorName === 'function') { - module.exports = util.getSystemErrorName; -} else { - try { - uv = process.binding('uv'); - - if (typeof uv.errname !== 'function') { - throw new TypeError('uv.errname is not a function'); - } - } catch (err) { - console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); - uv = null; - } - - module.exports = code => errname(uv, code); -} - -// Used for testing the fallback behavior -module.exports.__test__ = errname; - -function errname(uv, code) { - if (uv) { - return uv.errname(code); - } - - if (!(code < 0)) { - throw new Error('err >= 0'); - } - - return `Unknown system error ${code}`; -} - - - -/***/ }), -/* 428 */, -/* 429 */, -/* 430 */, -/* 431 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const os = __importStar(__webpack_require__(87)); -const utils_1 = __webpack_require__(82); -/** - * Commands - * - * Command Format: - * ::name key=value,key=value::message - * - * Examples: - * ::warning::This is the message - * ::set-env name=MY_VAR::some value - */ -function issueCommand(command, properties, message) { - const cmd = new Command(command, properties, message); - process.stdout.write(cmd.toString() + os.EOL); -} -exports.issueCommand = issueCommand; -function issue(name, message = '') { - issueCommand(name, {}, message); -} -exports.issue = issue; -const CMD_STRING = '::'; -class Command { - constructor(command, properties, message) { - if (!command) { - command = 'missing.command'; - } - this.command = command; - this.properties = properties; - this.message = message; - } - toString() { - let cmdStr = CMD_STRING + this.command; - if (this.properties && Object.keys(this.properties).length > 0) { - cmdStr += ' '; - let first = true; - for (const key in this.properties) { - if (this.properties.hasOwnProperty(key)) { - const val = this.properties[key]; - if (val) { - if (first) { - first = false; - } - else { - cmdStr += ','; - } - cmdStr += `${key}=${escapeProperty(val)}`; - } - } - } - } - cmdStr += `${CMD_STRING}${escapeData(this.message)}`; - return cmdStr; - } -} -function escapeData(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A'); -} -function escapeProperty(s) { - return utils_1.toCommandValue(s) - .replace(/%/g, '%25') - .replace(/\r/g, '%0D') - .replace(/\n/g, '%0A') - .replace(/:/g, '%3A') - .replace(/,/g, '%2C'); -} -//# sourceMappingURL=command.js.map - -/***/ }), -/* 432 */, -/* 433 */, -/* 434 */, -/* 435 */, -/* 436 */ -/***/ (function(module) { - -"use strict"; - - - -function isNothing(subject) { - return (typeof subject === 'undefined') || (subject === null); -} - - -function isObject(subject) { - return (typeof subject === 'object') && (subject !== null); -} - - -function toArray(sequence) { - if (Array.isArray(sequence)) return sequence; - else if (isNothing(sequence)) return []; - - return [ sequence ]; -} - - -function extend(target, source) { - var index, length, key, sourceKeys; - - if (source) { - sourceKeys = Object.keys(source); - - for (index = 0, length = sourceKeys.length; index < 
length; index += 1) { - key = sourceKeys[index]; - target[key] = source[key]; - } - } - - return target; -} - - -function repeat(string, count) { - var result = '', cycle; - - for (cycle = 0; cycle < count; cycle += 1) { - result += string; - } - - return result; -} - - -function isNegativeZero(number) { - return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number); -} - - -module.exports.isNothing = isNothing; -module.exports.isObject = isObject; -module.exports.toArray = toArray; -module.exports.repeat = repeat; -module.exports.isNegativeZero = isNegativeZero; -module.exports.extend = extend; - - -/***/ }), -/* 437 */, -/* 438 */, -/* 439 */, -/* 440 */, -/* 441 */, -/* 442 */, -/* 443 */, -/* 444 */, -/* 445 */, -/* 446 */, -/* 447 */, -/* 448 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -var universalUserAgent = __webpack_require__(796); -var beforeAfterHook = __webpack_require__(12); -var request = __webpack_require__(753); -var graphql = __webpack_require__(898); -var authToken = __webpack_require__(813); - -function _defineProperty(obj, key, value) { - if (key in obj) { - Object.defineProperty(obj, key, { - value: value, - enumerable: true, - configurable: true, - writable: true - }); - } else { - obj[key] = value; - } - - return obj; -} - -function ownKeys(object, enumerableOnly) { - var keys = Object.keys(object); - - if (Object.getOwnPropertySymbols) { - var symbols = Object.getOwnPropertySymbols(object); - if (enumerableOnly) symbols = symbols.filter(function (sym) { - return Object.getOwnPropertyDescriptor(object, sym).enumerable; - }); - keys.push.apply(keys, symbols); - } - - return keys; -} - -function _objectSpread2(target) { - for (var i = 1; i < arguments.length; i++) { - var source = arguments[i] != null ? 
arguments[i] : {}; - - if (i % 2) { - ownKeys(Object(source), true).forEach(function (key) { - _defineProperty(target, key, source[key]); - }); - } else if (Object.getOwnPropertyDescriptors) { - Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); - } else { - ownKeys(Object(source)).forEach(function (key) { - Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); - }); - } - } - - return target; -} - -const VERSION = "2.5.4"; - -class Octokit { - constructor(options = {}) { - const hook = new beforeAfterHook.Collection(); - const requestDefaults = { - baseUrl: request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; // prepend default user agent with `options.userAgent` if set - - requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); - - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; - } - - this.request = request.request.defaults(requestDefaults); - this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { - baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") - })); - this.log = Object.assign({ - debug: () => {}, - info: () => {}, - warn: console.warn.bind(console), - error: console.error.bind(console) - }, options.log); - this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance - // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. - // (2) If only `options.auth` is set, use the default token authentication strategy. - // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. - // TODO: type `options.auth` based on `options.authStrategy`. - - if (!options.authStrategy) { - if (!options.auth) { - // (1) - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - // (2) - const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const auth = options.authStrategy(Object.assign({ - request: this.request - }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ - - hook.wrap("request", auth.hook); - this.auth = auth; - } // apply plugins - // https://stackoverflow.com/a/16345172 - - - const classConstructor = this.constructor; - classConstructor.plugins.forEach(plugin => { - Object.assign(this, plugin(this, options)); - }); - } - - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null)); - } - - }; - return OctokitWithDefaults; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) 
- */ - - - static plugin(p1, ...p2) { - var _a; - - if (p1 instanceof Array) { - console.warn(["Passing an array of plugins to Octokit.plugin() has been deprecated.", "Instead of:", " Octokit.plugin([plugin1, plugin2, ...])", "Use:", " Octokit.plugin(plugin1, plugin2, ...)"].join("\n")); - } - - const currentPlugins = this.plugins; - let newPlugins = [...(p1 instanceof Array ? p1 : [p1]), ...p2]; - const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); - return NewOctokit; - } - -} -Octokit.VERSION = VERSION; -Octokit.plugins = []; - -exports.Octokit = Octokit; -//# sourceMappingURL=index.js.map - - -/***/ }), -/* 449 */, -/* 450 */, -/* 451 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; -// JS-YAML's default schema for `load` function. -// It is not described in the YAML specification. -// -// This schema is based on JS-YAML's default safe schema and includes -// JavaScript-specific types: !!js/undefined, !!js/regexp and !!js/function. -// -// Also this schema is used as default base schema at `Schema.create` function. - - - - - -var Schema = __webpack_require__(917); - - -module.exports = Schema.DEFAULT = new Schema({ - include: [ - __webpack_require__(829) - ], - explicit: [ - __webpack_require__(523), - __webpack_require__(248), - __webpack_require__(570) - ] -}); - - -/***/ }), -/* 452 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const utils_1 = __webpack_require__(870); -/** - * Used for managing http clients during either upload or download - */ -class HttpManager { - constructor(clientCount, userAgent) { - if (clientCount < 1) { - throw new Error('There must be at least one client'); - } - this.userAgent = userAgent; - this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); - } - getClient(index) { - return this.clients[index]; - } - // client disposal is necessary if a keep-alive connection is used to properly close the connection - // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 - disposeAndReplaceClient(index) { - this.clients[index].dispose(); - this.clients[index] = utils_1.createHttpClient(this.userAgent); - } - disposeAndReplaceAllClients() { - for (const [index] of this.clients.entries()) { - this.disposeAndReplaceClient(index); - } - } -} -exports.HttpManager = HttpManager; -//# sourceMappingURL=http-manager.js.map - -/***/ }), -/* 453 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var once = __webpack_require__(49) -var eos = __webpack_require__(562) -var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes - -var noop = function () {} -var ancient = /^v?\.0/.test(process.version) - -var isFn = function (fn) { - return typeof fn === 'function' -} - -var isFS = function (stream) { - if (!ancient) return false // newer node version do not need to care about fs is a special way - if (!fs) return false // browser - return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) -} - -var isRequest = function (stream) { - return stream.setHeader && isFn(stream.abort) -} - -var destroyer = function (stream, reading, writing, callback) { - callback = once(callback) - - var closed = false - stream.on('close', function 
() { - closed = true - }) - - eos(stream, {readable: reading, writable: writing}, function (err) { - if (err) return callback(err) - closed = true - callback() - }) - - var destroyed = false - return function (err) { - if (closed) return - if (destroyed) return - destroyed = true - - if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks - if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want - - if (isFn(stream.destroy)) return stream.destroy() - - callback(err || new Error('stream was destroyed')) - } -} - -var call = function (fn) { - fn() -} - -var pipe = function (from, to) { - return from.pipe(to) -} - -var pump = function () { - var streams = Array.prototype.slice.call(arguments) - var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop - - if (Array.isArray(streams[0])) streams = streams[0] - if (streams.length < 2) throw new Error('pump requires two streams per minimum') - - var error - var destroys = streams.map(function (stream, i) { - var reading = i < streams.length - 1 - var writing = i > 0 - return destroyer(stream, reading, writing, function (err) { - if (!error) error = err - if (err) destroys.forEach(call) - if (reading) return - destroys.forEach(call) - callback(error) - }) - }) - - return streams.reduce(pipe) -} - -module.exports = pump - - -/***/ }), -/* 454 */ -/***/ (function(module, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } - -var Stream = _interopDefault(__webpack_require__(794)); -var http = _interopDefault(__webpack_require__(605)); -var Url = _interopDefault(__webpack_require__(835)); -var https = _interopDefault(__webpack_require__(211)); -var zlib = _interopDefault(__webpack_require__(761)); - -// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js - -// fix for "Readable" isn't a named export issue -const Readable = Stream.Readable; - -const BUFFER = Symbol('buffer'); -const TYPE = Symbol('type'); - -class Blob { - constructor() { - this[TYPE] = ''; - - const blobParts = arguments[0]; - const options = arguments[1]; - - const buffers = []; - let size = 0; - - if (blobParts) { - const a = blobParts; - const length = Number(a.length); - for (let i = 0; i < length; i++) { - const element = a[i]; - let buffer; - if (element instanceof Buffer) { - buffer = element; - } else if (ArrayBuffer.isView(element)) { - buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); - } else if (element instanceof ArrayBuffer) { - buffer = Buffer.from(element); - } else if (element instanceof Blob) { - buffer = element[BUFFER]; - } else { - buffer = Buffer.from(typeof element === 'string' ? 
element : String(element)); - } - size += buffer.length; - buffers.push(buffer); - } - } - - this[BUFFER] = Buffer.concat(buffers); - - let type = options && options.type !== undefined && String(options.type).toLowerCase(); - if (type && !/[^\u0020-\u007E]/.test(type)) { - this[TYPE] = type; - } - } - get size() { - return this[BUFFER].length; - } - get type() { - return this[TYPE]; - } - text() { - return Promise.resolve(this[BUFFER].toString()); - } - arrayBuffer() { - const buf = this[BUFFER]; - const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - return Promise.resolve(ab); - } - stream() { - const readable = new Readable(); - readable._read = function () {}; - readable.push(this[BUFFER]); - readable.push(null); - return readable; - } - toString() { - return '[object Blob]'; - } - slice() { - const size = this.size; - - const start = arguments[0]; - const end = arguments[1]; - let relativeStart, relativeEnd; - if (start === undefined) { - relativeStart = 0; - } else if (start < 0) { - relativeStart = Math.max(size + start, 0); - } else { - relativeStart = Math.min(start, size); - } - if (end === undefined) { - relativeEnd = size; - } else if (end < 0) { - relativeEnd = Math.max(size + end, 0); - } else { - relativeEnd = Math.min(end, size); - } - const span = Math.max(relativeEnd - relativeStart, 0); - - const buffer = this[BUFFER]; - const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); - const blob = new Blob([], { type: arguments[2] }); - blob[BUFFER] = slicedBuffer; - return blob; - } -} - -Object.defineProperties(Blob.prototype, { - size: { enumerable: true }, - type: { enumerable: true }, - slice: { enumerable: true } -}); - -Object.defineProperty(Blob.prototype, Symbol.toStringTag, { - value: 'Blob', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * fetch-error.js - * - * FetchError interface for operational errors - */ - -/** - * Create FetchError instance - * - * @param String message Error message for human - * @param String type Error type for machine - * @param String systemError For Node.js system error - * @return FetchError - */ -function FetchError(message, type, systemError) { - Error.call(this, message); - - this.message = message; - this.type = type; - - // when err.type is `system`, err.code contains system error code - if (systemError) { - this.code = this.errno = systemError.code; - } - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -FetchError.prototype = Object.create(Error.prototype); -FetchError.prototype.constructor = FetchError; -FetchError.prototype.name = 'FetchError'; - -let convert; -try { - convert = __webpack_require__(545).convert; -} catch (e) {} - -const INTERNALS = Symbol('Body internals'); - -// fix an issue where "PassThrough" isn't a named export for node <10 -const PassThrough = Stream.PassThrough; - -/** - * Body mixin - * - * Ref: https://fetch.spec.whatwg.org/#body - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -function Body(body) { - var _this = this; - - var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, - _ref$size = _ref.size; - - let size = _ref$size === undefined ? 0 : _ref$size; - var _ref$timeout = _ref.timeout; - let timeout = _ref$timeout === undefined ? 
0 : _ref$timeout; - - if (body == null) { - // body is undefined or null - body = null; - } else if (isURLSearchParams(body)) { - // body is a URLSearchParams - body = Buffer.from(body.toString()); - } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { - // body is ArrayBuffer - body = Buffer.from(body); - } else if (ArrayBuffer.isView(body)) { - // body is ArrayBufferView - body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); - } else if (body instanceof Stream) ; else { - // none of the above - // coerce to string then buffer - body = Buffer.from(String(body)); - } - this[INTERNALS] = { - body, - disturbed: false, - error: null - }; - this.size = size; - this.timeout = timeout; - - if (body instanceof Stream) { - body.on('error', function (err) { - const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); - _this[INTERNALS].error = error; - }); - } -} - -Body.prototype = { - get body() { - return this[INTERNALS].body; - }, - - get bodyUsed() { - return this[INTERNALS].disturbed; - }, - - /** - * Decode response as ArrayBuffer - * - * @return Promise - */ - arrayBuffer() { - return consumeBody.call(this).then(function (buf) { - return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); - }); - }, - - /** - * Return raw response as Blob - * - * @return Promise - */ - blob() { - let ct = this.headers && this.headers.get('content-type') || ''; - return consumeBody.call(this).then(function (buf) { - return Object.assign( - // Prevent copying - new Blob([], { - type: ct.toLowerCase() - }), { - [BUFFER]: buf - }); - }); - }, - - /** - * Decode response as json - * - * @return Promise - */ - json() { - var _this2 = this; - - return consumeBody.call(this).then(function (buffer) { - try { - return JSON.parse(buffer.toString()); - } catch (err) { - return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); - } - }); - }, - - /** - * Decode response as text - * - * @return Promise - */ - text() { - return consumeBody.call(this).then(function (buffer) { - return buffer.toString(); - }); - }, - - /** - * Decode response as buffer (non-spec api) - * - * @return Promise - */ - buffer() { - return consumeBody.call(this); - }, - - /** - * Decode response as text, while automatically detecting the encoding and - * trying to decode to UTF-8 (non-spec api) - * - * @return Promise - */ - textConverted() { - var _this3 = this; - - return consumeBody.call(this).then(function (buffer) { - return convertBody(buffer, _this3.headers); - }); - } -}; - -// In browsers, all properties are enumerable. -Object.defineProperties(Body.prototype, { - body: { enumerable: true }, - bodyUsed: { enumerable: true }, - arrayBuffer: { enumerable: true }, - blob: { enumerable: true }, - json: { enumerable: true }, - text: { enumerable: true } -}); - -Body.mixIn = function (proto) { - for (const name of Object.getOwnPropertyNames(Body.prototype)) { - // istanbul ignore else: future proof - if (!(name in proto)) { - const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); - Object.defineProperty(proto, name, desc); - } - } -}; - -/** - * Consume and convert an entire Body to a Buffer. 
- * - * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body - * - * @return Promise - */ -function consumeBody() { - var _this4 = this; - - if (this[INTERNALS].disturbed) { - return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); - } - - this[INTERNALS].disturbed = true; - - if (this[INTERNALS].error) { - return Body.Promise.reject(this[INTERNALS].error); - } - - let body = this.body; - - // body is null - if (body === null) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is blob - if (isBlob(body)) { - body = body.stream(); - } - - // body is buffer - if (Buffer.isBuffer(body)) { - return Body.Promise.resolve(body); - } - - // istanbul ignore if: should never happen - if (!(body instanceof Stream)) { - return Body.Promise.resolve(Buffer.alloc(0)); - } - - // body is stream - // get ready to actually consume the body - let accum = []; - let accumBytes = 0; - let abort = false; - - return new Body.Promise(function (resolve, reject) { - let resTimeout; - - // allow timeout on slow response body - if (_this4.timeout) { - resTimeout = setTimeout(function () { - abort = true; - reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); - }, _this4.timeout); - } - - // handle stream errors - body.on('error', function (err) { - if (err.name === 'AbortError') { - // if the request was aborted, reject with this Error - abort = true; - reject(err); - } else { - // other errors, such as incorrect content-encoding - reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - - body.on('data', function (chunk) { - if (abort || chunk === null) { - return; - } - - if (_this4.size && accumBytes + chunk.length > _this4.size) { - abort = true; - reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); - return; - } - - accumBytes += chunk.length; - accum.push(chunk); - }); - - body.on('end', function () { - if (abort) { - return; - } - - clearTimeout(resTimeout); - - try { - resolve(Buffer.concat(accum, accumBytes)); - } catch (err) { - // handle streams that have accumulated too much data (issue #414) - reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); - } - }); - }); -} - -/** - * Detect buffer encoding and convert to target encoding - * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding - * - * @param Buffer buffer Incoming buffer - * @param String encoding Target encoding - * @return String - */ -function convertBody(buffer, headers) { - if (typeof convert !== 'function') { - throw new Error('The package `encoding` must be installed to use the textConverted() function'); - } - - const ct = headers.get('content-type'); - let charset = 'utf-8'; - let res, str; - - // header - if (ct) { - res = /charset=([^;]*)/i.exec(ct); - } - - // no charset in content type, peek at response body for at most 1024 bytes - str = buffer.slice(0, 1024).toString(); - - // html5 - if (!res && str) { - res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; - - this[MAP] = Object.create(null); - - if (init instanceof Headers) { - const rawHeaders = init.raw(); - const headerNames = Object.keys(rawHeaders); - - for (const headerName of headerNames) { - for (const value of rawHeaders[headerName]) { - this.append(headerName, value); - } - } - - return; - } - - // We don't worry about converting prop to ByteString here as append() - // will handle it. - if (init == null) ; else if (typeof init === 'object') { - const method = init[Symbol.iterator]; - if (method != null) { - if (typeof method !== 'function') { - throw new TypeError('Header pairs must be iterable'); - } - - // sequence> - // Note: per spec we have to first exhaust the lists then process them - const pairs = []; - for (const pair of init) { - if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { - throw new TypeError('Each header pair must be iterable'); - } - pairs.push(Array.from(pair)); - } - - for (const pair of pairs) { - if (pair.length !== 2) { - throw new TypeError('Each header pair must be a name/value tuple'); - } - this.append(pair[0], pair[1]); - } - } else { - // record - for (const key of Object.keys(init)) { - const value = init[key]; - this.append(key, value); - } - } - } else { - throw new TypeError('Provided initializer must be an object'); - } - } - - /** - * Return combined header value given name - * - * @param String name Header name - * @return Mixed - */ - get(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key === undefined) { - return null; - } - - return this[MAP][key].join(', '); - } - - /** - * Iterate over all headers - * - * @param Function callback Executed for each item with parameters (value, name, thisArg) - * @param Boolean thisArg `this` context for callback function - * @return Void - */ - forEach(callback) { - let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; - - let pairs = getHeaders(this); - let i = 0; - while (i < pairs.length) { - var _pairs$i = pairs[i]; - const name = _pairs$i[0], - value = _pairs$i[1]; - - callback.call(thisArg, value, name, this); - pairs = getHeaders(this); - i++; - } - } - - /** - * Overwrite header values given name - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - set(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - this[MAP][key !== undefined ? 
key : name] = [value]; - } - - /** - * Append a value onto existing header - * - * @param String name Header name - * @param String value Header value - * @return Void - */ - append(name, value) { - name = `${name}`; - value = `${value}`; - validateName(name); - validateValue(value); - const key = find(this[MAP], name); - if (key !== undefined) { - this[MAP][key].push(value); - } else { - this[MAP][name] = [value]; - } - } - - /** - * Check for header name existence - * - * @param String name Header name - * @return Boolean - */ - has(name) { - name = `${name}`; - validateName(name); - return find(this[MAP], name) !== undefined; - } - - /** - * Delete all header values given name - * - * @param String name Header name - * @return Void - */ - delete(name) { - name = `${name}`; - validateName(name); - const key = find(this[MAP], name); - if (key !== undefined) { - delete this[MAP][key]; - } - } - - /** - * Return raw headers (non-spec api) - * - * @return Object - */ - raw() { - return this[MAP]; - } - - /** - * Get an iterator on keys. - * - * @return Iterator - */ - keys() { - return createHeadersIterator(this, 'key'); - } - - /** - * Get an iterator on values. - * - * @return Iterator - */ - values() { - return createHeadersIterator(this, 'value'); - } - - /** - * Get an iterator on entries. - * - * This is the default iterator of the Headers object. - * - * @return Iterator - */ - [Symbol.iterator]() { - return createHeadersIterator(this, 'key+value'); - } -} -Headers.prototype.entries = Headers.prototype[Symbol.iterator]; - -Object.defineProperty(Headers.prototype, Symbol.toStringTag, { - value: 'Headers', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Headers.prototype, { - get: { enumerable: true }, - forEach: { enumerable: true }, - set: { enumerable: true }, - append: { enumerable: true }, - has: { enumerable: true }, - delete: { enumerable: true }, - keys: { enumerable: true }, - values: { enumerable: true }, - entries: { enumerable: true } -}); - -function getHeaders(headers) { - let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; - - const keys = Object.keys(headers[MAP]).sort(); - return keys.map(kind === 'key' ? function (k) { - return k.toLowerCase(); - } : kind === 'value' ? 
function (k) { - return headers[MAP][k].join(', '); - } : function (k) { - return [k.toLowerCase(), headers[MAP][k].join(', ')]; - }); -} - -const INTERNAL = Symbol('internal'); - -function createHeadersIterator(target, kind) { - const iterator = Object.create(HeadersIteratorPrototype); - iterator[INTERNAL] = { - target, - kind, - index: 0 - }; - return iterator; -} - -const HeadersIteratorPrototype = Object.setPrototypeOf({ - next() { - // istanbul ignore if - if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { - throw new TypeError('Value of `this` is not a HeadersIterator'); - } - - var _INTERNAL = this[INTERNAL]; - const target = _INTERNAL.target, - kind = _INTERNAL.kind, - index = _INTERNAL.index; - - const values = getHeaders(target, kind); - const len = values.length; - if (index >= len) { - return { - value: undefined, - done: true - }; - } - - this[INTERNAL].index = index + 1; - - return { - value: values[index], - done: false - }; - } -}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); - -Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { - value: 'HeadersIterator', - writable: false, - enumerable: false, - configurable: true -}); - -/** - * Export the Headers object in a form that Node.js can consume. - * - * @param Headers headers - * @return Object - */ -function exportNodeCompatibleHeaders(headers) { - const obj = Object.assign({ __proto__: null }, headers[MAP]); - - // http.request() only supports string as Host header. This hack makes - // specifying custom Host header possible. - const hostHeaderKey = find(headers[MAP], 'Host'); - if (hostHeaderKey !== undefined) { - obj[hostHeaderKey] = obj[hostHeaderKey][0]; - } - - return obj; -} - -/** - * Create a Headers object from an object of headers, ignoring those that do - * not conform to HTTP grammar productions. - * - * @param Object obj Object of headers - * @return Headers - */ -function createHeadersLenient(obj) { - const headers = new Headers(); - for (const name of Object.keys(obj)) { - if (invalidTokenRegex.test(name)) { - continue; - } - if (Array.isArray(obj[name])) { - for (const val of obj[name]) { - if (invalidHeaderCharRegex.test(val)) { - continue; - } - if (headers[MAP][name] === undefined) { - headers[MAP][name] = [val]; - } else { - headers[MAP][name].push(val); - } - } - } else if (!invalidHeaderCharRegex.test(obj[name])) { - headers[MAP][name] = [obj[name]]; - } - } - return headers; -} - -const INTERNALS$1 = Symbol('Response internals'); - -// fix an issue where "STATUS_CODES" aren't a named export for node <10 -const STATUS_CODES = http.STATUS_CODES; - -/** - * Response class - * - * @param Stream body Readable stream - * @param Object opts Response options - * @return Void - */ -class Response { - constructor() { - let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; - let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; - - Body.call(this, body, opts); - - const status = opts.status || 200; - const headers = new Headers(opts.headers); - - if (body != null && !headers.has('Content-Type')) { - const contentType = extractContentType(body); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - this[INTERNALS$1] = { - url: opts.url, - status, - statusText: opts.statusText || STATUS_CODES[status], - headers, - counter: opts.counter - }; - } - - get url() { - return this[INTERNALS$1].url || ''; - } - - get status() { - return this[INTERNALS$1].status; - } - - /** - * Convenience property representing if the request ended normally - */ - get ok() { - return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; - } - - get redirected() { - return this[INTERNALS$1].counter > 0; - } - - get statusText() { - return this[INTERNALS$1].statusText; - } - - get headers() { - return this[INTERNALS$1].headers; - } - - /** - * Clone this response - * - * @return Response - */ - clone() { - return new Response(clone(this), { - url: this.url, - status: this.status, - statusText: this.statusText, - headers: this.headers, - ok: this.ok, - redirected: this.redirected - }); - } -} - -Body.mixIn(Response.prototype); - -Object.defineProperties(Response.prototype, { - url: { enumerable: true }, - status: { enumerable: true }, - ok: { enumerable: true }, - redirected: { enumerable: true }, - statusText: { enumerable: true }, - headers: { enumerable: true }, - clone: { enumerable: true } -}); - -Object.defineProperty(Response.prototype, Symbol.toStringTag, { - value: 'Response', - writable: false, - enumerable: false, - configurable: true -}); - -const INTERNALS$2 = Symbol('Request internals'); - -// fix an issue where "format", "parse" aren't a named export for node <10 -const parse_url = Url.parse; -const format_url = Url.format; - -const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; - -/** - * Check if a value is an instance of Request. - * - * @param Mixed input - * @return Boolean - */ -function isRequest(input) { - return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; -} - -function isAbortSignal(signal) { - const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); - return !!(proto && proto.constructor.name === 'AbortSignal'); -} - -/** - * Request class - * - * @param Mixed input Url or Request instance - * @param Object init Custom options - * @return Void - */ -class Request { - constructor(input) { - let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; - - let parsedURL; - - // normalize input - if (!isRequest(input)) { - if (input && input.href) { - // in order to support Node.js' Url objects; though WHATWG's URL objects - // will fall into this branch also (since their `toString()` will return - // `href` property anyway) - parsedURL = parse_url(input.href); - } else { - // coerce input to a string before attempting to parse - parsedURL = parse_url(`${input}`); - } - input = {}; - } else { - parsedURL = parse_url(input.url); - } - - let method = init.method || input.method || 'GET'; - method = method.toUpperCase(); - - if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { - throw new TypeError('Request with GET/HEAD method cannot have body'); - } - - let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
clone(input) : null; - - Body.call(this, inputBody, { - timeout: init.timeout || input.timeout || 0, - size: init.size || input.size || 0 - }); - - const headers = new Headers(init.headers || input.headers || {}); - - if (inputBody != null && !headers.has('Content-Type')) { - const contentType = extractContentType(inputBody); - if (contentType) { - headers.append('Content-Type', contentType); - } - } - - let signal = isRequest(input) ? input.signal : null; - if ('signal' in init) signal = init.signal; - - if (signal != null && !isAbortSignal(signal)) { - throw new TypeError('Expected signal to be an instanceof AbortSignal'); - } - - this[INTERNALS$2] = { - method, - redirect: init.redirect || input.redirect || 'follow', - headers, - parsedURL, - signal - }; - - // node-fetch-only options - this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; - this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; - this.counter = init.counter || input.counter || 0; - this.agent = init.agent || input.agent; - } - - get method() { - return this[INTERNALS$2].method; - } - - get url() { - return format_url(this[INTERNALS$2].parsedURL); - } - - get headers() { - return this[INTERNALS$2].headers; - } - - get redirect() { - return this[INTERNALS$2].redirect; - } - - get signal() { - return this[INTERNALS$2].signal; - } - - /** - * Clone this request - * - * @return Request - */ - clone() { - return new Request(this); - } -} - -Body.mixIn(Request.prototype); - -Object.defineProperty(Request.prototype, Symbol.toStringTag, { - value: 'Request', - writable: false, - enumerable: false, - configurable: true -}); - -Object.defineProperties(Request.prototype, { - method: { enumerable: true }, - url: { enumerable: true }, - headers: { enumerable: true }, - redirect: { enumerable: true }, - clone: { enumerable: true }, - signal: { enumerable: true } -}); - -/** - * Convert a Request to Node.js http request options. 
- * - * @param Request A Request instance - * @return Object The options object to be passed to http.request - */ -function getNodeRequestOptions(request) { - const parsedURL = request[INTERNALS$2].parsedURL; - const headers = new Headers(request[INTERNALS$2].headers); - - // fetch step 1.3 - if (!headers.has('Accept')) { - headers.set('Accept', '*/*'); - } - - // Basic fetch - if (!parsedURL.protocol || !parsedURL.hostname) { - throw new TypeError('Only absolute URLs are supported'); - } - - if (!/^https?:$/.test(parsedURL.protocol)) { - throw new TypeError('Only HTTP(S) protocols are supported'); - } - - if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { - throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); - } - - // HTTP-network-or-cache fetch steps 2.4-2.7 - let contentLengthValue = null; - if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { - contentLengthValue = '0'; - } - if (request.body != null) { - const totalBytes = getTotalBytes(request); - if (typeof totalBytes === 'number') { - contentLengthValue = String(totalBytes); - } - } - if (contentLengthValue) { - headers.set('Content-Length', contentLengthValue); - } - - // HTTP-network-or-cache fetch step 2.11 - if (!headers.has('User-Agent')) { - headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); - } - - // HTTP-network-or-cache fetch step 2.15 - if (request.compress && !headers.has('Accept-Encoding')) { - headers.set('Accept-Encoding', 'gzip,deflate'); - } - - let agent = request.agent; - if (typeof agent === 'function') { - agent = agent(parsedURL); - } - - if (!headers.has('Connection') && !agent) { - headers.set('Connection', 'close'); - } - - // HTTP-network fetch step 4.2 - // chunked encoding is handled by Node.js - - return Object.assign({}, parsedURL, { - method: request.method, - headers: exportNodeCompatibleHeaders(headers), - agent - }); -} - -/** - * abort-error.js - * - * AbortError interface for cancelled requests - */ - -/** - * Create AbortError instance - * - * @param String message Error message for human - * @return AbortError - */ -function AbortError(message) { - Error.call(this, message); - - this.type = 'aborted'; - this.message = message; - - // hide custom error implementation details from end-users - Error.captureStackTrace(this, this.constructor); -} - -AbortError.prototype = Object.create(Error.prototype); -AbortError.prototype.constructor = AbortError; -AbortError.prototype.name = 'AbortError'; - -// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 -const PassThrough$1 = Stream.PassThrough; -const resolve_url = Url.resolve; - -/** - * Fetch function - * - * @param Mixed url Absolute url or Request instance - * @param Object opts Fetch options - * @return Promise - */ -function fetch(url, opts) { - - // allow custom promise - if (!fetch.Promise) { - throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); - } - - Body.Promise = fetch.Promise; - - // wrap http.request into fetch - return new fetch.Promise(function (resolve, reject) { - // build request object - const request = new Request(url, opts); - const options = getNodeRequestOptions(request); - - const send = (options.protocol === 'https:' ? 
https : http).request; - const signal = request.signal; - - let response = null; - - const abort = function abort() { - let error = new AbortError('The user aborted a request.'); - reject(error); - if (request.body && request.body instanceof Stream.Readable) { - request.body.destroy(error); - } - if (!response || !response.body) return; - response.body.emit('error', error); - }; - - if (signal && signal.aborted) { - abort(); - return; - } - - const abortAndFinalize = function abortAndFinalize() { - abort(); - finalize(); - }; - - // send request - const req = send(options); - let reqTimeout; - - if (signal) { - signal.addEventListener('abort', abortAndFinalize); - } - - function finalize() { - req.abort(); - if (signal) signal.removeEventListener('abort', abortAndFinalize); - clearTimeout(reqTimeout); - } - - if (request.timeout) { - req.once('socket', function (socket) { - reqTimeout = setTimeout(function () { - reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); - finalize(); - }, request.timeout); - }); - } - - req.on('error', function (err) { - reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); - finalize(); - }); - - req.on('response', function (res) { - clearTimeout(reqTimeout); - - const headers = createHeadersLenient(res.headers); - - // HTTP fetch step 5 - if (fetch.isRedirect(res.statusCode)) { - // HTTP fetch step 5.2 - const location = headers.get('Location'); - - // HTTP fetch step 5.3 - const locationURL = location === null ? null : resolve_url(request.url, location); - - // HTTP fetch step 5.5 - switch (request.redirect) { - case 'error': - reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); - finalize(); - return; - case 'manual': - // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. - if (locationURL !== null) { - // handle corrupted header - try { - headers.set('Location', locationURL); - } catch (err) { - // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request - reject(err); - } - } - break; - case 'follow': - // HTTP-redirect fetch step 2 - if (locationURL === null) { - break; - } - - // HTTP-redirect fetch step 5 - if (request.counter >= request.follow) { - reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 6 (counter increment) - // Create a new Request object. 
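// Caller-side sketch of the redirect/abort/timeout options handled above
// (assuming the node-fetch 2.x API this bundle vendors and the separate
// abort-controller package, neither of which appears in this diff):
const nodeFetch = require("node-fetch");
const AbortController = require("abort-controller");
const controller = new AbortController();
setTimeout(() => controller.abort(), 5000); // exercises the AbortError path above
nodeFetch("https://example.com", {
  redirect: "follow", // 'manual' and 'error' select the other switch cases
  follow: 20,         // redirect cap before the 'max-redirect' FetchError
  signal: controller.signal
}).then(res => res.text()).catch(err => console.error(err.name, err.message));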
- const requestOpts = { - headers: new Headers(request.headers), - follow: request.follow, - counter: request.counter + 1, - agent: request.agent, - compress: request.compress, - method: request.method, - body: request.body, - signal: request.signal, - timeout: request.timeout, - size: request.size - }; - - // HTTP-redirect fetch step 9 - if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { - reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); - finalize(); - return; - } - - // HTTP-redirect fetch step 11 - if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { - requestOpts.method = 'GET'; - requestOpts.body = undefined; - requestOpts.headers.delete('content-length'); - } - - // HTTP-redirect fetch step 15 - resolve(fetch(new Request(locationURL, requestOpts))); - finalize(); - return; - } - } - - // prepare response - res.once('end', function () { - if (signal) signal.removeEventListener('abort', abortAndFinalize); - }); - let body = res.pipe(new PassThrough$1()); - - const response_options = { - url: request.url, - status: res.statusCode, - statusText: res.statusMessage, - headers: headers, - size: request.size, - timeout: request.timeout, - counter: request.counter - }; - - // HTTP-network fetch step 12.1.1.3 - const codings = headers.get('Content-Encoding'); - - // HTTP-network fetch step 12.1.1.4: handle content codings - - // in following scenarios we ignore compression support - // 1. compression support is disabled - // 2. HEAD request - // 3. no Content-Encoding header - // 4. no content response (204) - // 5. content not modified response (304) - if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { - response = new Response(body, response_options); - resolve(response); - return; - } - - // For Node v6+ - // Be less strict when decoding compressed responses, since sometimes - // servers send slightly invalid responses that are still accepted - // by common browsers. - // Always using Z_SYNC_FLUSH is what cURL does. 
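// The lenient zlib options assembled just below also work standalone;
// `gzippedBody` is a stand-in for any gzip-encoded response stream:
const zlib = require("zlib");
const lenient = { flush: zlib.Z_SYNC_FLUSH, finishFlush: zlib.Z_SYNC_FLUSH };
gzippedBody.pipe(zlib.createGunzip(lenient)).pipe(process.stdout);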
- const zlibOptions = { - flush: zlib.Z_SYNC_FLUSH, - finishFlush: zlib.Z_SYNC_FLUSH - }; - - // for gzip - if (codings == 'gzip' || codings == 'x-gzip') { - body = body.pipe(zlib.createGunzip(zlibOptions)); - response = new Response(body, response_options); - resolve(response); - return; - } - - // for deflate - if (codings == 'deflate' || codings == 'x-deflate') { - // handle the infamous raw deflate response from old servers - // a hack for old IIS and Apache servers - const raw = res.pipe(new PassThrough$1()); - raw.once('data', function (chunk) { - // see http://stackoverflow.com/questions/37519828 - if ((chunk[0] & 0x0F) === 0x08) { - body = body.pipe(zlib.createInflate()); - } else { - body = body.pipe(zlib.createInflateRaw()); - } - response = new Response(body, response_options); - resolve(response); - }); - return; - } - - // for br - if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { - body = body.pipe(zlib.createBrotliDecompress()); - response = new Response(body, response_options); - resolve(response); - return; - } - - // otherwise, use response as-is - response = new Response(body, response_options); - resolve(response); - }); - - writeToStream(req, request); - }); -} -/** - * Redirect code matching - * - * @param Number code Status code - * @return Boolean - */ -fetch.isRedirect = function (code) { - return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; -}; - -// expose Promise -fetch.Promise = global.Promise; - -module.exports = exports = fetch; -Object.defineProperty(exports, "__esModule", { value: true }); -exports.default = exports; -exports.Headers = Headers; -exports.Request = Request; -exports.Response = Response; -exports.FetchError = FetchError; - - -/***/ }), -/* 455 */, -/* 456 */, -/* 457 */, -/* 458 */, -/* 459 */, -/* 460 */, -/* 461 */, -/* 462 */ -/***/ (function(module) { - -"use strict"; - - -// See http://www.robvanderwoude.com/escapechars.php -const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; - -function escapeCommand(arg) { - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - return arg; -} - -function escapeArgument(arg, doubleEscapeMetaChars) { - // Convert to string - arg = `${arg}`; - - // Algorithm below is based on https://qntm.org/cmd - - // Sequence of backslashes followed by a double quote: - // double up all the backslashes and escape the double quote - arg = arg.replace(/(\\*)"/g, '$1$1\\"'); - - // Sequence of backslashes followed by the end of the string - // (which will become a double quote later): - // double up all the backslashes - arg = arg.replace(/(\\*)$/, '$1$1'); - - // All other backslashes occur literally - - // Quote the whole thing: - arg = `"${arg}"`; - - // Escape meta chars - arg = arg.replace(metaCharsRegExp, '^$1'); - - // Double escape meta chars if necessary - if (doubleEscapeMetaChars) { - arg = arg.replace(metaCharsRegExp, '^$1'); - } - - return arg; -} - -module.exports.command = escapeCommand; -module.exports.argument = escapeArgument; - - -/***/ }), -/* 463 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - - -Object.defineProperty(exports, '__esModule', { value: true }); - -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
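// The escaping module above (462) implements https://qntm.org/cmd; the same
// algorithm restated as a self-contained check (the sample input is made up):
//
//   const metaChars = /([()\][%!^"`<>&|;, *?])/g;
//   function escapeArgumentSketch(arg) {
//     const doubled = String(arg)
//       .replace(/(\\*)"/g, '$1$1\\"') // double each backslash run before a quote, escape the quote
//       .replace(/(\\*)$/, '$1$1');    // double a trailing backslash run
//     return `"${doubled}"`.replace(metaChars, '^$1'); // quote, then ^-escape cmd.exe meta chars
//   }
//   console.log(escapeArgumentSketch('a "b"')); // -> ^"a^ \^"b\^"^"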
ex['default'] : ex; } - -var deprecation = __webpack_require__(692); -var once = _interopDefault(__webpack_require__(49)); - -const logOnce = once(deprecation => console.warn(deprecation)); -/** - * Error with extra properties to help with debugging - */ - -class RequestError extends Error { - constructor(message, statusCode, options) { - super(message); // Maintains proper stack trace (only available on V8) - - /* istanbul ignore next */ - - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - - this.name = "HttpError"; - this.status = statusCode; - Object.defineProperty(this, "code", { - get() { - logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); - return statusCode; - } - - }); - this.headers = options.headers || {}; // redact request credentials without mutating original request options - - const requestCopy = Object.assign({}, options.request); - - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") - }); - } - - requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit - // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications - .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended - // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header - .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - } - -} - -exports.RequestError = RequestError; -//# sourceMappingURL=index.js.map - - -/***/ }), -/* 464 */, -/* 465 */, -/* 466 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var Stream = __webpack_require__(794).Stream - -module.exports = legacy - -function legacy (fs) { - return { - ReadStream: ReadStream, - WriteStream: WriteStream - } - - function ReadStream (path, options) { - if (!(this instanceof ReadStream)) return new ReadStream(path, options); - - Stream.call(this); - - var self = this; - - this.path = path; - this.fd = null; - this.readable = true; - this.paused = false; - - this.flags = 'r'; - this.mode = 438; /*=0666*/ - this.bufferSize = 64 * 1024; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.encoding) this.setEncoding(this.encoding); - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.end === undefined) { - this.end = Infinity; - } else if ('number' !== typeof this.end) { - throw TypeError('end must be a Number'); - } - - if (this.start > this.end) { - throw new Error('start must be <= end'); - } - - this.pos = this.start; - } - - if (this.fd !== null) { - process.nextTick(function() { - self._read(); - }); - return; - } - - fs.open(this.path, this.flags, this.mode, function (err, fd) { - if (err) { - self.emit('error', err); - self.readable = false; - return; - } - - self.fd = fd; - self.emit('open', fd); - self._read(); - }) - } - - function WriteStream (path, options) { - if (!(this instanceof WriteStream)) return new WriteStream(path, options); - - Stream.call(this); - - this.path = 
path; - this.fd = null; - this.writable = true; - - this.flags = 'w'; - this.encoding = 'binary'; - this.mode = 438; /*=0666*/ - this.bytesWritten = 0; - - options = options || {}; - - // Mixin options into this - var keys = Object.keys(options); - for (var index = 0, length = keys.length; index < length; index++) { - var key = keys[index]; - this[key] = options[key]; - } - - if (this.start !== undefined) { - if ('number' !== typeof this.start) { - throw TypeError('start must be a Number'); - } - if (this.start < 0) { - throw new Error('start must be >= zero'); - } - - this.pos = this.start; - } - - this.busy = false; - this._queue = []; - - if (this.fd === null) { - this._open = fs.open; - this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); - this.flush(); - } - } -} - - -/***/ }), -/* 467 */, -/* 468 */, -/* 469 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const jsonFile = __webpack_require__(666) - -module.exports = { - // jsonfile exports - readJson: jsonFile.readFile, - readJsonSync: jsonFile.readFileSync, - writeJson: jsonFile.writeFile, - writeJsonSync: jsonFile.writeFileSync -} - - -/***/ }), -/* 470 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const command_1 = __webpack_require__(431); -const file_command_1 = __webpack_require__(102); -const utils_1 = __webpack_require__(82); -const os = __importStar(__webpack_require__(87)); -const path = __importStar(__webpack_require__(622)); -/** - * The code to exit an action - */ -var ExitCode; -(function (ExitCode) { - /** - * A code indicating that the action was successful - */ - ExitCode[ExitCode["Success"] = 0] = "Success"; - /** - * A code indicating that the action was a failure - */ - ExitCode[ExitCode["Failure"] = 1] = "Failure"; -})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); -//----------------------------------------------------------------------- -// Variables -//----------------------------------------------------------------------- -/** - * Sets env variable for this action and future actions in the job - * @param name the name of the variable to set - * @param val the value of the variable. 
Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function exportVariable(name, val) { - const convertedVal = utils_1.toCommandValue(val); - process.env[name] = convertedVal; - const filePath = process.env['GITHUB_ENV'] || ''; - if (filePath) { - const delimiter = '_GitHubActionsFileCommandDelimeter_'; - const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; - file_command_1.issueCommand('ENV', commandValue); - } - else { - command_1.issueCommand('set-env', { name }, convertedVal); - } -} -exports.exportVariable = exportVariable; -/** - * Registers a secret which will get masked from logs - * @param secret value of the secret - */ -function setSecret(secret) { - command_1.issueCommand('add-mask', {}, secret); -} -exports.setSecret = setSecret; -/** - * Prepends inputPath to the PATH (for this action and future actions) - * @param inputPath - */ -function addPath(inputPath) { - const filePath = process.env['GITHUB_PATH'] || ''; - if (filePath) { - file_command_1.issueCommand('PATH', inputPath); - } - else { - command_1.issueCommand('add-path', {}, inputPath); - } - process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; -} -exports.addPath = addPath; -/** - * Gets the value of an input. The value is also trimmed. - * - * @param name name of the input to get - * @param options optional. See InputOptions. - * @returns string - */ -function getInput(name, options) { - const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; - if (options && options.required && !val) { - throw new Error(`Input required and not supplied: ${name}`); - } - return val.trim(); -} -exports.getInput = getInput; -/** - * Sets the value of an output. - * - * @param name name of the output to set - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function setOutput(name, value) { - command_1.issueCommand('set-output', { name }, value); -} -exports.setOutput = setOutput; -/** - * Enables or disables the echoing of commands into stdout for the rest of the step. - * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. - * - */ -function setCommandEcho(enabled) { - command_1.issue('echo', enabled ? 'on' : 'off'); -} -exports.setCommandEcho = setCommandEcho; -//----------------------------------------------------------------------- -// Results -//----------------------------------------------------------------------- -/** - * Sets the action status to failed. - * When the action exits it will be with an exit code of 1 - * @param message add error issue message - */ -function setFailed(message) { - process.exitCode = ExitCode.Failure; - error(message); -} -exports.setFailed = setFailed; -//----------------------------------------------------------------------- -// Logging Commands -//----------------------------------------------------------------------- -/** - * Gets whether Actions Step Debug is on or not - */ -function isDebug() { - return process.env['RUNNER_DEBUG'] === '1'; -} -exports.isDebug = isDebug; -/** - * Writes debug message to user log - * @param message debug message - */ -function debug(message) { - command_1.issueCommand('debug', {}, message); -} -exports.debug = debug; -/** - * Adds an error issue - * @param message error issue message. 
Errors will be converted to string via toString() - */ -function error(message) { - command_1.issue('error', message instanceof Error ? message.toString() : message); -} -exports.error = error; -/** - * Adds an warning issue - * @param message warning issue message. Errors will be converted to string via toString() - */ -function warning(message) { - command_1.issue('warning', message instanceof Error ? message.toString() : message); -} -exports.warning = warning; -/** - * Writes info to log with console.log. - * @param message info message - */ -function info(message) { - process.stdout.write(message + os.EOL); -} -exports.info = info; -/** - * Begin an output group. - * - * Output until the next `groupEnd` will be foldable in this group - * - * @param name The name of the output group - */ -function startGroup(name) { - command_1.issue('group', name); -} -exports.startGroup = startGroup; -/** - * End an output group. - */ -function endGroup() { - command_1.issue('endgroup'); -} -exports.endGroup = endGroup; -/** - * Wrap an asynchronous function call in a group. - * - * Returns the same type as the function itself. - * - * @param name The name of the group - * @param fn The function to wrap in the group - */ -function group(name, fn) { - return __awaiter(this, void 0, void 0, function* () { - startGroup(name); - let result; - try { - result = yield fn(); - } - finally { - endGroup(); - } - return result; - }); -} -exports.group = group; -//----------------------------------------------------------------------- -// Wrapper action state -//----------------------------------------------------------------------- -/** - * Saves state for current action, the state can only be retrieved by this action's post job execution. - * - * @param name name of the state to store - * @param value value to store. Non-string values will be converted to a string via JSON.stringify - */ -// eslint-disable-next-line @typescript-eslint/no-explicit-any -function saveState(name, value) { - command_1.issueCommand('save-state', { name }, value); -} -exports.saveState = saveState; -/** - * Gets the value of an state set by this action's main execution. 
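 *
 * @example Module-level usage sketch for the helpers above (the input and
 * state names are illustrative, not taken from this project):
 *   const core = require("@actions/core");
 *   const file = core.getInput("definition-file", { required: true });
 *   core.exportVariable("BUILD_CHAIN_FILE", file); // reaches later steps via GITHUB_ENV
 *   core.group("build", async () => core.info(`building from ${file}`))
 *     .then(() => core.saveState("started", "true")) // read back with getState in a post step
 *     .catch(err => core.setFailed(err.message));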
- * - * @param name name of the state to get - * @returns string - */ -function getState(name) { - return process.env[`STATE_${name}`] || ''; -} -exports.getState = getState; -//# sourceMappingURL=core.js.map - -/***/ }), -/* 471 */, -/* 472 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const file = __webpack_require__(149) -const link = __webpack_require__(900) -const symlink = __webpack_require__(849) - -module.exports = { - // file - createFile: file.createFile, - createFileSync: file.createFileSync, - ensureFile: file.createFile, - ensureFileSync: file.createFileSync, - // link - createLink: link.createLink, - createLinkSync: link.createLinkSync, - ensureLink: link.createLink, - ensureLinkSync: link.createLinkSync, - // symlink - createSymlink: symlink.createSymlink, - createSymlinkSync: symlink.createSymlinkSync, - ensureSymlink: symlink.createSymlink, - ensureSymlinkSync: symlink.createSymlinkSync -} - - -/***/ }), -/* 473 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -/*eslint-disable no-use-before-define*/ - -var common = __webpack_require__(436); -var YAMLException = __webpack_require__(844); -var DEFAULT_FULL_SCHEMA = __webpack_require__(451); -var DEFAULT_SAFE_SCHEMA = __webpack_require__(829); - -var _toString = Object.prototype.toString; -var _hasOwnProperty = Object.prototype.hasOwnProperty; - -var CHAR_TAB = 0x09; /* Tab */ -var CHAR_LINE_FEED = 0x0A; /* LF */ -var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ -var CHAR_SPACE = 0x20; /* Space */ -var CHAR_EXCLAMATION = 0x21; /* ! */ -var CHAR_DOUBLE_QUOTE = 0x22; /* " */ -var CHAR_SHARP = 0x23; /* # */ -var CHAR_PERCENT = 0x25; /* % */ -var CHAR_AMPERSAND = 0x26; /* & */ -var CHAR_SINGLE_QUOTE = 0x27; /* ' */ -var CHAR_ASTERISK = 0x2A; /* * */ -var CHAR_COMMA = 0x2C; /* , */ -var CHAR_MINUS = 0x2D; /* - */ -var CHAR_COLON = 0x3A; /* : */ -var CHAR_EQUALS = 0x3D; /* = */ -var CHAR_GREATER_THAN = 0x3E; /* > */ -var CHAR_QUESTION = 0x3F; /* ? 
*/ -var CHAR_COMMERCIAL_AT = 0x40; /* @ */ -var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */ -var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */ -var CHAR_GRAVE_ACCENT = 0x60; /* ` */ -var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */ -var CHAR_VERTICAL_LINE = 0x7C; /* | */ -var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */ - -var ESCAPE_SEQUENCES = {}; - -ESCAPE_SEQUENCES[0x00] = '\\0'; -ESCAPE_SEQUENCES[0x07] = '\\a'; -ESCAPE_SEQUENCES[0x08] = '\\b'; -ESCAPE_SEQUENCES[0x09] = '\\t'; -ESCAPE_SEQUENCES[0x0A] = '\\n'; -ESCAPE_SEQUENCES[0x0B] = '\\v'; -ESCAPE_SEQUENCES[0x0C] = '\\f'; -ESCAPE_SEQUENCES[0x0D] = '\\r'; -ESCAPE_SEQUENCES[0x1B] = '\\e'; -ESCAPE_SEQUENCES[0x22] = '\\"'; -ESCAPE_SEQUENCES[0x5C] = '\\\\'; -ESCAPE_SEQUENCES[0x85] = '\\N'; -ESCAPE_SEQUENCES[0xA0] = '\\_'; -ESCAPE_SEQUENCES[0x2028] = '\\L'; -ESCAPE_SEQUENCES[0x2029] = '\\P'; - -var DEPRECATED_BOOLEANS_SYNTAX = [ - 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON', - 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' -]; - -function compileStyleMap(schema, map) { - var result, keys, index, length, tag, style, type; - - if (map === null) return {}; - - result = {}; - keys = Object.keys(map); - - for (index = 0, length = keys.length; index < length; index += 1) { - tag = keys[index]; - style = String(map[tag]); - - if (tag.slice(0, 2) === '!!') { - tag = 'tag:yaml.org,2002:' + tag.slice(2); - } - type = schema.compiledTypeMap['fallback'][tag]; - - if (type && _hasOwnProperty.call(type.styleAliases, style)) { - style = type.styleAliases[style]; - } - - result[tag] = style; - } - - return result; -} - -function encodeHex(character) { - var string, handle, length; - - string = character.toString(16).toUpperCase(); - - if (character <= 0xFF) { - handle = 'x'; - length = 2; - } else if (character <= 0xFFFF) { - handle = 'u'; - length = 4; - } else if (character <= 0xFFFFFFFF) { - handle = 'U'; - length = 8; - } else { - throw new YAMLException('code point within a string may not be greater than 0xFFFFFFFF'); - } - - return '\\' + handle + common.repeat('0', length - string.length) + string; -} - -function State(options) { - this.schema = options['schema'] || DEFAULT_FULL_SCHEMA; - this.indent = Math.max(1, (options['indent'] || 2)); - this.noArrayIndent = options['noArrayIndent'] || false; - this.skipInvalid = options['skipInvalid'] || false; - this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']); - this.styleMap = compileStyleMap(this.schema, options['styles'] || null); - this.sortKeys = options['sortKeys'] || false; - this.lineWidth = options['lineWidth'] || 80; - this.noRefs = options['noRefs'] || false; - this.noCompatMode = options['noCompatMode'] || false; - this.condenseFlow = options['condenseFlow'] || false; - - this.implicitTypes = this.schema.compiledImplicit; - this.explicitTypes = this.schema.compiledExplicit; - - this.tag = null; - this.result = ''; - - this.duplicates = []; - this.usedDuplicates = null; -} - -// Indents every line in a string. Empty lines (\n only) are not indented. 
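// Aside: the State options above surface through the dump entry points at the
// end of this module; illustrative calls against the js-yaml 3.x API:
const yaml = require("js-yaml");
yaml.safeDump({ b: 1, a: 2 }, { sortKeys: true }); // -> 'a: 2\nb: 1\n'
yaml.safeDump({ list: [1, 2] }, { flowLevel: 0 }); // -> '{list: [1, 2]}\n' (flow style from level 0)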
-function indentString(string, spaces) { - var ind = common.repeat(' ', spaces), - position = 0, - next = -1, - result = '', - line, - length = string.length; - - while (position < length) { - next = string.indexOf('\n', position); - if (next === -1) { - line = string.slice(position); - position = length; - } else { - line = string.slice(position, next + 1); - position = next + 1; - } - - if (line.length && line !== '\n') result += ind; - - result += line; - } - - return result; -} - -function generateNextLine(state, level) { - return '\n' + common.repeat(' ', state.indent * level); -} - -function testImplicitResolving(state, str) { - var index, length, type; - - for (index = 0, length = state.implicitTypes.length; index < length; index += 1) { - type = state.implicitTypes[index]; - - if (type.resolve(str)) { - return true; - } - } - - return false; -} - -// [33] s-white ::= s-space | s-tab -function isWhitespace(c) { - return c === CHAR_SPACE || c === CHAR_TAB; -} - -// Returns true if the character can be printed without escaping. -// From YAML 1.2: "any allowed characters known to be non-printable -// should also be escaped. [However,] This isn’t mandatory" -// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029. -function isPrintable(c) { - return (0x00020 <= c && c <= 0x00007E) - || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029) - || ((0x0E000 <= c && c <= 0x00FFFD) && c !== 0xFEFF /* BOM */) - || (0x10000 <= c && c <= 0x10FFFF); -} - -// [34] ns-char ::= nb-char - s-white -// [27] nb-char ::= c-printable - b-char - c-byte-order-mark -// [26] b-char ::= b-line-feed | b-carriage-return -// [24] b-line-feed ::= #xA /* LF */ -// [25] b-carriage-return ::= #xD /* CR */ -// [3] c-byte-order-mark ::= #xFEFF -function isNsChar(c) { - return isPrintable(c) && !isWhitespace(c) - // byte-order-mark - && c !== 0xFEFF - // b-char - && c !== CHAR_CARRIAGE_RETURN - && c !== CHAR_LINE_FEED; -} - -// Simplified test for values allowed after the first character in plain style. -function isPlainSafe(c, prev) { - // Uses a subset of nb-char - c-flow-indicator - ":" - "#" - // where nb-char ::= c-printable - b-char - c-byte-order-mark. - return isPrintable(c) && c !== 0xFEFF - // - c-flow-indicator - && c !== CHAR_COMMA - && c !== CHAR_LEFT_SQUARE_BRACKET - && c !== CHAR_RIGHT_SQUARE_BRACKET - && c !== CHAR_LEFT_CURLY_BRACKET - && c !== CHAR_RIGHT_CURLY_BRACKET - // - ":" - "#" - // /* An ns-char preceding */ "#" - && c !== CHAR_COLON - && ((c !== CHAR_SHARP) || (prev && isNsChar(prev))); -} - -// Simplified test for values allowed as the first character in plain style. -function isPlainSafeFirst(c) { - // Uses a subset of ns-char - c-indicator - // where ns-char = nb-char - s-white. 
- return isPrintable(c) && c !== 0xFEFF - && !isWhitespace(c) // - s-white - // - (c-indicator ::= - // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}” - && c !== CHAR_MINUS - && c !== CHAR_QUESTION - && c !== CHAR_COLON - && c !== CHAR_COMMA - && c !== CHAR_LEFT_SQUARE_BRACKET - && c !== CHAR_RIGHT_SQUARE_BRACKET - && c !== CHAR_LEFT_CURLY_BRACKET - && c !== CHAR_RIGHT_CURLY_BRACKET - // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"” - && c !== CHAR_SHARP - && c !== CHAR_AMPERSAND - && c !== CHAR_ASTERISK - && c !== CHAR_EXCLAMATION - && c !== CHAR_VERTICAL_LINE - && c !== CHAR_EQUALS - && c !== CHAR_GREATER_THAN - && c !== CHAR_SINGLE_QUOTE - && c !== CHAR_DOUBLE_QUOTE - // | “%” | “@” | “`”) - && c !== CHAR_PERCENT - && c !== CHAR_COMMERCIAL_AT - && c !== CHAR_GRAVE_ACCENT; -} - -// Determines whether block indentation indicator is required. -function needIndentIndicator(string) { - var leadingSpaceRe = /^\n* /; - return leadingSpaceRe.test(string); -} - -var STYLE_PLAIN = 1, - STYLE_SINGLE = 2, - STYLE_LITERAL = 3, - STYLE_FOLDED = 4, - STYLE_DOUBLE = 5; - -// Determines which scalar styles are possible and returns the preferred style. -// lineWidth = -1 => no limit. -// Pre-conditions: str.length > 0. -// Post-conditions: -// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. -// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). -// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). -function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType) { - var i; - var char, prev_char; - var hasLineBreak = false; - var hasFoldableLine = false; // only checked if shouldTrackWidth - var shouldTrackWidth = lineWidth !== -1; - var previousLineBreak = -1; // count the first line correctly - var plain = isPlainSafeFirst(string.charCodeAt(0)) - && !isWhitespace(string.charCodeAt(string.length - 1)); - - if (singleLineOnly) { - // Case: no block styles. - // Check for disallowed characters to rule out plain and single. - for (i = 0; i < string.length; i++) { - char = string.charCodeAt(i); - if (!isPrintable(char)) { - return STYLE_DOUBLE; - } - prev_char = i > 0 ? string.charCodeAt(i - 1) : null; - plain = plain && isPlainSafe(char, prev_char); - } - } else { - // Case: block styles permitted. - for (i = 0; i < string.length; i++) { - char = string.charCodeAt(i); - if (char === CHAR_LINE_FEED) { - hasLineBreak = true; - // Check if any line can be folded. - if (shouldTrackWidth) { - hasFoldableLine = hasFoldableLine || - // Foldable line = too long, and not more-indented. - (i - previousLineBreak - 1 > lineWidth && - string[previousLineBreak + 1] !== ' '); - previousLineBreak = i; - } - } else if (!isPrintable(char)) { - return STYLE_DOUBLE; - } - prev_char = i > 0 ? string.charCodeAt(i - 1) : null; - plain = plain && isPlainSafe(char, prev_char); - } - // in case the end is missing a \n - hasFoldableLine = hasFoldableLine || (shouldTrackWidth && - (i - previousLineBreak - 1 > lineWidth && - string[previousLineBreak + 1] !== ' ')); - } - // Although every style can represent \n without escaping, prefer block styles - // for multiline, since they're more readable and they don't add empty lines. - // Also prefer folding a super-long line. - if (!hasLineBreak && !hasFoldableLine) { - // Strings interpretable as another type have to be quoted; - // e.g. the string 'true' vs. the boolean true. - return plain && !testAmbiguousType(string) - ? 
STYLE_PLAIN : STYLE_SINGLE; - } - // Edge case: block indentation indicator can only have one digit. - if (indentPerLevel > 9 && needIndentIndicator(string)) { - return STYLE_DOUBLE; - } - // At this point we know block styles are valid. - // Prefer literal style unless we want to fold. - return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; -} - -// Note: line breaking/folding is implemented for only the folded style. -// NB. We drop the last trailing newline (if any) of a returned block scalar -// since the dumper adds its own newline. This always works: -// • No ending newline => unaffected; already using strip "-" chomping. -// • Ending newline => removed then restored. -// Importantly, this keeps the "+" chomp indicator from gaining an extra line. -function writeScalar(state, string, level, iskey) { - state.dump = (function () { - if (string.length === 0) { - return "''"; - } - if (!state.noCompatMode && - DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1) { - return "'" + string + "'"; - } - - var indent = state.indent * Math.max(1, level); // no 0-indent scalars - // As indentation gets deeper, let the width decrease monotonically - // to the lower bound min(state.lineWidth, 40). - // Note that this implies - // state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound. - // state.lineWidth > 40 + state.indent: width decreases until the lower bound. - // This behaves better than a constant minimum width which disallows narrower options, - // or an indent threshold which causes the width to suddenly increase. - var lineWidth = state.lineWidth === -1 - ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); - - // Without knowing if keys are implicit/explicit, assume implicit for safety. - var singleLineOnly = iskey - // No block styles in flow mode. - || (state.flowLevel > -1 && level >= state.flowLevel); - function testAmbiguity(string) { - return testImplicitResolving(state, string); - } - - switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, testAmbiguity)) { - case STYLE_PLAIN: - return string; - case STYLE_SINGLE: - return "'" + string.replace(/'/g, "''") + "'"; - case STYLE_LITERAL: - return '|' + blockHeader(string, state.indent) - + dropEndingNewline(indentString(string, indent)); - case STYLE_FOLDED: - return '>' + blockHeader(string, state.indent) - + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); - case STYLE_DOUBLE: - return '"' + escapeString(string, lineWidth) + '"'; - default: - throw new YAMLException('impossible error: invalid scalar style'); - } - }()); -} - -// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9. -function blockHeader(string, indentPerLevel) { - var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ''; - - // note the special case: the string '\n' counts as a "trailing" empty line. - var clip = string[string.length - 1] === '\n'; - var keep = clip && (string[string.length - 2] === '\n' || string === '\n'); - var chomp = keep ? '+' : (clip ? '' : '-'); - - return indentIndicator + chomp + '\n'; -} - -// (See the note for writeScalar.) -function dropEndingNewline(string) { - return string[string.length - 1] === '\n' ? string.slice(0, -1) : string; -} - -// Note: a long line without a suitable break point will exceed the width limit. -// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0. 
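// How the style selection above comes out through the public API
// (js-yaml 3.x, with `yaml` required as in the earlier sketch):
yaml.safeDump({ note: "line one\nline two\n" });
// -> 'note: |\n  line one\n  line two\n'   (literal block: multiline, nothing to fold)
yaml.safeDump({ flag: "true" });
// -> "flag: 'true'\n"   (single-quoted: a plain 'true' would re-parse as a boolean)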
-function foldString(string, width) { - // In folded style, $k$ consecutive newlines output as $k+1$ newlines— - // unless they're before or after a more-indented line, or at the very - // beginning or end, in which case $k$ maps to $k$. - // Therefore, parse each chunk as newline(s) followed by a content line. - var lineRe = /(\n+)([^\n]*)/g; - - // first line (possibly an empty line) - var result = (function () { - var nextLF = string.indexOf('\n'); - nextLF = nextLF !== -1 ? nextLF : string.length; - lineRe.lastIndex = nextLF; - return foldLine(string.slice(0, nextLF), width); - }()); - // If we haven't reached the first content line yet, don't add an extra \n. - var prevMoreIndented = string[0] === '\n' || string[0] === ' '; - var moreIndented; - - // rest of the lines - var match; - while ((match = lineRe.exec(string))) { - var prefix = match[1], line = match[2]; - moreIndented = (line[0] === ' '); - result += prefix - + (!prevMoreIndented && !moreIndented && line !== '' - ? '\n' : '') - + foldLine(line, width); - prevMoreIndented = moreIndented; - } - - return result; -} - -// Greedy line breaking. -// Picks the longest line under the limit each time, -// otherwise settles for the shortest line over the limit. -// NB. More-indented lines *cannot* be folded, as that would add an extra \n. -function foldLine(line, width) { - if (line === '' || line[0] === ' ') return line; - - // Since a more-indented line adds a \n, breaks can't be followed by a space. - var breakRe = / [^ ]/g; // note: the match index will always be <= length-2. - var match; - // start is an inclusive index. end, curr, and next are exclusive. - var start = 0, end, curr = 0, next = 0; - var result = ''; - - // Invariants: 0 <= start <= length-1. - // 0 <= curr <= next <= max(0, length-2). curr - start <= width. - // Inside the loop: - // A match implies length >= 2, so curr and next are <= length-2. - while ((match = breakRe.exec(line))) { - next = match.index; - // maintain invariant: curr - start <= width - if (next - start > width) { - end = (curr > start) ? curr : next; // derive end <= length-2 - result += '\n' + line.slice(start, end); - // skip the space that was output as \n - start = end + 1; // derive start <= length-1 - } - curr = next; - } - - // By the invariants, start <= length-1, so there is something left over. - // It is either the whole string or a part starting from non-whitespace. - result += '\n'; - // Insert a break if the remainder is too long and there is a break available. - if (line.length - start > width && curr > start) { - result += line.slice(start, curr) + '\n' + line.slice(curr + 1); - } else { - result += line.slice(start); - } - - return result.slice(1); // drop extra \n joiner -} - -// Escapes a double-quoted string. -function escapeString(string) { - var result = ''; - var char, nextChar; - var escapeSeq; - - for (var i = 0; i < string.length; i++) { - char = string.charCodeAt(i); - // Check for surrogate pairs (reference Unicode 3.0 section "3.7 Surrogates"). - if (char >= 0xD800 && char <= 0xDBFF/* high surrogate */) { - nextChar = string.charCodeAt(i + 1); - if (nextChar >= 0xDC00 && nextChar <= 0xDFFF/* low surrogate */) { - // Combine the surrogate pair and store it escaped. - result += encodeHex((char - 0xD800) * 0x400 + nextChar - 0xDC00 + 0x10000); - // Advance index one extra since we already used that char here. - i++; continue; - } - } - escapeSeq = ESCAPE_SEQUENCES[char]; - result += !escapeSeq && isPrintable(char) - ? 
string[i] - : escapeSeq || encodeHex(char); - } - - return result; -} - -function writeFlowSequence(state, level, object) { - var _result = '', - _tag = state.tag, - index, - length; - - for (index = 0, length = object.length; index < length; index += 1) { - // Write only valid elements. - if (writeNode(state, level, object[index], false, false)) { - if (index !== 0) _result += ',' + (!state.condenseFlow ? ' ' : ''); - _result += state.dump; - } - } - - state.tag = _tag; - state.dump = '[' + _result + ']'; -} - -function writeBlockSequence(state, level, object, compact) { - var _result = '', - _tag = state.tag, - index, - length; - - for (index = 0, length = object.length; index < length; index += 1) { - // Write only valid elements. - if (writeNode(state, level + 1, object[index], true, true)) { - if (!compact || index !== 0) { - _result += generateNextLine(state, level); - } - - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - _result += '-'; - } else { - _result += '- '; - } - - _result += state.dump; - } - } - - state.tag = _tag; - state.dump = _result || '[]'; // Empty sequence if no valid values. -} - -function writeFlowMapping(state, level, object) { - var _result = '', - _tag = state.tag, - objectKeyList = Object.keys(object), - index, - length, - objectKey, - objectValue, - pairBuffer; - - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - - pairBuffer = ''; - if (index !== 0) pairBuffer += ', '; - - if (state.condenseFlow) pairBuffer += '"'; - - objectKey = objectKeyList[index]; - objectValue = object[objectKey]; - - if (!writeNode(state, level, objectKey, false, false)) { - continue; // Skip this pair because of invalid key; - } - - if (state.dump.length > 1024) pairBuffer += '? '; - - pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' '); - - if (!writeNode(state, level, objectValue, false, false)) { - continue; // Skip this pair because of invalid value. - } - - pairBuffer += state.dump; - - // Both key and value are valid. - _result += pairBuffer; - } - - state.tag = _tag; - state.dump = '{' + _result + '}'; -} - -function writeBlockMapping(state, level, object, compact) { - var _result = '', - _tag = state.tag, - objectKeyList = Object.keys(object), - index, - length, - objectKey, - objectValue, - explicitPair, - pairBuffer; - - // Allow sorting keys so that the output file is deterministic - if (state.sortKeys === true) { - // Default sorting - objectKeyList.sort(); - } else if (typeof state.sortKeys === 'function') { - // Custom sort function - objectKeyList.sort(state.sortKeys); - } else if (state.sortKeys) { - // Something is wrong - throw new YAMLException('sortKeys must be a boolean or a function'); - } - - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - pairBuffer = ''; - - if (!compact || index !== 0) { - pairBuffer += generateNextLine(state, level); - } - - objectKey = objectKeyList[index]; - objectValue = object[objectKey]; - - if (!writeNode(state, level + 1, objectKey, true, true, true)) { - continue; // Skip this pair because of invalid key. - } - - explicitPair = (state.tag !== null && state.tag !== '?') || - (state.dump && state.dump.length > 1024); - - if (explicitPair) { - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - pairBuffer += '?'; - } else { - pairBuffer += '? 
'; - } - } - - pairBuffer += state.dump; - - if (explicitPair) { - pairBuffer += generateNextLine(state, level); - } - - if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { - continue; // Skip this pair because of invalid value. - } - - if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { - pairBuffer += ':'; - } else { - pairBuffer += ': '; - } - - pairBuffer += state.dump; - - // Both key and value are valid. - _result += pairBuffer; - } - - state.tag = _tag; - state.dump = _result || '{}'; // Empty mapping if no valid pairs. -} - -function detectType(state, object, explicit) { - var _result, typeList, index, length, type, style; - - typeList = explicit ? state.explicitTypes : state.implicitTypes; - - for (index = 0, length = typeList.length; index < length; index += 1) { - type = typeList[index]; - - if ((type.instanceOf || type.predicate) && - (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) && - (!type.predicate || type.predicate(object))) { - - state.tag = explicit ? type.tag : '?'; - - if (type.represent) { - style = state.styleMap[type.tag] || type.defaultStyle; - - if (_toString.call(type.represent) === '[object Function]') { - _result = type.represent(object, style); - } else if (_hasOwnProperty.call(type.represent, style)) { - _result = type.represent[style](object, style); - } else { - throw new YAMLException('!<' + type.tag + '> tag resolver accepts not "' + style + '" style'); - } - - state.dump = _result; - } - - return true; - } - } - - return false; -} - -// Serializes `object` and writes it to global `result`. -// Returns true on success, or false on invalid object. -// -function writeNode(state, level, object, block, compact, iskey) { - state.tag = null; - state.dump = object; - - if (!detectType(state, object, false)) { - detectType(state, object, true); - } - - var type = _toString.call(state.dump); - - if (block) { - block = (state.flowLevel < 0 || state.flowLevel > level); - } - - var objectOrArray = type === '[object Object]' || type === '[object Array]', - duplicateIndex, - duplicate; - - if (objectOrArray) { - duplicateIndex = state.duplicates.indexOf(object); - duplicate = duplicateIndex !== -1; - } - - if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) { - compact = false; - } - - if (duplicate && state.usedDuplicates[duplicateIndex]) { - state.dump = '*ref_' + duplicateIndex; - } else { - if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { - state.usedDuplicates[duplicateIndex] = true; - } - if (type === '[object Object]') { - if (block && (Object.keys(state.dump).length !== 0)) { - writeBlockMapping(state, level, state.dump, compact); - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + state.dump; - } - } else { - writeFlowMapping(state, level, state.dump); - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; - } - } - } else if (type === '[object Array]') { - var arrayLevel = (state.noArrayIndent && (level > 0)) ? 
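// The duplicate bookkeeping above is what produces YAML anchors and aliases
// unless noRefs is set; e.g. (js-yaml 3.x, `yaml` as in the sketches above):
//
//   const shared = { x: 1 };
//   yaml.safeDump({ a: shared, b: shared });
//   // -> 'a: &ref_0\n  x: 1\nb: *ref_0\n'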
level - 1 : level; - if (block && (state.dump.length !== 0)) { - writeBlockSequence(state, arrayLevel, state.dump, compact); - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + state.dump; - } - } else { - writeFlowSequence(state, arrayLevel, state.dump); - if (duplicate) { - state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; - } - } - } else if (type === '[object String]') { - if (state.tag !== '?') { - writeScalar(state, state.dump, level, iskey); - } - } else { - if (state.skipInvalid) return false; - throw new YAMLException('unacceptable kind of an object to dump ' + type); - } - - if (state.tag !== null && state.tag !== '?') { - state.dump = '!<' + state.tag + '> ' + state.dump; - } - } - - return true; -} - -function getDuplicateReferences(object, state) { - var objects = [], - duplicatesIndexes = [], - index, - length; - - inspectNode(object, objects, duplicatesIndexes); - - for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { - state.duplicates.push(objects[duplicatesIndexes[index]]); - } - state.usedDuplicates = new Array(length); -} - -function inspectNode(object, objects, duplicatesIndexes) { - var objectKeyList, - index, - length; - - if (object !== null && typeof object === 'object') { - index = objects.indexOf(object); - if (index !== -1) { - if (duplicatesIndexes.indexOf(index) === -1) { - duplicatesIndexes.push(index); - } - } else { - objects.push(object); - - if (Array.isArray(object)) { - for (index = 0, length = object.length; index < length; index += 1) { - inspectNode(object[index], objects, duplicatesIndexes); - } - } else { - objectKeyList = Object.keys(object); - - for (index = 0, length = objectKeyList.length; index < length; index += 1) { - inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); - } - } - } - } -} - -function dump(input, options) { - options = options || {}; - - var state = new State(options); - - if (!state.noRefs) getDuplicateReferences(input, state); - - if (writeNode(state, 0, input, true, true)) return state.dump + '\n'; - - return ''; -} - -function safeDump(input, options) { - return dump(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); -} - -module.exports.dump = dump; -module.exports.safeDump = safeDump; - - -/***/ }), -/* 474 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(598) -const path = __webpack_require__(622) -const assert = __webpack_require__(357) - -const isWindows = (process.platform === 'win32') - -function defaults (options) { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 -} - -function rimraf (p, options, cb) { - let busyTries = 0 - - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && - busyTries < options.maxBusyTries) { - busyTries++ - const time = 
busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), time) - } - - // already gone - if (er.code === 'ENOENT') er = null - } - - cb(er) - }) -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === 'ENOENT') { - return cb(null) - } - - // Windows can EPERM on stat. Life is suffering. - if (er && er.code === 'EPERM' && isWindows) { - return fixWinEPERM(p, options, er, cb) - } - - if (st && st.isDirectory()) { - return rmdir(p, options, er, cb) - } - - options.unlink(p, er => { - if (er) { - if (er.code === 'ENOENT') { - return cb(null) - } - if (er.code === 'EPERM') { - return (isWindows) - ? fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - } - if (er.code === 'EISDIR') { - return rmdir(p, options, er, cb) - } - } - return cb(er) - }) - }) -} - -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) { - cb(er2.code === 'ENOENT' ? null : er) - } else { - options.stat(p, (er3, stats) => { - if (er3) { - cb(er3.code === 'ENOENT' ? null : er) - } else if (stats.isDirectory()) { - rmdir(p, options, er, cb) - } else { - options.unlink(p, cb) - } - }) - } - }) -} - -function fixWinEPERMSync (p, options, er) { - let stats - - assert(p) - assert(options) - - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === 'ENOENT') { - return - } else { - throw er - } - } - - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === 'ENOENT') { - return - } else { - throw er - } - } - - if (stats.isDirectory()) { - rmdirSync(p, options, er) - } else { - options.unlinkSync(p) - } -} - -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. 
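// Calling convention for the retrying remover defined above (the path and
// retry count are illustrative):
rimraf("/tmp/build-output", { maxBusyTries: 5 }, err => {
  // EBUSY/ENOTEMPTY/EPERM are retried with a growing delay (busyTries * 100 ms);
  // ENOENT is mapped to success, so removing an already-missing path is fine.
  if (err) throw err;
});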
- options.rmdir(p, er => { - if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { - rmkids(p, options, cb) - } else if (er && er.code === 'ENOTDIR') { - cb(originalEr) - } else { - cb(er) - } - }) -} - -function rmkids (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, (er, files) => { - if (er) return cb(er) - - let n = files.length - let errState - - if (n === 0) return options.rmdir(p, cb) - - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) { - return - } - if (er) return cb(errState = er) - if (--n === 0) { - options.rmdir(p, cb) - } - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p, options) { - let st - - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') - - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === 'ENOENT') { - return - } - - // Windows can EPERM on stat. Life is suffering. - if (er.code === 'EPERM' && isWindows) { - fixWinEPERMSync(p, options, er) - } - } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) { - rmdirSync(p, options, null) - } else { - options.unlinkSync(p) - } - } catch (er) { - if (er.code === 'ENOENT') { - return - } else if (er.code === 'EPERM') { - return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - } else if (er.code !== 'EISDIR') { - throw er - } - rmdirSync(p, options, er) - } -} - -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === 'ENOTDIR') { - throw originalEr - } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { - rmkidsSync(p, options) - } else if (er.code !== 'ENOENT') { - throw er - } - } -} - -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - - if (isWindows) { - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const startTime = Date.now() - do { - try { - const ret = options.rmdirSync(p, options) - return ret - } catch {} - } while (Date.now() - startTime < 500) // give up after 500ms - } else { - const ret = options.rmdirSync(p, options) - return ret - } -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - -/***/ }), -/* 475 */, -/* 476 */, -/* 477 */ -/***/ (function(module) { - -function stringify (obj, options = {}) { - const EOL = options.EOL || '\n' - - const str = JSON.stringify(obj, options ? 
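// stringify() here lets callers force a line ending on the serialized JSON; e.g.:
//
//   stringify({ a: 1 }, { spaces: 2, EOL: "\r\n" })
//   // -> '{\r\n  "a": 1\r\n}\r\n'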
options.replacer : null, options.spaces) - - return str.replace(/\n/g, EOL) + EOL -} - -function stripBom (content) { - // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified - if (Buffer.isBuffer(content)) content = content.toString('utf8') - return content.replace(/^\uFEFF/, '') -} - -module.exports = { stringify, stripBom } - - -/***/ }), -/* 478 */, -/* 479 */, -/* 480 */, -/* 481 */, -/* 482 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -var YAML_DATE_REGEXP = new RegExp( - '^([0-9][0-9][0-9][0-9])' + // [1] year - '-([0-9][0-9])' + // [2] month - '-([0-9][0-9])$'); // [3] day - -var YAML_TIMESTAMP_REGEXP = new RegExp( - '^([0-9][0-9][0-9][0-9])' + // [1] year - '-([0-9][0-9]?)' + // [2] month - '-([0-9][0-9]?)' + // [3] day - '(?:[Tt]|[ \\t]+)' + // ... - '([0-9][0-9]?)' + // [4] hour - ':([0-9][0-9])' + // [5] minute - ':([0-9][0-9])' + // [6] second - '(?:\\.([0-9]*))?' + // [7] fraction - '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour - '(?::([0-9][0-9]))?))?$'); // [11] tz_minute - -function resolveYamlTimestamp(data) { - if (data === null) return false; - if (YAML_DATE_REGEXP.exec(data) !== null) return true; - if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; - return false; -} - -function constructYamlTimestamp(data) { - var match, year, month, day, hour, minute, second, fraction = 0, - delta = null, tz_hour, tz_minute, date; - - match = YAML_DATE_REGEXP.exec(data); - if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data); - - if (match === null) throw new Error('Date resolve error'); - - // match: [1] year [2] month [3] day - - year = +(match[1]); - month = +(match[2]) - 1; // JS month starts with 0 - day = +(match[3]); - - if (!match[4]) { // no hour - return new Date(Date.UTC(year, month, day)); - } - - // match: [4] hour [5] minute [6] second [7] fraction - - hour = +(match[4]); - minute = +(match[5]); - second = +(match[6]); - - if (match[7]) { - fraction = match[7].slice(0, 3); - while (fraction.length < 3) { // milli-seconds - fraction += '0'; - } - fraction = +fraction; - } - - // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute - - if (match[9]) { - tz_hour = +(match[10]); - tz_minute = +(match[11] || 0); - delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds - if (match[9] === '-') delta = -delta; - } - - date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); - - if (delta) date.setTime(date.getTime() - delta); - - return date; -} - -function representYamlTimestamp(object /*, style*/) { - return object.toISOString(); -} - -module.exports = new Type('tag:yaml.org,2002:timestamp', { - kind: 'scalar', - resolve: resolveYamlTimestamp, - construct: constructYamlTimestamp, - instanceOf: Date, - represent: representYamlTimestamp -}); - - -/***/ }), -/* 483 */, -/* 484 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { spawn } = __webpack_require__(129); -var assert = __webpack_require__(357); - -const { TimeoutError, logger } = __webpack_require__(79); -const fs = __webpack_require__(747); - -class ExitError extends Error { - constructor(message, code) { - super(message); - this.code = code; - } -} - -const FETCH_DEPTH = 10; - -const COMMON_ARGS = [ - "-c", - "user.name=GitHub", - "-c", - "user.email=noreply@github.com" -]; - -function git(cwd, ...args) { - const stdio = [ - "ignore", - "pipe", - logger.level === "trace" || logger.level === 
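// The promise-wrapped spawn being built here backs every helper in this
// module; a typical chain (URL, directory and branches are made up):
//
//   await clone("https://github.com/owner/project", "./project", "main"); // shallow, skipped if dir exists
//   await fetch("./project", "develop"); // this module's git-fetch helper, not node-fetch
//   const base = await fetchUntilMergeBase("./project", "develop", 60000); // deepens by FETCH_DEPTH until a merge base appears
//   console.log("merge base:", base);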
"debug" ? "inherit" : "pipe" - ]; - // the URL passed to the clone command could contain a password! - const command = `git ${args.join(" ")}`; - logger.debug("Executing", command); - return new Promise((resolve, reject) => { - const proc = spawn( - "git", - COMMON_ARGS.concat(args.filter(a => a !== null)), - { cwd, stdio } - ); - const buffers = []; - proc.stdout.on("data", data => buffers.push(data)); - proc.stderr.on("data", data => buffers.push(data)); - - proc.on("error", () => { - reject(new Error(`command failed: ${command}`)); - }); - proc.on("exit", code => { - const stdoutData = Buffer.concat(buffers); - if (code === 0) { - resolve(stdoutData.toString("utf8").trim()); - } else { - reject( - new ExitError( - `command ${command} failed with code ${code}. Error Message: ${stdoutData}`, - code - ) - ); - } - }); - }); -} - -async function clone(from, to, branch) { - if (!fs.existsSync(to)) { - await git( - ".", - "clone", - "--quiet", - "--shallow-submodules", - "--no-tags", - "--branch", - branch, - from, - to - ); - } else { - logger.warn(`Folder ${to} already exist. Won't clone`); - } -} - -async function fetch(dir, branch) { - await git( - dir, - "fetch", - "--quiet", - "origin", - `${branch}:refs/remotes/origin/${branch}` - ); -} - -async function fetchUntilMergeBase(dir, branch, timeout) { - const maxTime = new Date().getTime() + timeout; - const ref = `refs/remotes/origin/${branch}`; - while (new Date().getTime() < maxTime) { - const base = await mergeBase(dir, "HEAD", ref); - if (base) { - const bases = [base]; - const parents = await mergeCommits(dir, ref); - let fetchMore = false; - for (const parent of parents.flat()) { - const b = await mergeBase(dir, parent, ref); - if (b) { - if (!bases.includes(b)) { - bases.push(b); - } - } else { - // we found a commit which does not have a common ancestor with - // the branch we want to merge, so we need to fetch more - fetchMore = true; - break; - } - } - if (!fetchMore) { - const commonBase = await mergeBase(dir, ...bases); - if (!commonBase) { - throw new Error(`failed to find common base for ${bases}`); - } - return commonBase; - } - } - await fetchDeepen(dir); - } - throw new TimeoutError(); -} - -async function fetchDeepen(dir) { - await git(dir, "fetch", "--quiet", "--deepen", FETCH_DEPTH); -} - -async function mergeBase(dir, ...refs) { - if (refs.length === 1) { - return refs[0]; - } else if (refs.length < 1) { - throw new Error("empty refs!"); - } - let todo = refs; - try { - while (todo.length > 1) { - const base = await git(dir, "merge-base", todo[0], todo[1]); - todo = [base].concat(todo.slice(2)); - } - return todo[0]; - } catch (e) { - if (e instanceof ExitError && e.code === 1) { - return null; - } else { - throw e; - } - } -} - -async function mergeCommits(dir, ref) { - return (await git(dir, "rev-list", "--parents", `${ref}..HEAD`)) - .split(/\n/g) - .map(line => line.split(/ /g).slice(1)) - .filter(commit => commit.length > 1); -} - -async function merge(dir, group, repositoryName, branch) { - return await git( - dir, - "pull", - `https://github.com/${group}/${repositoryName}`, - branch - ); -} - -async function head(dir) { - return await git(dir, "show-ref", "--head", "-s", "/HEAD"); -} - -async function sha(dir, branch) { - return await git(dir, "show-ref", "-s", `refs/remotes/origin/${branch}`); -} - -async function rebase(dir, branch) { - return await git(dir, "rebase", "--quiet", "--autosquash", branch); -} - -async function push(dir, force, branch) { - return await git( - dir, - "push", - "--quiet", - force ? 
"--force-with-lease" : null, - "origin", - branch - ); -} - -async function doesBranchExist(octokit, owner, repo, branch) { - assert(owner, "owner is not defined"); - assert(repo, "repo is not defined"); - assert(branch, "branch is not defined"); - try { - const { status } = await octokit.repos.getBranch({ - owner, - repo, - branch - }); - return status == 200; - } catch (e) { - logger.warn( - `project github.com/${owner}/${repo}:${branch} does not exist. It's not necessarily an error.` - ); - return false; - } -} - -/** - * Checks if there is a pull request either from a forked project or the same project - * @param {Object} octokit instance - * @param {String} owner the repo owner or group - * @param {String} repo the repository name - * @param {String} branch the branch of the pull request to look for - * @param {String} fromAuthor the pull request author - */ -async function hasPullRequest(octokit, owner, repo, branch, fromAuthor) { - return ( - (await hasForkPullRequest(octokit, owner, repo, branch, fromAuthor)) || - (await hasOriginPullRequest(octokit, owner, repo, branch)) - ); -} - -/** - * Checks if there is a pull request from a forked project - * @param {Object} octokit instance - * @param {String} owner the repo owner or group - * @param {String} repo the repository name - * @param {String} branch the branch of the pull request to look for - * @param {String} fromAuthor the pull request author - */ -async function hasForkPullRequest(octokit, owner, repo, branch, fromAuthor) { - assert(owner, "owner is not defined"); - assert(repo, "repo is not defined"); - assert(branch, "branch is not defined"); - assert(fromAuthor, "fromAuthor is not defined"); - try { - const { status, data } = await octokit.pulls.list({ - owner, - repo, - state: "open", - head: `${fromAuthor}:${branch}` - }); - return status == 200 && data.length > 0; - } catch (e) { - logger.error( - `Error getting pull request list from https://api.github.com/repos/${owner}/${repo}/pulls?head=${fromAuthor}:${branch}&state=open'".` - ); - throw e; - } -} - -/** - * Checks if there is a pull request from the same project - * @param {Object} octokit instance - * @param {String} owner the repo owner or group - * @param {String} repo the repository name - * @param {String} branch the branch of the pull request to look for - */ -async function hasOriginPullRequest(octokit, owner, repo, branch) { - assert(owner, "owner is not defined"); - assert(repo, "repo is not defined"); - assert(branch, "branch is not defined"); - try { - const { status, data } = await octokit.pulls.list({ - owner, - repo, - state: "open", - head: `${owner}:${branch}` - }); - return status == 200 && data.length > 0; - } catch (e) { - logger.error( - `Error getting pull request list from https://api.github.com/repos/${owner}/${repo}/pulls?head=${owner}:${branch}&state=open'".` - ); - throw e; - } -} - -async function getForkedProject( - octokit, - owner, - repo, - wantedOwner, - page = 1, - per_page = 100 -) { - assert(owner, "owner is not defined"); - assert(repo, "repo is not defined"); - assert(wantedOwner, "wantedOwner is not defined"); - assert(page, "page is not defined"); - try { - const { status, data } = await octokit.repos.listForks({ - owner, - repo, - page - }); - if (status == 200) { - if (data && data.length > 0) { - const forkedProject = data.find( - forkedProject => forkedProject.owner.login === wantedOwner - ); - return forkedProject - ? 
forkedProject
- : await getForkedProject(
- octokit,
- owner,
- repo,
- wantedOwner,
- ++page,
- per_page
- );
- } else {
- return undefined;
- }
- }
- } catch (e) {
- logger.error(
- `Error getting forked project list from https://api.github.com/repos/${owner}/${repo}/forks?per_page=${per_page}&page=${page}'".`
- );
- throw e;
- }
-}
-
-module.exports = {
- ExitError,
- git,
- clone,
- fetch,
- fetchUntilMergeBase,
- fetchDeepen,
- mergeBase,
- mergeCommits,
- merge,
- head,
- sha,
- rebase,
- push,
- doesBranchExist,
- hasPullRequest,
- getForkedProject
-};
-
-
-/***/ }),
-/* 485 */,
-/* 486 */,
-/* 487 */,
-/* 488 */,
-/* 489 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-
-const path = __webpack_require__(622);
-const which = __webpack_require__(814);
-const pathKey = __webpack_require__(39)();
-
-function resolveCommandAttempt(parsed, withoutPathExt) {
- const cwd = process.cwd();
- const hasCustomCwd = parsed.options.cwd != null;
-
- // If a custom `cwd` was specified, we need to change the process cwd
- // because `which` will do stat calls but does not support a custom cwd
- if (hasCustomCwd) {
- try {
- process.chdir(parsed.options.cwd);
- } catch (err) {
- /* Empty */
- }
- }
-
- let resolved;
-
- try {
- resolved = which.sync(parsed.command, {
- path: (parsed.options.env || process.env)[pathKey],
- pathExt: withoutPathExt ? path.delimiter : undefined,
- });
- } catch (e) {
- /* Empty */
- } finally {
- process.chdir(cwd);
- }
-
- // If we successfully resolved, ensure that an absolute path is returned
- // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it
- if (resolved) {
- resolved = path.resolve(hasCustomCwd ? parsed.options.cwd : '', resolved);
- }
-
- return resolved;
-}
-
-function resolveCommand(parsed) {
- return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
-}
-
-module.exports = resolveCommand;
-
-
-/***/ }),
-/* 490 */,
-/* 491 */,
-/* 492 */,
-/* 493 */,
-/* 494 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-
-const os = __webpack_require__(87);
-const execa = __webpack_require__(955);
-
-// Reference: https://www.gaijin.at/en/lstwinver.php
-const names = new Map([
- ['10.0', '10'],
- ['6.3', '8.1'],
- ['6.2', '8'],
- ['6.1', '7'],
- ['6.0', 'Vista'],
- ['5.2', 'Server 2003'],
- ['5.1', 'XP'],
- ['5.0', '2000'],
- ['4.9', 'ME'],
- ['4.1', '98'],
- ['4.0', '95']
-]);
-
-const windowsRelease = release => {
- const version = /\d+\.\d/.exec(release || os.release());
-
- if (release && !version) {
- throw new Error('`release` argument doesn\'t match `n.n`');
- }
-
- const ver = (version || [])[0];
-
- // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime.
- // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version
- // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx
- // If `wmic` is obsolete (later versions of Windows 10), use PowerShell instead.
- // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name.
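- // Illustrative usage sketch (expected values follow from the `names` map above):
- //   windowsRelease('6.3');  // → '8.1'
- //   windowsRelease();       // inspects os.release() and may run the caption check below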
- if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
- let stdout;
- try {
- stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
- } catch (_) {
- stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
- }
-
- const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];
-
- if (year) {
- return `Server ${year}`;
- }
- }
-
- return names.get(ver);
-};
-
-module.exports = windowsRelease;
-
-
-/***/ }),
-/* 495 */
-/***/ (function(module) {
-
-/**
- * It generates an ordered array of the node's parents from top to bottom
- * @param {Object} node the node object with its parents and children
- */
-function parentChainFromNode(node) {
- const result = node.parents
- .reduce((acc, parent) => {
- acc.push(
- ...parentChainFromNode(parent)
- .filter(e => e && !acc.find(a => a.project === e.project))
- .map(e => {
- return { ...e };
- })
- );
- return acc;
- }, [])
- .filter(e => e);
- result.push(node);
- return result;
-}
-
-/**
- * It generates an ordered array of the node's children from top to bottom
- * @param {Object} node the node object with its parents and children
- * @param {Array} nodeList the nodeList to fill
- */
-function childChainFromNode(node, nodeList = []) {
- const index = nodeList.findIndex(n => n.project === node.project);
- if (index > -1) {
- nodeList.splice(index, 1);
- }
- nodeList.push(node);
- node.children.forEach(child => childChainFromNode(child, nodeList));
- return nodeList;
-}
-
-/**
- * JSON.stringify replacer that truncates nodes already serialized once, to avoid circular parent/child references
- * @param {String} key the JSON.stringify key
- * @param {Object} value the JSON.stringify value
- * @param {Array} cache an empty array instance
- */
-function jsonStringFunction(key, value, cache) {
- return ["parents", "children"].includes(key)
- ? value.map(node => {
- if (!cache.includes(node.project)) {
- cache.push(node.project);
- return node;
- }
- return {
- project: node.project,
- warning:
- "rest of the node information removed to avoid circular dependency problem. The node information is already defined in the json."
- };
- })
- : value;
-}
-
-module.exports = {
- parentChainFromNode,
- childChainFromNode,
- jsonStringFunction
-};
-
-
-/***/ }),
-/* 496 */,
-/* 497 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-"use strict";
-// Standard YAML's Failsafe schema.
-// http://www.yaml.org/spec/1.2/spec.html#id2802346
-
-
-
-
-
-var Schema = __webpack_require__(917);
-
-
-module.exports = new Schema({
- explicit: [
- __webpack_require__(782),
- __webpack_require__(654),
- __webpack_require__(325)
- ]
-});
-
-
-/***/ }),
-/* 498 */,
-/* 499 */,
-/* 500 */
-/***/ (function(module) {
-
-function ReadYamlException(message) {
- this.message = message;
- this.name = "ReadYamlException";
-}
-
-module.exports = { ReadYamlException };
-
-
-/***/ }),
-/* 501 */,
-/* 502 */,
-/* 503 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-const { run: uploadArtifacts } = __webpack_require__(395);
-const { logger } = __webpack_require__(79);
-
-async function archiveArtifacts(nodeTriggering, nodeArray, on) {
- const nodesToArchive = getNodesToArchive(nodeTriggering, nodeArray);
- logger.info(
- nodesToArchive.length > 0
- ?
`Archiving artifacts for ${nodesToArchive.map(node => node.project)}` - : "No artifacts to archive" - ); - - await uploadNodes(nodesToArchive, on); -} - -function getNodesToArchive(nodeTriggering, nodeArray) { - const archiveDependencies = - nodeTriggering.build["archive-artifacts"] && - nodeTriggering.build["archive-artifacts"].dependencies - ? nodeTriggering.build["archive-artifacts"].dependencies - : "none"; - - return archiveDependencies === "none" - ? [nodeTriggering].filter( - node => - node.build["archive-artifacts"] && - node.build["archive-artifacts"].paths - ) - : nodeArray.filter( - node => - node.build["archive-artifacts"] && - node.build["archive-artifacts"].paths && - (archiveDependencies === "all" || - archiveDependencies.includes(node.project) || - node.project === nodeTriggering.project) - ); -} - -async function uploadNodes(nodesToArchive, on) { - await Promise.allSettled( - nodesToArchive.map(async node => { - logger.info(`Project [${node.project}]. Uploading artifacts...`); - const uploadResponse = await uploadArtifacts( - node.build["archive-artifacts"], - on - ); - if (uploadResponse) { - const uploadArtifactsMessage = - uploadResponse.artifactItems && - uploadResponse.artifactItems.length > 0 - ? `Uploaded Items (${uploadResponse.artifactItems.length}): ${uploadResponse.artifactItems}.` - : ""; - if ( - uploadResponse.failedItems && - uploadResponse.failedItems.length > 0 - ) { - logger.error( - `Project [${node.project}] Failed State. Artifact [${uploadResponse.artifactName}]. Failed Items (${uploadResponse.failedItems.length}): ${uploadResponse.failedItems}. ${uploadArtifactsMessage}` - ); - return Promise.reject(uploadResponse); - } else { - logger.info( - `Project [${node.project}]. Artifact [${uploadResponse.artifactName}]. ${uploadArtifactsMessage}` - ); - return Promise.resolve(uploadResponse); - } - } else { - logger.info(`Project [${node.project}]. No artifacts uploaded`); - return Promise.resolve(undefined); - } - }) - ).then(promises => { - logger.info("-------------- ARCHIVE ARTIFACTS SUMMARY --------------"); - const totalUploadResponses = promises - .map(promiseResult => promiseResult.value || promiseResult.reason) - .filter( - uploadResponse => - uploadResponse && - uploadResponse.artifactItems && - uploadResponse.artifactItems.length > 0 - ); - const uploadedFiles = totalUploadResponses.flatMap( - uploadResponse => uploadResponse.artifactItems - ); - const failureUploadResponses = promises - .filter(promiseResult => promiseResult.reason) - .map(promiseResult => promiseResult.reason) - .filter( - uploadResponse => - uploadResponse && - uploadResponse.failedItems && - uploadResponse.failedItems.length > 0 - ); - const failedFiles = failureUploadResponses.flatMap( - uploadResponse => uploadResponse.failedItems - ); - logger.info( - `Artifacts uploaded (${ - totalUploadResponses.length - }): ${totalUploadResponses.map( - uploadResponse => uploadResponse.artifactName - )}. Files (${uploadedFiles.length}): ${uploadedFiles}` - ); - logger.info( - `Artifacts failed (${ - failureUploadResponses.length - }): ${failureUploadResponses.map( - uploadResponse => uploadResponse.artifactName - )}. 
Files (${failedFiles.length}): ${failedFiles}` - ); - }); -} - -module.exports = { archiveArtifacts }; - - -/***/ }), -/* 504 */, -/* 505 */, -/* 506 */, -/* 507 */, -/* 508 */, -/* 509 */, -/* 510 */ -/***/ (function(module) { - -module.exports = addHook - -function addHook (state, kind, name, hook) { - var orig = hook - if (!state.registry[name]) { - state.registry[name] = [] - } - - if (kind === 'before') { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)) - } - } - - if (kind === 'after') { - hook = function (method, options) { - var result - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_ - return orig(result, options) - }) - .then(function () { - return result - }) - } - } - - if (kind === 'error') { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options) - }) - } - } - - state.registry[name].push({ - hook: hook, - orig: orig - }) -} - - -/***/ }), -/* 511 */, -/* 512 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const path = __webpack_require__(622); -const pathKey = __webpack_require__(39); - -module.exports = opts => { - opts = Object.assign({ - cwd: process.cwd(), - path: process.env[pathKey()] - }, opts); - - let prev; - let pth = path.resolve(opts.cwd); - const ret = []; - - while (prev !== pth) { - ret.push(path.join(pth, 'node_modules/.bin')); - prev = pth; - pth = path.resolve(pth, '..'); - } - - // ensure the running `node` binary is used - ret.push(path.dirname(process.execPath)); - - return ret.concat(opts.path).join(path.delimiter); -}; - -module.exports.env = opts => { - opts = Object.assign({ - env: process.env - }, opts); - - const env = Object.assign({}, opts.env); - const path = pathKey({env}); - - opts.path = env[path]; - env[path] = module.exports(opts); - - return env; -}; - - -/***/ }), -/* 513 */, -/* 514 */, -/* 515 */, -/* 516 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -/*eslint-disable no-bitwise*/ - -var NodeBuffer; - -try { - // A trick for browserified version, to not include `Buffer` shim - var _require = require; - NodeBuffer = _require('buffer').Buffer; -} catch (__) {} - -var Type = __webpack_require__(547); - - -// [ 64, 65, 66 ] -> [ padding, CR, LF ] -var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r'; - - -function resolveYamlBinary(data) { - if (data === null) return false; - - var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP; - - // Convert one by one. 
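- // Worked example (illustrative): for the scalar 'aGVsbG8=' the loop below counts
- // 8 * 6 = 48 bits; 48 % 8 === 0, so the scalar resolves as binary, and
- // constructYamlBinary() would decode it to the bytes of 'hello'.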
- for (idx = 0; idx < max; idx++) { - code = map.indexOf(data.charAt(idx)); - - // Skip CR/LF - if (code > 64) continue; - - // Fail on illegal characters - if (code < 0) return false; - - bitlen += 6; - } - - // If there are any bits left, source was corrupted - return (bitlen % 8) === 0; -} - -function constructYamlBinary(data) { - var idx, tailbits, - input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan - max = input.length, - map = BASE64_MAP, - bits = 0, - result = []; - - // Collect by 6*4 bits (3 bytes) - - for (idx = 0; idx < max; idx++) { - if ((idx % 4 === 0) && idx) { - result.push((bits >> 16) & 0xFF); - result.push((bits >> 8) & 0xFF); - result.push(bits & 0xFF); - } - - bits = (bits << 6) | map.indexOf(input.charAt(idx)); - } - - // Dump tail - - tailbits = (max % 4) * 6; - - if (tailbits === 0) { - result.push((bits >> 16) & 0xFF); - result.push((bits >> 8) & 0xFF); - result.push(bits & 0xFF); - } else if (tailbits === 18) { - result.push((bits >> 10) & 0xFF); - result.push((bits >> 2) & 0xFF); - } else if (tailbits === 12) { - result.push((bits >> 4) & 0xFF); - } - - // Wrap into Buffer for NodeJS and leave Array for browser - if (NodeBuffer) { - // Support node 6.+ Buffer API when available - return NodeBuffer.from ? NodeBuffer.from(result) : new NodeBuffer(result); - } - - return result; -} - -function representYamlBinary(object /*, style*/) { - var result = '', bits = 0, idx, tail, - max = object.length, - map = BASE64_MAP; - - // Convert every three bytes to 4 ASCII characters. - - for (idx = 0; idx < max; idx++) { - if ((idx % 3 === 0) && idx) { - result += map[(bits >> 18) & 0x3F]; - result += map[(bits >> 12) & 0x3F]; - result += map[(bits >> 6) & 0x3F]; - result += map[bits & 0x3F]; - } - - bits = (bits << 8) + object[idx]; - } - - // Dump tail - - tail = max % 3; - - if (tail === 0) { - result += map[(bits >> 18) & 0x3F]; - result += map[(bits >> 12) & 0x3F]; - result += map[(bits >> 6) & 0x3F]; - result += map[bits & 0x3F]; - } else if (tail === 2) { - result += map[(bits >> 10) & 0x3F]; - result += map[(bits >> 4) & 0x3F]; - result += map[(bits << 2) & 0x3F]; - result += map[64]; - } else if (tail === 1) { - result += map[(bits >> 2) & 0x3F]; - result += map[(bits << 4) & 0x3F]; - result += map[64]; - result += map[64]; - } - - return result; -} - -function isBinary(object) { - return NodeBuffer && NodeBuffer.isBuffer(object); -} - -module.exports = new Type('tag:yaml.org,2002:binary', { - kind: 'scalar', - resolve: resolveYamlBinary, - construct: constructYamlBinary, - predicate: isBinary, - represent: representYamlBinary -}); - - -/***/ }), -/* 517 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const u = __webpack_require__(0).fromCallback -const fs = __webpack_require__(598) -const path = __webpack_require__(622) -const mkdir = __webpack_require__(884) -const pathExists = __webpack_require__(322).pathExists - -function outputFile (file, data, encoding, callback) { - if (typeof encoding === 'function') { - callback = encoding - encoding = 'utf8' - } - - const dir = path.dirname(file) - pathExists(dir, (err, itDoes) => { - if (err) return callback(err) - if (itDoes) return fs.writeFile(file, data, encoding, callback) - - mkdir.mkdirs(dir, err => { - if (err) return callback(err) - - fs.writeFile(file, data, encoding, callback) - }) - }) -} - -function outputFileSync (file, ...args) { - const dir = path.dirname(file) - if (fs.existsSync(dir)) { - return fs.writeFileSync(file, ...args) - } - 
mkdir.mkdirsSync(dir) - fs.writeFileSync(file, ...args) -} - -module.exports = { - outputFile: u(outputFile), - outputFileSync -} - - -/***/ }), -/* 518 */, -/* 519 */, -/* 520 */, -/* 521 */, -/* 522 */, -/* 523 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var Type = __webpack_require__(547); - -function resolveJavascriptUndefined() { - return true; -} - -function constructJavascriptUndefined() { - /*eslint-disable no-undefined*/ - return undefined; -} - -function representJavascriptUndefined() { - return ''; -} - -function isUndefined(object) { - return typeof object === 'undefined'; -} - -module.exports = new Type('tag:yaml.org,2002:js/undefined', { - kind: 'scalar', - resolve: resolveJavascriptUndefined, - construct: constructJavascriptUndefined, - predicate: isUndefined, - represent: representJavascriptUndefined -}); - - -/***/ }), -/* 524 */, -/* 525 */, -/* 526 */, -/* 527 */, -/* 528 */, -/* 529 */, -/* 530 */, -/* 531 */, -/* 532 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const path = __importStar(__webpack_require__(622)); -/** - * Creates a specification for a set of files that will be downloaded - * @param artifactName the name of the artifact - * @param artifactEntries a set of container entries that describe that files that make up an artifact - * @param downloadPath the path where the artifact will be downloaded to - * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to - */ -function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { - // use a set for the directory paths so that there are no duplicates - const directories = new Set(); - const specifications = { - rootDownloadLocation: includeRootDirectory - ? path.join(downloadPath, artifactName) - : downloadPath, - directoryStructure: [], - emptyFilesToCreate: [], - filesToDownload: [] - }; - for (const entry of artifactEntries) { - // Ignore artifacts in the container that don't begin with the same name - if (entry.path.startsWith(`${artifactName}/`) || - entry.path.startsWith(`${artifactName}\\`)) { - // normalize all separators to the local OS - const normalizedPathEntry = path.normalize(entry.path); - // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path - const filePath = path.join(downloadPath, includeRootDirectory - ? normalizedPathEntry - : normalizedPathEntry.replace(artifactName, '')); - // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' - // itemType cannot be relied upon. 
The file must be used to determine the directory structure - if (entry.itemType === 'file') { - // Get the directories that we need to create from the filePath for each individual file - directories.add(path.dirname(filePath)); - if (entry.fileLength === 0) { - // An empty file was uploaded, create the empty files locally so that no extra http calls are made - specifications.emptyFilesToCreate.push(filePath); - } - else { - specifications.filesToDownload.push({ - sourceLocation: entry.contentLocation, - targetPath: filePath - }); - } - } - } - } - specifications.directoryStructure = Array.from(directories); - return specifications; -} -exports.getDownloadSpecification = getDownloadSpecification; -//# sourceMappingURL=download-specification.js.map - -/***/ }), -/* 533 */, -/* 534 */, -/* 535 */, -/* 536 */, -/* 537 */, -/* 538 */, -/* 539 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const http = __webpack_require__(605); -const https = __webpack_require__(211); -const pm = __webpack_require__(950); -let tunnel; -var HttpCodes; -(function (HttpCodes) { - HttpCodes[HttpCodes["OK"] = 200] = "OK"; - HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; - HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; - HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; - HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; - HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; - HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; - HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; - HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; - HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; - HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; - HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; - HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; - HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; - HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; - HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; - HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; - HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; - HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; - HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; - HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; - HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; - HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; - HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; - HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; - HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; - HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; -})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); -var Headers; -(function (Headers) { - Headers["Accept"] = "accept"; - Headers["ContentType"] = "content-type"; -})(Headers = exports.Headers || (exports.Headers = {})); -var MediaTypes; -(function (MediaTypes) { - MediaTypes["ApplicationJson"] = "application/json"; -})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); -/** - * Returns the proxy URL, depending upon the supplied url and proxy environment variables. - * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com - */ -function getProxyUrl(serverUrl) { - let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); - return proxyUrl ? proxyUrl.href : ''; -} -exports.getProxyUrl = getProxyUrl; -const HttpRedirectCodes = [ - HttpCodes.MovedPermanently, - HttpCodes.ResourceMoved, - HttpCodes.SeeOther, - HttpCodes.TemporaryRedirect, - HttpCodes.PermanentRedirect -]; -const HttpResponseRetryCodes = [ - HttpCodes.BadGateway, - HttpCodes.ServiceUnavailable, - HttpCodes.GatewayTimeout -]; -const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; -const ExponentialBackoffCeiling = 10; -const ExponentialBackoffTimeSlice = 5; -class HttpClientError extends Error { - constructor(message, statusCode) { - super(message); - this.name = 'HttpClientError'; - this.statusCode = statusCode; - Object.setPrototypeOf(this, HttpClientError.prototype); - } -} -exports.HttpClientError = HttpClientError; -class HttpClientResponse { - constructor(message) { - this.message = message; - } - readBody() { - return new Promise(async (resolve, reject) => { - let output = Buffer.alloc(0); - this.message.on('data', (chunk) => { - output = Buffer.concat([output, chunk]); - }); - this.message.on('end', () => { - resolve(output.toString()); - }); - }); - } -} -exports.HttpClientResponse = HttpClientResponse; -function isHttps(requestUrl) { - let parsedUrl = new URL(requestUrl); - return parsedUrl.protocol === 'https:'; -} -exports.isHttps = isHttps; -class HttpClient { - constructor(userAgent, handlers, requestOptions) { - this._ignoreSslError = false; - this._allowRedirects = true; - this._allowRedirectDowngrade = false; - this._maxRedirects = 50; - this._allowRetries = false; - this._maxRetries = 1; - this._keepAlive = false; - this._disposed = false; - this.userAgent = userAgent; - this.handlers = handlers || []; - this.requestOptions = requestOptions; - if (requestOptions) { - if (requestOptions.ignoreSslError != null) { - this._ignoreSslError = requestOptions.ignoreSslError; - } - this._socketTimeout = requestOptions.socketTimeout; - if (requestOptions.allowRedirects != null) { - this._allowRedirects = requestOptions.allowRedirects; - } - if (requestOptions.allowRedirectDowngrade != null) { - this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; - } - if (requestOptions.maxRedirects != null) { - this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); - } - if (requestOptions.keepAlive != null) { - this._keepAlive = requestOptions.keepAlive; - } - if (requestOptions.allowRetries != null) { - this._allowRetries = requestOptions.allowRetries; - } - if (requestOptions.maxRetries != null) { - this._maxRetries = requestOptions.maxRetries; - } - } - } - options(requestUrl, additionalHeaders) { - return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); - } - get(requestUrl, additionalHeaders) { - return this.request('GET', requestUrl, null, additionalHeaders || {}); - } - del(requestUrl, additionalHeaders) { - return this.request('DELETE', requestUrl, null, additionalHeaders || {}); - } - post(requestUrl, data, additionalHeaders) { - return this.request('POST', requestUrl, data, additionalHeaders || {}); - } - patch(requestUrl, data, additionalHeaders) { - return this.request('PATCH', requestUrl, data, additionalHeaders || {}); - } - put(requestUrl, data, additionalHeaders) { - return this.request('PUT', requestUrl, data, additionalHeaders || {}); - } - head(requestUrl, additionalHeaders) { - return this.request('HEAD', requestUrl, null, additionalHeaders || {}); - } 
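- /*
- Usage sketch (illustrative; the URL and user agent are assumptions), run
- inside an async function:
-   const client = new HttpClient('my-user-agent');
-   const res = await client.get('https://api.github.com');
-   console.log(res.message.statusCode, await res.readBody());
- */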
- sendStream(verb, requestUrl, stream, additionalHeaders) { - return this.request(verb, requestUrl, stream, additionalHeaders); - } - /** - * Gets a typed object from an endpoint - * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise - */ - async getJson(requestUrl, additionalHeaders = {}) { - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - let res = await this.get(requestUrl, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async postJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.post(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async putJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.put(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - async patchJson(requestUrl, obj, additionalHeaders = {}) { - let data = JSON.stringify(obj, null, 2); - additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); - additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); - let res = await this.patch(requestUrl, data, additionalHeaders); - return this._processResponse(res, this.requestOptions); - } - /** - * Makes a raw http request. - * All other methods such as get, post, patch, and request ultimately call this. - * Prefer get, del, post and patch - */ - async request(verb, requestUrl, data, headers) { - if (this._disposed) { - throw new Error('Client has already been disposed.'); - } - let parsedUrl = new URL(requestUrl); - let info = this._prepareRequest(verb, parsedUrl, headers); - // Only perform retries on reads since writes may not be idempotent. - let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 - ? this._maxRetries + 1 - : 1; - let numTries = 0; - let response; - while (numTries < maxTries) { - response = await this.requestRaw(info, data); - // Check if it's an authentication challenge - if (response && - response.message && - response.message.statusCode === HttpCodes.Unauthorized) { - let authenticationHandler; - for (let i = 0; i < this.handlers.length; i++) { - if (this.handlers[i].canHandleAuthentication(response)) { - authenticationHandler = this.handlers[i]; - break; - } - } - if (authenticationHandler) { - return authenticationHandler.handleAuthentication(this, info, data); - } - else { - // We have received an unauthorized response but have no handlers to handle it. - // Let the response return to the caller. 
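- // (The retry loop below only re-attempts OPTIONS/GET/DELETE/HEAD requests,
- // and only when the client was constructed with allowRetries/maxRetries set.)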
- return response; - } - } - let redirectsRemaining = this._maxRedirects; - while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && - this._allowRedirects && - redirectsRemaining > 0) { - const redirectUrl = response.message.headers['location']; - if (!redirectUrl) { - // if there's no location to redirect to, we won't - break; - } - let parsedRedirectUrl = new URL(redirectUrl); - if (parsedUrl.protocol == 'https:' && - parsedUrl.protocol != parsedRedirectUrl.protocol && - !this._allowRedirectDowngrade) { - throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); - } - // we need to finish reading the response before reassigning response - // which will leak the open socket. - await response.readBody(); - // strip authorization header if redirected to a different hostname - if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { - for (let header in headers) { - // header names are case insensitive - if (header.toLowerCase() === 'authorization') { - delete headers[header]; - } - } - } - // let's make the request with the new redirectUrl - info = this._prepareRequest(verb, parsedRedirectUrl, headers); - response = await this.requestRaw(info, data); - redirectsRemaining--; - } - if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { - // If not a retry code, return immediately instead of retrying - return response; - } - numTries += 1; - if (numTries < maxTries) { - await response.readBody(); - await this._performExponentialBackoff(numTries); - } - } - return response; - } - /** - * Needs to be called if keepAlive is set to true in request options. - */ - dispose() { - if (this._agent) { - this._agent.destroy(); - } - this._disposed = true; - } - /** - * Raw request. - * @param info - * @param data - */ - requestRaw(info, data) { - return new Promise((resolve, reject) => { - let callbackForResult = function (err, res) { - if (err) { - reject(err); - } - resolve(res); - }; - this.requestRawWithCallback(info, data, callbackForResult); - }); - } - /** - * Raw request with callback. - * @param info - * @param data - * @param onResult - */ - requestRawWithCallback(info, data, onResult) { - let socket; - if (typeof data === 'string') { - info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); - } - let callbackCalled = false; - let handleResult = (err, res) => { - if (!callbackCalled) { - callbackCalled = true; - onResult(err, res); - } - }; - let req = info.httpModule.request(info.options, (msg) => { - let res = new HttpClientResponse(msg); - handleResult(null, res); - }); - req.on('socket', sock => { - socket = sock; - }); - // If we ever get disconnected, we want the socket to timeout eventually - req.setTimeout(this._socketTimeout || 3 * 60000, () => { - if (socket) { - socket.end(); - } - handleResult(new Error('Request timeout: ' + info.options.path), null); - }); - req.on('error', function (err) { - // err has statusCode property - // res should have headers - handleResult(err, null); - }); - if (data && typeof data === 'string') { - req.write(data, 'utf8'); - } - if (data && typeof data !== 'string') { - data.on('close', function () { - req.end(); - }); - data.pipe(req); - } - else { - req.end(); - } - } - /** - * Gets an http agent. This function is useful when you need an http agent that handles - * routing through a proxy server - depending upon the url and proxy environment variables. 
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com - */ - getAgent(serverUrl) { - let parsedUrl = new URL(serverUrl); - return this._getAgent(parsedUrl); - } - _prepareRequest(method, requestUrl, headers) { - const info = {}; - info.parsedUrl = requestUrl; - const usingSsl = info.parsedUrl.protocol === 'https:'; - info.httpModule = usingSsl ? https : http; - const defaultPort = usingSsl ? 443 : 80; - info.options = {}; - info.options.host = info.parsedUrl.hostname; - info.options.port = info.parsedUrl.port - ? parseInt(info.parsedUrl.port) - : defaultPort; - info.options.path = - (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); - info.options.method = method; - info.options.headers = this._mergeHeaders(headers); - if (this.userAgent != null) { - info.options.headers['user-agent'] = this.userAgent; - } - info.options.agent = this._getAgent(info.parsedUrl); - // gives handlers an opportunity to participate - if (this.handlers) { - this.handlers.forEach(handler => { - handler.prepareRequest(info.options); - }); - } - return info; - } - _mergeHeaders(headers) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - if (this.requestOptions && this.requestOptions.headers) { - return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); - } - return lowercaseKeys(headers || {}); - } - _getExistingOrDefaultHeader(additionalHeaders, header, _default) { - const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); - let clientHeader; - if (this.requestOptions && this.requestOptions.headers) { - clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; - } - return additionalHeaders[header] || clientHeader || _default; - } - _getAgent(parsedUrl) { - let agent; - let proxyUrl = pm.getProxyUrl(parsedUrl); - let useProxy = proxyUrl && proxyUrl.hostname; - if (this._keepAlive && useProxy) { - agent = this._proxyAgent; - } - if (this._keepAlive && !useProxy) { - agent = this._agent; - } - // if agent is already assigned use that agent. - if (!!agent) { - return agent; - } - const usingSsl = parsedUrl.protocol === 'https:'; - let maxSockets = 100; - if (!!this.requestOptions) { - maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; - } - if (useProxy) { - // If using proxy, need tunnel - if (!tunnel) { - tunnel = __webpack_require__(413); - } - const agentOptions = { - maxSockets: maxSockets, - keepAlive: this._keepAlive, - proxy: { - proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, - host: proxyUrl.hostname, - port: proxyUrl.port - } - }; - let tunnelAgent; - const overHttps = proxyUrl.protocol === 'https:'; - if (usingSsl) { - tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; - } - else { - tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; - } - agent = tunnelAgent(agentOptions); - this._proxyAgent = agent; - } - // if reusing agent across request and tunneling agent isn't assigned create a new agent - if (this._keepAlive && !agent) { - const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; - agent = usingSsl ? new https.Agent(options) : new http.Agent(options); - this._agent = agent; - } - // if not using private agent and tunnel agent isn't setup then use global agent - if (!agent) { - agent = usingSsl ? 
https.globalAgent : http.globalAgent; - } - if (usingSsl && this._ignoreSslError) { - // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process - // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options - // we have to cast it to any and change it directly - agent.options = Object.assign(agent.options || {}, { - rejectUnauthorized: false - }); - } - return agent; - } - _performExponentialBackoff(retryNumber) { - retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); - const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); - return new Promise(resolve => setTimeout(() => resolve(), ms)); - } - static dateTimeDeserializer(key, value) { - if (typeof value === 'string') { - let a = new Date(value); - if (!isNaN(a.valueOf())) { - return a; - } - } - return value; - } - async _processResponse(res, options) { - return new Promise(async (resolve, reject) => { - const statusCode = res.message.statusCode; - const response = { - statusCode: statusCode, - result: null, - headers: {} - }; - // not found leads to null obj returned - if (statusCode == HttpCodes.NotFound) { - resolve(response); - } - let obj; - let contents; - // get the result from the body - try { - contents = await res.readBody(); - if (contents && contents.length > 0) { - if (options && options.deserializeDates) { - obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); - } - else { - obj = JSON.parse(contents); - } - response.result = obj; - } - response.headers = res.message.headers; - } - catch (err) { - // Invalid resource (contents not json); leaving result obj null - } - // note that 3xx redirects are handled by the http layer. - if (statusCode > 299) { - let msg; - // if exception/error in body, attempt to get better error - if (obj && obj.message) { - msg = obj.message; - } - else if (contents && contents.length > 0) { - // it may be the case that the exception is in the body message as string - msg = contents; - } - else { - msg = 'Failed request: (' + statusCode + ')'; - } - let err = new HttpClientError(msg, statusCode); - err.result = response.result; - reject(err); - } - else { - resolve(response); - } - }); - } -} -exports.HttpClient = HttpClient; - - -/***/ }), -/* 540 */, -/* 541 */, -/* 542 */, -/* 543 */, -/* 544 */, -/* 545 */ -/***/ (function(module) { - -module.exports = eval("require")("encoding"); - - -/***/ }), -/* 546 */, -/* 547 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var YAMLException = __webpack_require__(844); - -var TYPE_CONSTRUCTOR_OPTIONS = [ - 'kind', - 'resolve', - 'construct', - 'instanceOf', - 'predicate', - 'represent', - 'defaultStyle', - 'styleAliases' -]; - -var YAML_NODE_KINDS = [ - 'scalar', - 'sequence', - 'mapping' -]; - -function compileStyleAliases(map) { - var result = {}; - - if (map !== null) { - Object.keys(map).forEach(function (style) { - map[style].forEach(function (alias) { - result[String(alias)] = style; - }); - }); - } - - return result; -} - -function Type(tag, options) { - options = options || {}; - - Object.keys(options).forEach(function (name) { - if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { - throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); - } - }); - - // TODO: Add tag format check. 
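- /*
- Sketch of a custom type built with this constructor (the '!upper' tag and
- construct body are illustrative assumptions):
-   const upper = new Type('!upper', {
-     kind: 'scalar',
-     construct: data => String(data).toUpperCase()
-   });
- */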
- this.tag = tag; - this.kind = options['kind'] || null; - this.resolve = options['resolve'] || function () { return true; }; - this.construct = options['construct'] || function (data) { return data; }; - this.instanceOf = options['instanceOf'] || null; - this.predicate = options['predicate'] || null; - this.represent = options['represent'] || null; - this.defaultStyle = options['defaultStyle'] || null; - this.styleAliases = compileStyleAliases(options['styleAliases'] || null); - - if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { - throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); - } -} - -module.exports = Type; - - -/***/ }), -/* 548 */, -/* 549 */, -/* 550 */, -/* 551 */, -/* 552 */, -/* 553 */, -/* 554 */, -/* 555 */, -/* 556 */ -/***/ (function(module) { - -function treat(command) { - return `${command} -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -B`; -} - -module.exports = { - treat -}; - - -/***/ }), -/* 557 */, -/* 558 */, -/* 559 */, -/* 560 */, -/* 561 */, -/* 562 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var once = __webpack_require__(49); - -var noop = function() {}; - -var isRequest = function(stream) { - return stream.setHeader && typeof stream.abort === 'function'; -}; - -var isChildProcess = function(stream) { - return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 -}; - -var eos = function(stream, opts, callback) { - if (typeof opts === 'function') return eos(stream, null, opts); - if (!opts) opts = {}; - - callback = once(callback || noop); - - var ws = stream._writableState; - var rs = stream._readableState; - var readable = opts.readable || (opts.readable !== false && stream.readable); - var writable = opts.writable || (opts.writable !== false && stream.writable); - var cancelled = false; - - var onlegacyfinish = function() { - if (!stream.writable) onfinish(); - }; - - var onfinish = function() { - writable = false; - if (!readable) callback.call(stream); - }; - - var onend = function() { - readable = false; - if (!writable) callback.call(stream); - }; - - var onexit = function(exitCode) { - callback.call(stream, exitCode ? 
new Error('exited with error code: ' + exitCode) : null); - }; - - var onerror = function(err) { - callback.call(stream, err); - }; - - var onclose = function() { - process.nextTick(onclosenexttick); - }; - - var onclosenexttick = function() { - if (cancelled) return; - if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); - if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); - }; - - var onrequest = function() { - stream.req.on('finish', onfinish); - }; - - if (isRequest(stream)) { - stream.on('complete', onfinish); - stream.on('abort', onclose); - if (stream.req) onrequest(); - else stream.on('request', onrequest); - } else if (writable && !ws) { // legacy streams - stream.on('end', onlegacyfinish); - stream.on('close', onlegacyfinish); - } - - if (isChildProcess(stream)) stream.on('exit', onexit); - - stream.on('end', onend); - stream.on('finish', onfinish); - if (opts.error !== false) stream.on('error', onerror); - stream.on('close', onclose); - - return function() { - cancelled = true; - stream.removeListener('complete', onfinish); - stream.removeListener('abort', onclose); - stream.removeListener('request', onrequest); - if (stream.req) stream.req.removeListener('finish', onfinish); - stream.removeListener('end', onlegacyfinish); - stream.removeListener('close', onlegacyfinish); - stream.removeListener('finish', onfinish); - stream.removeListener('exit', onexit); - stream.removeListener('end', onend); - stream.removeListener('error', onerror); - stream.removeListener('close', onclose); - }; -}; - -module.exports = eos; - - -/***/ }), -/* 563 */, -/* 564 */, -/* 565 */, -/* 566 */, -/* 567 */, -/* 568 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const path = __webpack_require__(622); -const niceTry = __webpack_require__(948); -const resolveCommand = __webpack_require__(489); -const escape = __webpack_require__(462); -const readShebang = __webpack_require__(389); -const semver = __webpack_require__(280); - -const isWin = process.platform === 'win32'; -const isExecutableRegExp = /\.(?:com|exe)$/i; -const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; - -// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 -const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; - -function detectShebang(parsed) { - parsed.file = resolveCommand(parsed); - - const shebang = parsed.file && readShebang(parsed.file); - - if (shebang) { - parsed.args.unshift(parsed.file); - parsed.command = shebang; - - return resolveCommand(parsed); - } - - return parsed.file; -} - -function parseNonShell(parsed) { - if (!isWin) { - return parsed; - } - - // Detect & add support for shebangs - const commandFile = detectShebang(parsed); - - // We don't need a shell if the command filename is an executable - const needsShell = !isExecutableRegExp.test(commandFile); - - // If a shell is required, use cmd.exe and take care of escaping everything correctly - // Note that `forceShell` is an hidden option used only in tests - if (parsed.options.forceShell || needsShell) { - // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` - // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument - // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, - // we need to 
double escape them - const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); - - // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) - // This is necessary otherwise it will always fail with ENOENT in those cases - parsed.command = path.normalize(parsed.command); - - // Escape command & arguments - parsed.command = escape.command(parsed.command); - parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); - - const shellCommand = [parsed.command].concat(parsed.args).join(' '); - - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.command = process.env.comspec || 'cmd.exe'; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } - - return parsed; -} - -function parseShell(parsed) { - // If node supports the shell option, there's no need to mimic its behavior - if (supportsShellOption) { - return parsed; - } - - // Mimic node shell option - // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 - const shellCommand = [parsed.command].concat(parsed.args).join(' '); - - if (isWin) { - parsed.command = typeof parsed.options.shell === 'string' ? parsed.options.shell : process.env.comspec || 'cmd.exe'; - parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; - parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped - } else { - if (typeof parsed.options.shell === 'string') { - parsed.command = parsed.options.shell; - } else if (process.platform === 'android') { - parsed.command = '/system/bin/sh'; - } else { - parsed.command = '/bin/sh'; - } - - parsed.args = ['-c', shellCommand]; - } - - return parsed; -} - -function parse(command, args, options) { - // Normalize arguments, similar to nodejs - if (args && !Array.isArray(args)) { - options = args; - args = null; - } - - args = args ? args.slice(0) : []; // Clone array to avoid changing the original - options = Object.assign({}, options); // Clone object to avoid changing the original - - // Build our parsed object - const parsed = { - command, - args, - options, - file: undefined, - original: { - command, - args, - }, - }; - - // Delegate further parsing to shell or non-shell - return options.shell ? parseShell(parsed) : parseNonShell(parsed); -} - -module.exports = parse; - - -/***/ }), -/* 569 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -module.exports = rimraf -rimraf.sync = rimrafSync - -var assert = __webpack_require__(357) -var path = __webpack_require__(622) -var fs = __webpack_require__(747) -var glob = undefined -try { - glob = __webpack_require__(120) -} catch (_err) { - // treat glob as optional. 
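- // (Callers that pass literal paths can skip globbing entirely, e.g.
- // rimraf('/tmp/exact-dir', { disableGlob: true }, cb); see defaults() below.)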
-} -var _0666 = parseInt('666', 8) - -var defaultGlobOpts = { - nosort: true, - silent: true -} - -// for EMFILE handling -var timeout = 0 - -var isWindows = (process.platform === "win32") - -function defaults (options) { - var methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(function(m) { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 - options.emfileWait = options.emfileWait || 1000 - if (options.glob === false) { - options.disableGlob = true - } - if (options.disableGlob !== true && glob === undefined) { - throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') - } - options.disableGlob = options.disableGlob || false - options.glob = options.glob || defaultGlobOpts -} - -function rimraf (p, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert.equal(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - var busyTries = 0 - var errState = null - var n = 0 - - if (options.disableGlob || !glob.hasMagic(p)) - return afterGlob(null, [p]) - - options.lstat(p, function (er, stat) { - if (!er) - return afterGlob(null, [p]) - - glob(p, options.glob, afterGlob) - }) - - function next (er) { - errState = errState || er - if (--n === 0) - cb(errState) - } - - function afterGlob (er, results) { - if (er) - return cb(er) - - n = results.length - if (n === 0) - return cb() - - results.forEach(function (p) { - rimraf_(p, options, function CB (er) { - if (er) { - if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && - busyTries < options.maxBusyTries) { - busyTries ++ - var time = busyTries * 100 - // try again, with the same exact callback as this one. - return setTimeout(function () { - rimraf_(p, options, CB) - }, time) - } - - // this one won't happen if graceful-fs is used. - if (er.code === "EMFILE" && timeout < options.emfileWait) { - return setTimeout(function () { - rimraf_(p, options, CB) - }, timeout ++) - } - - // already gone - if (er.code === "ENOENT") er = null - } - - timeout = 0 - next(er) - }) - }) - } -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -function rimraf_ (p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, function (er, st) { - if (er && er.code === "ENOENT") - return cb(null) - - // Windows can EPERM on stat. Life is suffering. 
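- // (fixWinEPERM, defined below, chmods the entry to 0666 and then retries the
- // removal as a file or directory, to work around read-only attributes.)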
- if (er && er.code === "EPERM" && isWindows) - fixWinEPERM(p, options, er, cb) - - if (st && st.isDirectory()) - return rmdir(p, options, er, cb) - - options.unlink(p, function (er) { - if (er) { - if (er.code === "ENOENT") - return cb(null) - if (er.code === "EPERM") - return (isWindows) - ? fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - if (er.code === "EISDIR") - return rmdir(p, options, er, cb) - } - return cb(er) - }) - }) -} - -function fixWinEPERM (p, options, er, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - if (er) - assert(er instanceof Error) - - options.chmod(p, _0666, function (er2) { - if (er2) - cb(er2.code === "ENOENT" ? null : er) - else - options.stat(p, function(er3, stats) { - if (er3) - cb(er3.code === "ENOENT" ? null : er) - else if (stats.isDirectory()) - rmdir(p, options, er, cb) - else - options.unlink(p, cb) - }) - }) -} - -function fixWinEPERMSync (p, options, er) { - assert(p) - assert(options) - if (er) - assert(er instanceof Error) - - try { - options.chmodSync(p, _0666) - } catch (er2) { - if (er2.code === "ENOENT") - return - else - throw er - } - - try { - var stats = options.statSync(p) - } catch (er3) { - if (er3.code === "ENOENT") - return - else - throw er - } - - if (stats.isDirectory()) - rmdirSync(p, options, er) - else - options.unlinkSync(p) -} - -function rmdir (p, options, originalEr, cb) { - assert(p) - assert(options) - if (originalEr) - assert(originalEr instanceof Error) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, function (er) { - if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) - rmkids(p, options, cb) - else if (er && er.code === "ENOTDIR") - cb(originalEr) - else - cb(er) - }) -} - -function rmkids(p, options, cb) { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, function (er, files) { - if (er) - return cb(er) - var n = files.length - if (n === 0) - return options.rmdir(p, cb) - var errState - files.forEach(function (f) { - rimraf(path.join(p, f), options, function (er) { - if (errState) - return - if (er) - return cb(errState = er) - if (--n === 0) - options.rmdir(p, cb) - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -function rimrafSync (p, options) { - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - var results - - if (options.disableGlob || !glob.hasMagic(p)) { - results = [p] - } else { - try { - options.lstatSync(p) - results = [p] - } catch (er) { - results = glob.sync(p, options.glob) - } - } - - if (!results.length) - return - - for (var i = 0; i < results.length; i++) { - var p = results[i] - - try { - var st = options.lstatSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - - // Windows can EPERM on stat. Life is suffering. - if (er.code === "EPERM" && isWindows) - fixWinEPERMSync(p, options, er) - } - - try { - // sunos lets the root user unlink directories, which is... weird. 
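// fixWinEPERM above exists because on Windows an EPERM from unlink/stat is
// usually just the read-only attribute: chmod the entry to 0666, re-stat it,
// then retry as a file or a directory. The same idea as a small async sketch
// (`forceRemove` is an illustrative name, not a bundle API):
const fsp = require('fs').promises
async function forceRemove (p) {
  try {
    await fsp.unlink(p)
  } catch (err) {
    if (err.code !== 'EPERM' || process.platform !== 'win32') throw err
    await fsp.chmod(p, 0o666) // clear the read-only attribute
    const st = await fsp.stat(p)
    return st.isDirectory() ? fsp.rmdir(p) : fsp.unlink(p)
  }
}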
- if (st && st.isDirectory()) - rmdirSync(p, options, null) - else - options.unlinkSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "EPERM") - return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - if (er.code !== "EISDIR") - throw er - - rmdirSync(p, options, er) - } - } -} - -function rmdirSync (p, options, originalEr) { - assert(p) - assert(options) - if (originalEr) - assert(originalEr instanceof Error) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "ENOTDIR") - throw originalEr - if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") - rmkidsSync(p, options) - } -} - -function rmkidsSync (p, options) { - assert(p) - assert(options) - options.readdirSync(p).forEach(function (f) { - rimrafSync(path.join(p, f), options) - }) - - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - var retries = isWindows ? 100 : 1 - var i = 0 - do { - var threw = true - try { - var ret = options.rmdirSync(p, options) - threw = false - return ret - } finally { - if (++i < retries && threw) - continue - } - } while (true) -} - - -/***/ }), -/* 570 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -var esprima; - -// Browserified version does not have esprima -// -// 1. For node.js just require module as deps -// 2. For browser try to require mudule via external AMD system. -// If not found - try to fallback to window.esprima. If not -// found too - then fail to parse. -// -try { - // workaround to exclude package from browserify list. - var _require = require; - esprima = _require('esprima'); -} catch (_) { - /* eslint-disable no-redeclare */ - /* global window */ - if (typeof window !== 'undefined') esprima = window.esprima; -} - -var Type = __webpack_require__(547); - -function resolveJavascriptFunction(data) { - if (data === null) return false; - - try { - var source = '(' + data + ')', - ast = esprima.parse(source, { range: true }); - - if (ast.type !== 'Program' || - ast.body.length !== 1 || - ast.body[0].type !== 'ExpressionStatement' || - (ast.body[0].expression.type !== 'ArrowFunctionExpression' && - ast.body[0].expression.type !== 'FunctionExpression')) { - return false; - } - - return true; - } catch (err) { - return false; - } -} - -function constructJavascriptFunction(data) { - /*jslint evil:true*/ - - var source = '(' + data + ')', - ast = esprima.parse(source, { range: true }), - params = [], - body; - - if (ast.type !== 'Program' || - ast.body.length !== 1 || - ast.body[0].type !== 'ExpressionStatement' || - (ast.body[0].expression.type !== 'ArrowFunctionExpression' && - ast.body[0].expression.type !== 'FunctionExpression')) { - throw new Error('Failed to resolve function'); - } - - ast.body[0].expression.params.forEach(function (param) { - params.push(param.name); - }); - - body = ast.body[0].expression.body.range; - - // Esprima's ranges include the first '{' and the last '}' characters on - // function expressions. So cut them out. 
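// Once the params and the body text are extracted, construction reduces to
// `new Function(params, body)` -- an array of parameter names is accepted
// because it stringifies to a comma-separated list. A self-contained
// illustration of that final step (no esprima required for the sketch):
/* eslint-disable no-new-func */
var add = new Function(['a', 'b'], 'return a + b;') // block body, braces removed
// add(2, 3) === 5
// Arrow bodies that are not a BlockStatement get wrapped as 'return <expr>'
// first, which is what the branch below does with the sliced source range.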
- if (ast.body[0].expression.body.type === 'BlockStatement') { - /*eslint-disable no-new-func*/ - return new Function(params, source.slice(body[0] + 1, body[1] - 1)); - } - // ES6 arrow functions can omit the BlockStatement. In that case, just return - // the body. - /*eslint-disable no-new-func*/ - return new Function(params, 'return ' + source.slice(body[0], body[1])); -} - -function representJavascriptFunction(object /*, style*/) { - return object.toString(); -} - -function isFunction(object) { - return Object.prototype.toString.call(object) === '[object Function]'; -} - -module.exports = new Type('tag:yaml.org,2002:js/function', { - kind: 'scalar', - resolve: resolveJavascriptFunction, - construct: constructJavascriptFunction, - predicate: isFunction, - represent: representJavascriptFunction -}); - - -/***/ }), -/* 571 */, -/* 572 */, -/* 573 */, -/* 574 */, -/* 575 */, -/* 576 */, -/* 577 */, -/* 578 */, -/* 579 */, -/* 580 */, -/* 581 */, -/* 582 */, -/* 583 */, -/* 584 */, -/* 585 */, -/* 586 */, -/* 587 */, -/* 588 */, -/* 589 */, -/* 590 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = __importStar(__webpack_require__(747)); -const core_1 = __webpack_require__(470); -const path_1 = __webpack_require__(622); -const utils_1 = __webpack_require__(870); -/** - * Creates a specification that describes how each file that is part of the artifact will be uploaded - * @param artifactName the name of the artifact being uploaded. 
Used during upload to denote where the artifact is stored on the server - * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file - * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact - */ -function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { - utils_1.checkArtifactName(artifactName); - const specifications = []; - if (!fs.existsSync(rootDirectory)) { - throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); - } - if (!fs.lstatSync(rootDirectory).isDirectory()) { - throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); - } - // Normalize and resolve, this allows for either absolute or relative paths to be used - rootDirectory = path_1.normalize(rootDirectory); - rootDirectory = path_1.resolve(rootDirectory); - /* - Example to demonstrate behavior - - Input: - artifactName: my-artifact - rootDirectory: '/home/user/files/plz-upload' - artifactFiles: [ - '/home/user/files/plz-upload/file1.txt', - '/home/user/files/plz-upload/file2.txt', - '/home/user/files/plz-upload/dir/file3.txt' - ] - - Output: - specifications: [ - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], - ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] - ] - */ - for (let file of artifactFiles) { - if (!fs.existsSync(file)) { - throw new Error(`File ${file} does not exist`); - } - if (!fs.lstatSync(file).isDirectory()) { - // Normalize and resolve, this allows for either absolute or relative paths to be used - file = path_1.normalize(file); - file = path_1.resolve(file); - if (!file.startsWith(rootDirectory)) { - throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); - } - // Check for forbidden characters in file paths that will be rejected during upload - const uploadPath = file.replace(rootDirectory, ''); - utils_1.checkArtifactFilePath(uploadPath); - /* - uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all - be saved in the same container. 
The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts - - path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt - join('artifact-name/', 'file-to-upload.txt') - join('artifact-name/', '/file-to-upload.txt') - join('artifact-name', 'file-to-upload.txt') - join('artifact-name', '/file-to-upload.txt') - */ - specifications.push({ - absoluteFilePath: file, - uploadFilePath: path_1.join(artifactName, uploadPath) - }); - } - else { - // Directories are rejected by the server during upload - core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); - } - } - return specifications; -} -exports.getUploadSpecification = getUploadSpecification; -//# sourceMappingURL=upload-specification.js.map - -/***/ }), -/* 591 */, -/* 592 */, -/* 593 */, -/* 594 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { getTree, getTreeForProject } = __webpack_require__(333); -const { - getOrderedListForTree, - getOrderedListForProject -} = __webpack_require__(15); -const { readDefinitionFile } = __webpack_require__(846); -const { parentChainFromNode } = __webpack_require__(495); -const { treatUrl } = __webpack_require__(830); - -module.exports = { - getTree, - getTreeForProject, - getOrderedListForTree, - getOrderedListForProject, - readDefinitionFile, - parentChainFromNode, - treatUrl -}; - - -/***/ }), -/* 595 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - - -const fs = __webpack_require__(598) -const path = __webpack_require__(622) -const mkdirs = __webpack_require__(884).mkdirs -const pathExists = __webpack_require__(322).pathExists -const utimesMillis = __webpack_require__(946).utimesMillis -const stat = __webpack_require__(127) - -function copy (src, dest, opts, cb) { - if (typeof opts === 'function' && !cb) { - cb = opts - opts = {} - } else if (typeof opts === 'function') { - opts = { filter: opts } - } - - cb = cb || function () {} - opts = opts || {} - - opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now - opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber - - // Warn about using preserveTimestamps on 32-bit node - if (opts.preserveTimestamps && process.arch === 'ia32') { - console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n - see https://github.com/jprichardson/node-fs-extra/issues/269`) - } - - stat.checkPaths(src, dest, 'copy', (err, stats) => { - if (err) return cb(err) - const { srcStat, destStat } = stats - stat.checkParentPaths(src, srcStat, dest, 'copy', err => { - if (err) return cb(err) - if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) - return checkParentDir(destStat, src, dest, opts, cb) - }) - }) -} - -function checkParentDir (destStat, src, dest, opts, cb) { - const destParent = path.dirname(dest) - pathExists(destParent, (err, dirExists) => { - if (err) return cb(err) - if (dirExists) return startCopy(destStat, src, dest, opts, cb) - mkdirs(destParent, err => { - if (err) return cb(err) - return startCopy(destStat, src, dest, opts, cb) - }) - }) -} - -function handleFilter (onInclude, destStat, src, dest, opts, cb) { - Promise.resolve(opts.filter(src, dest)).then(include => { - if (include) return onInclude(destStat, src, dest, opts, cb) - return cb() - }, error => cb(error)) -} - -function startCopy (destStat, src, dest, opts, cb) { - if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) - return getStats(destStat, src, dest, opts, cb) -} - -function getStats (destStat, src, dest, opts, cb) { - const stat = opts.dereference ? fs.stat : fs.lstat - stat(src, (err, srcStat) => { - if (err) return cb(err) - - if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isFile() || - srcStat.isCharacterDevice() || - srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) - else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) - }) -} - -function onFile (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return copyFile(srcStat, src, dest, opts, cb) - return mayCopyFile(srcStat, src, dest, opts, cb) -} - -function mayCopyFile (srcStat, src, dest, opts, cb) { - if (opts.overwrite) { - fs.unlink(dest, err => { - if (err) return cb(err) - return copyFile(srcStat, src, dest, opts, cb) - }) - } else if (opts.errorOnExist) { - return cb(new Error(`'${dest}' already exists`)) - } else return cb() -} - -function copyFile (srcStat, src, dest, opts, cb) { - fs.copyFile(src, dest, err => { - if (err) return cb(err) - if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) - return setDestMode(dest, srcStat.mode, cb) - }) -} - -function handleTimestampsAndMode (srcMode, src, dest, cb) { - // Make sure the file is writable before setting the timestamp - // otherwise open fails with EPERM when invoked with 'r+' - // (through utimes call) - if (fileIsNotWritable(srcMode)) { - return makeFileWritable(dest, srcMode, err => { - if (err) return cb(err) - return setDestTimestampsAndMode(srcMode, src, dest, cb) - }) - } - return setDestTimestampsAndMode(srcMode, src, dest, cb) -} - -function fileIsNotWritable (srcMode) { - return (srcMode & 0o200) === 0 -} - -function makeFileWritable (dest, srcMode, cb) { - return setDestMode(dest, srcMode | 0o200, cb) -} - -function setDestTimestampsAndMode (srcMode, src, dest, cb) { - setDestTimestamps(src, dest, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) -} - -function setDestMode 
(dest, srcMode, cb) { - return fs.chmod(dest, srcMode, cb) -} - -function setDestTimestamps (src, dest, cb) { - // The initial srcStat.atime cannot be trusted - // because it is modified by the read(2) system call - // (See https://nodejs.org/api/fs.html#fs_stat_time_values) - fs.stat(src, (err, updatedSrcStat) => { - if (err) return cb(err) - return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) - }) -} - -function onDir (srcStat, destStat, src, dest, opts, cb) { - if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) - if (destStat && !destStat.isDirectory()) { - return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) - } - return copyDir(src, dest, opts, cb) -} - -function mkDirAndCopy (srcMode, src, dest, opts, cb) { - fs.mkdir(dest, err => { - if (err) return cb(err) - copyDir(src, dest, opts, err => { - if (err) return cb(err) - return setDestMode(dest, srcMode, cb) - }) - }) -} - -function copyDir (src, dest, opts, cb) { - fs.readdir(src, (err, items) => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) -} - -function copyDirItems (items, src, dest, opts, cb) { - const item = items.pop() - if (!item) return cb() - return copyDirItem(items, item, src, dest, opts, cb) -} - -function copyDirItem (items, item, src, dest, opts, cb) { - const srcItem = path.join(src, item) - const destItem = path.join(dest, item) - stat.checkPaths(srcItem, destItem, 'copy', (err, stats) => { - if (err) return cb(err) - const { destStat } = stats - startCopy(destStat, srcItem, destItem, opts, err => { - if (err) return cb(err) - return copyDirItems(items, src, dest, opts, cb) - }) - }) -} - -function onLink (destStat, src, dest, opts, cb) { - fs.readlink(src, (err, resolvedSrc) => { - if (err) return cb(err) - if (opts.dereference) { - resolvedSrc = path.resolve(process.cwd(), resolvedSrc) - } - - if (!destStat) { - return fs.symlink(resolvedSrc, dest, cb) - } else { - fs.readlink(dest, (err, resolvedDest) => { - if (err) { - // dest exists and is a regular file or directory, - // Windows may throw UNKNOWN error. If dest already exists, - // fs throws error anyway, so no need to guard against it here. - if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) - return cb(err) - } - if (opts.dereference) { - resolvedDest = path.resolve(process.cwd(), resolvedDest) - } - if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { - return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) - } - - // do not copy if src is a subdir of dest since unlinking - // dest in this case would result in removing src contents - // and therefore a broken symlink would be created. 
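// The isSrcSubdir guards around here stop self-destructive copies: unlinking a
// destination that contains (or is contained by) the link's own target would
// leave a dangling symlink. The containment test itself is a relative-path
// check; a sketch with an illustrative name (`isInside`, not a bundle API):
const { relative, isAbsolute } = require('path')
function isInside (parent, child) {
  const rel = relative(parent, child)
  return rel !== '' && !rel.startsWith('..') && !isAbsolute(rel)
}
// isInside('/a/b', '/a/b/c') -> true  (refuse the copy)
// isInside('/a/b', '/a/x')   -> false (safe to overwrite the link)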
- if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { - return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`)) - } - return copyLink(resolvedSrc, dest, cb) - }) - } - }) -} - -function copyLink (resolvedSrc, dest, cb) { - fs.unlink(dest, err => { - if (err) return cb(err) - return fs.symlink(resolvedSrc, dest, cb) - }) -} - -module.exports = copy - - -/***/ }), -/* 596 */, -/* 597 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const pathHelper = __webpack_require__(972); -const internal_match_kind_1 = __webpack_require__(327); -const IS_WINDOWS = process.platform === 'win32'; -/** - * Given an array of patterns, returns an array of paths to search. - * Duplicates and paths under other included paths are filtered out. - */ -function getSearchPaths(patterns) { - // Ignore negate patterns - patterns = patterns.filter(x => !x.negate); - // Create a map of all search paths - const searchPathMap = {}; - for (const pattern of patterns) { - const key = IS_WINDOWS - ? pattern.searchPath.toUpperCase() - : pattern.searchPath; - searchPathMap[key] = 'candidate'; - } - const result = []; - for (const pattern of patterns) { - // Check if already included - const key = IS_WINDOWS - ? pattern.searchPath.toUpperCase() - : pattern.searchPath; - if (searchPathMap[key] === 'included') { - continue; - } - // Check for an ancestor search path - let foundAncestor = false; - let tempKey = key; - let parent = pathHelper.dirname(tempKey); - while (parent !== tempKey) { - if (searchPathMap[parent]) { - foundAncestor = true; - break; - } - tempKey = parent; - parent = pathHelper.dirname(tempKey); - } - // Include the search pattern in the result - if (!foundAncestor) { - result.push(pattern.searchPath); - searchPathMap[key] = 'included'; - } - } - return result; -} -exports.getSearchPaths = getSearchPaths; -/** - * Matches the patterns against the path - */ -function match(patterns, itemPath) { - let result = internal_match_kind_1.MatchKind.None; - for (const pattern of patterns) { - if (pattern.negate) { - result &= ~pattern.match(itemPath); - } - else { - result |= pattern.match(itemPath); - } - } - return result; -} -exports.match = match; -/** - * Checks whether to descend further into the directory - */ -function partialMatch(patterns, itemPath) { - return patterns.some(x => !x.negate && x.partialMatch(itemPath)); -} -exports.partialMatch = partialMatch; -//# sourceMappingURL=internal-pattern-helper.js.map - -/***/ }), -/* 598 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -var fs = __webpack_require__(747) -var polyfills = __webpack_require__(250) -var legacy = __webpack_require__(466) -var clone = __webpack_require__(943) - -var util = __webpack_require__(669) - -/* istanbul ignore next - node 0.x polyfill */ -var gracefulQueue -var previousSymbol - -/* istanbul ignore else - node 0.x polyfill */ -if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { - gracefulQueue = Symbol.for('graceful-fs.queue') - // This is used in testing by future versions - previousSymbol = Symbol.for('graceful-fs.previous') -} else { - gracefulQueue = '___graceful-fs.queue' - previousSymbol = '___graceful-fs.previous' -} - -function noop () {} - -function publishQueue(context, queue) { - Object.defineProperty(context, gracefulQueue, { - get: function() { - return queue - } - }) -} - -var debug = noop -if (util.debuglog) - debug = 
util.debuglog('gfs4') -else if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) - debug = function() { - var m = util.format.apply(util, arguments) - m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') - console.error(m) - } - -// Once time initialization -if (!fs[gracefulQueue]) { - // This queue can be shared by multiple loaded instances - var queue = global[gracefulQueue] || [] - publishQueue(fs, queue) - - // Patch fs.close/closeSync to shared queue version, because we need - // to retry() whenever a close happens *anywhere* in the program. - // This is essential when multiple graceful-fs instances are - // in play at the same time. - fs.close = (function (fs$close) { - function close (fd, cb) { - return fs$close.call(fs, fd, function (err) { - // This function uses the graceful-fs shared queue - if (!err) { - retry() - } - - if (typeof cb === 'function') - cb.apply(this, arguments) - }) - } - - Object.defineProperty(close, previousSymbol, { - value: fs$close - }) - return close - })(fs.close) - - fs.closeSync = (function (fs$closeSync) { - function closeSync (fd) { - // This function uses the graceful-fs shared queue - fs$closeSync.apply(fs, arguments) - retry() - } - - Object.defineProperty(closeSync, previousSymbol, { - value: fs$closeSync - }) - return closeSync - })(fs.closeSync) - - if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { - process.on('exit', function() { - debug(fs[gracefulQueue]) - __webpack_require__(357).equal(fs[gracefulQueue].length, 0) - }) - } -} - -if (!global[gracefulQueue]) { - publishQueue(global, fs[gracefulQueue]); -} - -module.exports = patch(clone(fs)) -if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { - module.exports = patch(fs) - fs.__patched = true; -} - -function patch (fs) { - // Everything that references the open() function needs to be in here - polyfills(fs) - fs.gracefulify = patch - - fs.createReadStream = createReadStream - fs.createWriteStream = createWriteStream - var fs$readFile = fs.readFile - fs.readFile = readFile - function readFile (path, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$readFile(path, options, cb) - - function go$readFile (path, options, cb) { - return fs$readFile(path, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readFile, [path, options, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) - } - } - - var fs$writeFile = fs.writeFile - fs.writeFile = writeFile - function writeFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$writeFile(path, data, options, cb) - - function go$writeFile (path, data, options, cb) { - return fs$writeFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$writeFile, [path, data, options, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) - } - } - - var fs$appendFile = fs.appendFile - if (fs$appendFile) - fs.appendFile = appendFile - function appendFile (path, data, options, cb) { - if (typeof options === 'function') - cb = options, options = null - - return go$appendFile(path, data, options, cb) - - function go$appendFile (path, data, options, cb) { - return fs$appendFile(path, data, options, function (err) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$appendFile, [path, data, options, cb]]) - else { - if (typeof cb === 
'function') - cb.apply(this, arguments) - retry() - } - }) - } - } - - var fs$readdir = fs.readdir - fs.readdir = readdir - function readdir (path, options, cb) { - var args = [path] - if (typeof options !== 'function') { - args.push(options) - } else { - cb = options - } - args.push(go$readdir$cb) - - return go$readdir(args) - - function go$readdir$cb (err, files) { - if (files && files.sort) - files.sort() - - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$readdir, [args]]) - - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - } - } - - function go$readdir (args) { - return fs$readdir.apply(fs, args) - } - - if (process.version.substr(0, 4) === 'v0.8') { - var legStreams = legacy(fs) - ReadStream = legStreams.ReadStream - WriteStream = legStreams.WriteStream - } - - var fs$ReadStream = fs.ReadStream - if (fs$ReadStream) { - ReadStream.prototype = Object.create(fs$ReadStream.prototype) - ReadStream.prototype.open = ReadStream$open - } - - var fs$WriteStream = fs.WriteStream - if (fs$WriteStream) { - WriteStream.prototype = Object.create(fs$WriteStream.prototype) - WriteStream.prototype.open = WriteStream$open - } - - Object.defineProperty(fs, 'ReadStream', { - get: function () { - return ReadStream - }, - set: function (val) { - ReadStream = val - }, - enumerable: true, - configurable: true - }) - Object.defineProperty(fs, 'WriteStream', { - get: function () { - return WriteStream - }, - set: function (val) { - WriteStream = val - }, - enumerable: true, - configurable: true - }) - - // legacy names - var FileReadStream = ReadStream - Object.defineProperty(fs, 'FileReadStream', { - get: function () { - return FileReadStream - }, - set: function (val) { - FileReadStream = val - }, - enumerable: true, - configurable: true - }) - var FileWriteStream = WriteStream - Object.defineProperty(fs, 'FileWriteStream', { - get: function () { - return FileWriteStream - }, - set: function (val) { - FileWriteStream = val - }, - enumerable: true, - configurable: true - }) - - function ReadStream (path, options) { - if (this instanceof ReadStream) - return fs$ReadStream.apply(this, arguments), this - else - return ReadStream.apply(Object.create(ReadStream.prototype), arguments) - } - - function ReadStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - if (that.autoClose) - that.destroy() - - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - that.read() - } - }) - } - - function WriteStream (path, options) { - if (this instanceof WriteStream) - return fs$WriteStream.apply(this, arguments), this - else - return WriteStream.apply(Object.create(WriteStream.prototype), arguments) - } - - function WriteStream$open () { - var that = this - open(that.path, that.flags, that.mode, function (err, fd) { - if (err) { - that.destroy() - that.emit('error', err) - } else { - that.fd = fd - that.emit('open', fd) - } - }) - } - - function createReadStream (path, options) { - return new fs.ReadStream(path, options) - } - - function createWriteStream (path, options) { - return new fs.WriteStream(path, options) - } - - var fs$open = fs.open - fs.open = open - function open (path, flags, mode, cb) { - if (typeof mode === 'function') - cb = mode, mode = null - - return go$open(path, flags, mode, cb) - - function go$open (path, flags, mode, cb) { - return fs$open(path, flags, mode, function (err, fd) { - if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) - enqueue([go$open, 
[path, flags, mode, cb]]) - else { - if (typeof cb === 'function') - cb.apply(this, arguments) - retry() - } - }) - } - } - - return fs -} - -function enqueue (elem) { - debug('ENQUEUE', elem[0].name, elem[1]) - fs[gracefulQueue].push(elem) -} - -function retry () { - var elem = fs[gracefulQueue].shift() - if (elem) { - debug('RETRY', elem[0].name, elem[1]) - elem[0].apply(null, elem[1]) - } -} - - -/***/ }), -/* 599 */, -/* 600 */, -/* 601 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -Object.defineProperty(exports, "__esModule", { value: true }); -const core = __webpack_require__(470); -/** - * Returns a copy with defaults filled in. - */ -function getOptions(copy) { - const result = { - followSymbolicLinks: true, - implicitDescendants: true, - omitBrokenSymbolicLinks: true - }; - if (copy) { - if (typeof copy.followSymbolicLinks === 'boolean') { - result.followSymbolicLinks = copy.followSymbolicLinks; - core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); - } - if (typeof copy.implicitDescendants === 'boolean') { - result.implicitDescendants = copy.implicitDescendants; - core.debug(`implicitDescendants '${result.implicitDescendants}'`); - } - if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { - result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; - core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); - } - } - return result; -} -exports.getOptions = getOptions; -//# sourceMappingURL=internal-glob-options-helper.js.map - -/***/ }), -/* 602 */, -/* 603 */, -/* 604 */, -/* 605 */ -/***/ (function(module) { - -module.exports = require("http"); - -/***/ }), -/* 606 */, -/* 607 */, -/* 608 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { - -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = __importStar(__webpack_require__(747)); -const core = __importStar(__webpack_require__(470)); -const tmp = __importStar(__webpack_require__(875)); -const stream = __importStar(__webpack_require__(794)); -const utils_1 = __webpack_require__(870); -const config_variables_1 = __webpack_require__(401); -const util_1 = __webpack_require__(669); -const url_1 = __webpack_require__(835); -const perf_hooks_1 = __webpack_require__(630); -const status_reporter_1 = __webpack_require__(176); -const http_manager_1 = __webpack_require__(452); -const upload_gzip_1 = __webpack_require__(647); -const stat = util_1.promisify(fs.stat); -class UploadHttpClient { - constructor() { - this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); - this.statusReporter = new status_reporter_1.StatusReporter(10000); - } - /** - * Creates a file container for the new artifact in the remote blob storage/file service - * @param {string} artifactName Name of the artifact being created - * @returns The response from the Artifact Service if the file container was successfully created - */ - createArtifactInFileContainer(artifactName) { - return __awaiter(this, void 0, void 0, function* () { - const parameters = { - Type: 'actions_storage', - Name: artifactName - }; - const data = JSON.stringify(parameters, null, 2); - const artifactUrl = utils_1.getArtifactUrl(); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.uploadHttpManager.getClient(0); - const headers = utils_1.getUploadHeaders('application/json', false); - const rawResponse = yield client.post(artifactUrl, data, headers); - const body = yield rawResponse.readBody(); - if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) { - return JSON.parse(body); - } - else if (utils_1.isForbiddenStatusCode(rawResponse.message.statusCode)) { - // if a 403 is returned when trying to create a file container, the customer has exceeded - // their storage quota so no new artifact containers can be created - throw new Error(`Artifact storage quota has been hit. 
Unable to upload any new artifacts`); - } - else { - utils_1.displayHttpDiagnostics(rawResponse); - throw new Error(`Unable to create a container for the artifact ${artifactName} at ${artifactUrl}`); - } - }); - } - /** - * Concurrently upload all of the files in chunks - * @param {string} uploadUrl Base Url for the artifact that was created - * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded - * @returns The size of all the files uploaded in bytes - */ - uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { - return __awaiter(this, void 0, void 0, function* () { - const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); - const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); - core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); - const parameters = []; - // by default, file uploads will continue if there is an error unless specified differently in the options - let continueOnError = true; - if (options) { - if (options.continueOnError === false) { - continueOnError = false; - } - } - // prepare the necessary parameters to upload all the files - for (const file of filesToUpload) { - const resourceUrl = new url_1.URL(uploadUrl); - resourceUrl.searchParams.append('itemPath', file.uploadFilePath); - parameters.push({ - file: file.absoluteFilePath, - resourceUrl: resourceUrl.toString(), - maxChunkSize: MAX_CHUNK_SIZE, - continueOnError - }); - } - const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; - const failedItemsToReport = []; - let currentFile = 0; - let completedFiles = 0; - let uploadFileSize = 0; - let totalFileSize = 0; - let abortPendingFileUploads = false; - this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length); - this.statusReporter.start(); - // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors - yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () { - while (currentFile < filesToUpload.length) { - const currentFileParameters = parameters[currentFile]; - currentFile += 1; - if (abortPendingFileUploads) { - failedItemsToReport.push(currentFileParameters.file); - continue; - } - const startTime = perf_hooks_1.performance.now(); - const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); - if (core.isDebug()) { - core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); - } - uploadFileSize += uploadFileResult.successfulUploadSize; - totalFileSize += uploadFileResult.totalSize; - if (uploadFileResult.isSuccess === false) { - failedItemsToReport.push(currentFileParameters.file); - if (!continueOnError) { - // fail fast - core.error(`aborting artifact upload`); - abortPendingFileUploads = true; - } - } - this.statusReporter.incrementProcessedCount(); - } - }))); - this.statusReporter.stop(); - // done uploading, safety dispose all connections - this.uploadHttpManager.disposeAndReplaceAllClients(); - core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); - return { - uploadSize: uploadFileSize, - totalSize: totalFileSize, - failedItems: failedItemsToReport - }; - }); - } - /** - * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space. 
- * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls - * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls - * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded - * @returns The size of the file that was uploaded in bytes along with any failed uploads - */ - uploadFileAsync(httpClientIndex, parameters) { - return __awaiter(this, void 0, void 0, function* () { - const totalFileSize = (yield stat(parameters.file)).size; - let offset = 0; - let isUploadSuccessful = true; - let failedChunkSizes = 0; - let uploadFileSize = 0; - let isGzip = true; - // the file that is being uploaded is less than 64k in size, to increase throughput and to minimize disk I/O - // for creating a new GZip file, an in-memory buffer is used for compression - if (totalFileSize < 65536) { - const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); - //An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, - // it will not properly get reset to the start of the stream if a chunk upload needs to be retried - let openUploadStream; - if (totalFileSize < buffer.byteLength) { - // compression did not help with reducing the size, use a readable stream from the original file for upload - openUploadStream = () => fs.createReadStream(parameters.file); - isGzip = false; - uploadFileSize = totalFileSize; - } - else { - // create a readable stream using a PassThrough stream that is both readable and writable - openUploadStream = () => { - const passThrough = new stream.PassThrough(); - passThrough.end(buffer); - return passThrough; - }; - uploadFileSize = buffer.byteLength; - } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize); - if (!result) { - // chunk failed to upload - isUploadSuccessful = false; - failedChunkSizes += uploadFileSize; - core.warning(`Aborting upload for ${parameters.file} due to failure`); - } - return { - isSuccess: isUploadSuccessful, - successfulUploadSize: uploadFileSize - failedChunkSizes, - totalSize: totalFileSize - }; - } - else { - // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the - // npm tmp-promise package and this file gets used to create a GZipped file - const tempFile = yield tmp.file(); - // create a GZip file of the original file being uploaded, the original file should not be modified in any way - uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); - let uploadFilePath = tempFile.path; - // compression did not help with size reduction, use the original file for upload and delete the temp GZip file - if (totalFileSize < uploadFileSize) { - uploadFileSize = totalFileSize; - uploadFilePath = parameters.file; - isGzip = false; - } - let abortFileUpload = false; - // upload only a single chunk at a time - while (offset < uploadFileSize) { - const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); - // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status - if (uploadFileSize > 104857600) { - this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize); - } - const start = offset; - const end = offset + chunkSize - 1; - offset += parameters.maxChunkSize; - if 
(abortFileUpload) { - // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed - failedChunkSizes += chunkSize; - continue; - } - const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { - start, - end, - autoClose: false - }), start, end, uploadFileSize, isGzip, totalFileSize); - if (!result) { - // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was - // successfully uploaded so the server may report a different size for what was uploaded - isUploadSuccessful = false; - failedChunkSizes += chunkSize; - core.warning(`Aborting upload for ${parameters.file} due to failure`); - abortFileUpload = true; - } - } - // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by - // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about - yield tempFile.cleanup(); - return { - isSuccess: isUploadSuccessful, - successfulUploadSize: uploadFileSize - failedChunkSizes, - totalSize: totalFileSize - }; - } - }); - } - /** - * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code - * indicates a retryable status, we try to upload the chunk as well - * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls - * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to - * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded - * @param {number} start Starting byte index of file that the chunk belongs to - * @param {number} end Ending byte index of file that the chunk belongs to - * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded - * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream - * @param {number} totalFileSize Original total size of the file that is being uploaded - * @returns if the chunk was successfully uploaded - */ - uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { - return __awaiter(this, void 0, void 0, function* () { - // prepare all the necessary headers before making any http call - const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize)); - const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { - const client = this.uploadHttpManager.getClient(httpClientIndex); - return yield client.sendStream('PUT', resourceUrl, openStream(), headers); - }); - let retryCount = 0; - const retryLimit = config_variables_1.getRetryLimit(); - // Increments the current retry count and then checks if the retry limit has been reached - // If there have been too many retries, fail so the download stops - const incrementAndCheckRetryLimit = (response) => { - retryCount++; - if (retryCount > retryLimit) { - if (response) { - utils_1.displayHttpDiagnostics(response); - } - core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); - return true; - } - return false; - }; - const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { - this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); - if (retryAfterValue) { - 
core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); - yield new Promise(resolve => setTimeout(resolve, retryAfterValue)); - } - else { - const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); - core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); - yield new Promise(resolve => setTimeout(resolve, backoffTime)); - } - core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); - return; - }); - // allow for failed chunks to be retried multiple times - while (retryCount <= retryLimit) { - let response; - try { - response = yield uploadChunkRequest(); - } - catch (error) { - // if an error is caught, it is usually indicative of a timeout so retry the upload - core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); - // eslint-disable-next-line no-console - console.log(error); - if (incrementAndCheckRetryLimit()) { - return false; - } - yield backOff(); - continue; - } - // Always read the body of the response. There is potential for a resource leak if the body is not read which will - // result in the connection remaining open along with unintended consequences when trying to dispose of the client - yield response.readBody(); - if (utils_1.isSuccessStatusCode(response.message.statusCode)) { - return true; - } - else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { - core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); - if (incrementAndCheckRetryLimit(response)) { - return false; - } - utils_1.isThrottledStatusCode(response.message.statusCode) - ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) - : yield backOff(); - } - else { - core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); - utils_1.displayHttpDiagnostics(response); - return false; - } - } - return false; - }); - } - /** - * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. 
- * Updating the size indicates that we are done uploading all the contents of the artifact - */ - patchArtifactSize(size, artifactName) { - return __awaiter(this, void 0, void 0, function* () { - const headers = utils_1.getUploadHeaders('application/json', false); - const resourceUrl = new url_1.URL(utils_1.getArtifactUrl()); - resourceUrl.searchParams.append('artifactName', artifactName); - const parameters = { Size: size }; - const data = JSON.stringify(parameters, null, 2); - core.debug(`URL is ${resourceUrl.toString()}`); - // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately - const client = this.uploadHttpManager.getClient(0); - const response = yield client.patch(resourceUrl.toString(), data, headers); - const body = yield response.readBody(); - if (utils_1.isSuccessStatusCode(response.message.statusCode)) { - core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); - } - else if (response.message.statusCode === 404) { - throw new Error(`An Artifact with the name ${artifactName} was not found`); - } - else { - utils_1.displayHttpDiagnostics(response); - core.info(body); - throw new Error(`Unable to finish uploading artifact ${artifactName} to ${resourceUrl}`); - } - }); - } -} -exports.UploadHttpClient = UploadHttpClient; -//# sourceMappingURL=upload-http-client.js.map + } +} + +module.exports = { executeBuild, executeBuildSpecificCommand }; + + +/***/ }), +/* 12 */, +/* 13 */, +/* 14 */, +/* 15 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { logger } = __webpack_require__(79); +const { + prepareEnv, + createGithubInformationObject, + getEvent +} = __webpack_require__(8); +const { start } = __webpack_require__(754); +const { createCommonConfig } = __webpack_require__(668); +const { getProcessEnvVariable } = __webpack_require__(867); +const fse = __webpack_require__(232); + +/** + * Executes single flow + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + * @param {Object} pullRequestData information from pull request event + * @param {String} rootFolder path to store flow data/projects + * @param {Boolean} isArchiveArtifacts + */ +async function execute( + token, + octokit, + env, + eventData, + rootFolder, + isArchiveArtifacts +) { + const githubInformation = createGithubInformationObject(eventData, env); + const config = await createCommonConfig(githubInformation, rootFolder, env); + const context = { token, octokit, config }; + await start(context, isArchiveArtifacts); +} + +/** + * Prepares execution when this is triggered from a github action's event + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + */ +async function executeFromEvent(token, octokit, env) { + const eventDataStr = await fse.readFile( + getProcessEnvVariable("GITHUB_EVENT_PATH"), + "utf8" + ); + const eventData = JSON.parse(eventDataStr); + await execute(token, octokit, env, eventData, undefined, true); +} + +/** + * Prepares execution when this is triggered from command line + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + * @param {String} rootFolder path to store flow data/projects + * @param {String} eventUrl event url + */ +async function executeLocally(token, octokit, env, rootFolder, eventUrl) { + 
logger.info(`Executing pull request flow for ${eventUrl} in ${rootFolder}`); + + const eventData = await getEvent(octokit, eventUrl); + prepareEnv(env, eventUrl, eventData); + await execute(token, octokit, env, eventData, rootFolder, false); +} + +module.exports = { executeLocally, executeFromEvent }; + + +/***/ }), +/* 16 */ +/***/ (function(module) { + +module.exports = require("tls"); + +/***/ }), +/* 17 */, +/* 18 */ +/***/ (function(module) { + +module.exports = eval("require")("encoding"); + + +/***/ }), +/* 19 */, +/* 20 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const cp = __webpack_require__(129); +const parse = __webpack_require__(568); +const enoent = __webpack_require__(881); + +function spawn(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const spawned = cp.spawn(parsed.command, parsed.args, parsed.options); + + // Hook into child process "exit" event to emit an error if the command + // does not exists, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + enoent.hookChildProcess(spawned, parsed); + + return spawned; +} + +function spawnSync(command, args, options) { + // Parse the arguments + const parsed = parse(command, args, options); + + // Spawn the child process + const result = cp.spawnSync(parsed.command, parsed.args, parsed.options); + + // Analyze if the command does not exist, see: https://github.com/IndigoUnited/node-cross-spawn/issues/16 + result.error = result.error || enoent.verifyENOENTSync(result.status, parsed); + + return result; +} + +module.exports = spawn; +module.exports.spawn = spawn; +module.exports.sync = spawnSync; + +module.exports._parse = parse; +module.exports._enoent = enoent; + + +/***/ }), +/* 21 */, +/* 22 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const http = __webpack_require__(605); +const https = __webpack_require__(211); + +function checkUrlExist(url) { + return new Promise(resolve => { + (url.startsWith("https://") ? https : http).get(url, response => { + resolve(200 === response.statusCode); + }); + }); +} + +module.exports = { + checkUrlExist +}; + + +/***/ }), +/* 23 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Standard YAML's JSON schema. +// http://www.yaml.org/spec/1.2/spec.html#id2803231 +// +// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. +// So, this schema is not such strict as defined in the YAML specification. +// It allows numbers in binary notaion, use `Null` and `NULL` as `null`, etc. 
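// Usage sketch for the schema assembled below, written against the public
// js-yaml API (yaml.load with a schema option) as a stand-alone script, not
// as part of this bundle. Per the note above, the relaxed implicit types
// resolve binary-notation integers and the NULL/Null spellings of null:
const jsyaml = require('js-yaml')
const doc = jsyaml.load('flags: 0b1010\nvalue: NULL', { schema: jsyaml.JSON_SCHEMA })
// doc -> { flags: 10, value: null }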
+ + + + + +var Schema = __webpack_require__(43); + + +module.exports = new Schema({ + include: [ + __webpack_require__(581) + ], + implicit: [ + __webpack_require__(809), + __webpack_require__(228), + __webpack_require__(44), + __webpack_require__(417) + ] +}); + + +/***/ }), +/* 24 */, +/* 25 */, +/* 26 */, +/* 27 */, +/* 28 */, +/* 29 */, +/* 30 */, +/* 31 */, +/* 32 */, +/* 33 */, +/* 34 */, +/* 35 */, +/* 36 */, +/* 37 */, +/* 38 */, +/* 39 */ +/***/ (function(module) { + +"use strict"; + +module.exports = opts => { + opts = opts || {}; + + const env = opts.env || process.env; + const platform = opts.platform || process.platform; + + if (platform !== 'win32') { + return 'PATH'; + } + + return Object.keys(env).find(x => x.toUpperCase() === 'PATH') || 'Path'; +}; + + +/***/ }), +/* 40 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; + } + + if (typeof process === "object" && "version" in process) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + } + + return ""; +} + +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 41 */, +/* 42 */, +/* 43 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +/*eslint-disable max-len*/ + +var common = __webpack_require__(740); +var YAMLException = __webpack_require__(556); +var Type = __webpack_require__(945); + + +function compileList(schema, name, result) { + var exclude = []; + + schema.include.forEach(function (includedSchema) { + result = compileList(includedSchema, name, result); + }); + + schema[name].forEach(function (currentType) { + result.forEach(function (previousType, previousIndex) { + if (previousType.tag === currentType.tag && previousType.kind === currentType.kind) { + exclude.push(previousIndex); + } + }); + + result.push(currentType); + }); + + return result.filter(function (type, index) { + return exclude.indexOf(index) === -1; + }); +} + + +function compileMap(/* lists... */) { + var result = { + scalar: {}, + sequence: {}, + mapping: {}, + fallback: {} + }, index, length; + + function collectType(type) { + result[type.kind][type.tag] = result['fallback'][type.tag] = type; + } + + for (index = 0, length = arguments.length; index < length; index += 1) { + arguments[index].forEach(collectType); + } + return result; +} + + +function Schema(definition) { + this.include = definition.include || []; + this.implicit = definition.implicit || []; + this.explicit = definition.explicit || []; + + this.implicit.forEach(function (type) { + if (type.loadKind && type.loadKind !== 'scalar') { + throw new YAMLException('There is a non-scalar type in the implicit list of a schema. 
Implicit resolving of such types is not supported.'); + } + }); + + this.compiledImplicit = compileList(this, 'implicit', []); + this.compiledExplicit = compileList(this, 'explicit', []); + this.compiledTypeMap = compileMap(this.compiledImplicit, this.compiledExplicit); +} + + +Schema.DEFAULT = null; + + +Schema.create = function createSchema() { + var schemas, types; + + switch (arguments.length) { + case 1: + schemas = Schema.DEFAULT; + types = arguments[0]; + break; + + case 2: + schemas = arguments[0]; + types = arguments[1]; + break; + + default: + throw new YAMLException('Wrong number of arguments for Schema.create function'); + } + + schemas = common.toArray(schemas); + types = common.toArray(types); + + if (!schemas.every(function (schema) { return schema instanceof Schema; })) { + throw new YAMLException('Specified list of super schemas (or a single Schema object) contains a non-Schema object.'); + } + + if (!types.every(function (type) { return type instanceof Type; })) { + throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); + } + + return new Schema({ + include: schemas, + explicit: types + }); +}; + + +module.exports = Schema; + + +/***/ }), +/* 44 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var common = __webpack_require__(740); +var Type = __webpack_require__(945); + +function isHexCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) || + ((0x41/* A */ <= c) && (c <= 0x46/* F */)) || + ((0x61/* a */ <= c) && (c <= 0x66/* f */)); +} + +function isOctCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x37/* 7 */)); +} + +function isDecCode(c) { + return ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)); +} + +function resolveYamlInteger(data) { + if (data === null) return false; + + var max = data.length, + index = 0, + hasDigits = false, + ch; + + if (!max) return false; + + ch = data[index]; -/***/ }), -/* 609 */, -/* 610 */, -/* 611 */, -/* 612 */, -/* 613 */, -/* 614 */ -/***/ (function(module) { + // sign + if (ch === '-' || ch === '+') { + ch = data[++index]; + } -module.exports = require("events"); + if (ch === '0') { + // 0 + if (index + 1 === max) return true; + ch = data[++index]; -/***/ }), -/* 615 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + // base 2, base 8, base 16 -"use strict"; + if (ch === 'b') { + // base 2 + index++; + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (ch !== '0' && ch !== '1') return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; + } -const u = __webpack_require__(0).fromCallback -const fs = __webpack_require__(598) -const path = __webpack_require__(622) -const mkdir = __webpack_require__(884) -const remove = __webpack_require__(723) -const emptyDir = u(function emptyDir (dir, callback) { - callback = callback || function () {} - fs.readdir(dir, (err, items) => { - if (err) return mkdir.mkdirs(dir, callback) + if (ch === 'x') { + // base 16 + index++; - items = items.map(item => path.join(dir, item)) + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isHexCode(data.charCodeAt(index))) return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; + } - deleteItem() + // base 8 + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (!isOctCode(data.charCodeAt(index))) return false; + hasDigits = true; + } + return hasDigits && ch !== '_'; + } - function deleteItem () { - const item = 
items.pop() - if (!item) return callback() - remove.remove(item, err => { - if (err) return callback(err) - deleteItem() - }) + // base 10 (except 0) or base 60 + + // value should not start with `_`; + if (ch === '_') return false; + + for (; index < max; index++) { + ch = data[index]; + if (ch === '_') continue; + if (ch === ':') break; + if (!isDecCode(data.charCodeAt(index))) { + return false; } + hasDigits = true; + } + + // Should have digits and should not end with `_` + if (!hasDigits || ch === '_') return false; + + // if !base60 - done; + if (ch !== ':') return true; + + // base60 almost not used, no needs to optimize + return /^(:[0-5]?[0-9])+$/.test(data.slice(index)); +} + +function constructYamlInteger(data) { + var value = data, sign = 1, ch, base, digits = []; + + if (value.indexOf('_') !== -1) { + value = value.replace(/_/g, ''); + } + + ch = value[0]; + + if (ch === '-' || ch === '+') { + if (ch === '-') sign = -1; + value = value.slice(1); + ch = value[0]; + } + + if (value === '0') return 0; + + if (ch === '0') { + if (value[1] === 'b') return sign * parseInt(value.slice(2), 2); + if (value[1] === 'x') return sign * parseInt(value, 16); + return sign * parseInt(value, 8); + } + + if (value.indexOf(':') !== -1) { + value.split(':').forEach(function (v) { + digits.unshift(parseInt(v, 10)); + }); + + value = 0; + base = 1; + + digits.forEach(function (d) { + value += (d * base); + base *= 60; + }); + + return sign * value; + + } + + return sign * parseInt(value, 10); +} + +function isInteger(object) { + return (Object.prototype.toString.call(object)) === '[object Number]' && + (object % 1 === 0 && !common.isNegativeZero(object)); +} + +module.exports = new Type('tag:yaml.org,2002:int', { + kind: 'scalar', + resolve: resolveYamlInteger, + construct: constructYamlInteger, + predicate: isInteger, + represent: { + binary: function (obj) { return obj >= 0 ? '0b' + obj.toString(2) : '-0b' + obj.toString(2).slice(1); }, + octal: function (obj) { return obj >= 0 ? '0' + obj.toString(8) : '-0' + obj.toString(8).slice(1); }, + decimal: function (obj) { return obj.toString(10); }, + /* eslint-disable max-len */ + hexadecimal: function (obj) { return obj >= 0 ? 
'0x' + obj.toString(16).toUpperCase() : '-0x' + obj.toString(16).toUpperCase().slice(1); } + }, + defaultStyle: 'decimal', + styleAliases: { + binary: [ 2, 'bin' ], + octal: [ 8, 'oct' ], + decimal: [ 10, 'dec' ], + hexadecimal: [ 16, 'hex' ] + } +}); + + +/***/ }), +/* 45 */, +/* 46 */, +/* 47 */, +/* 48 */, +/* 49 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var wrappy = __webpack_require__(293) +module.exports = wrappy(once) +module.exports.strict = wrappy(onceStrict) + +once.proto = once(function () { + Object.defineProperty(Function.prototype, 'once', { + value: function () { + return once(this) + }, + configurable: true + }) + + Object.defineProperty(Function.prototype, 'onceStrict', { + value: function () { + return onceStrict(this) + }, + configurable: true }) }) -function emptyDirSync (dir) { - let items - try { - items = fs.readdirSync(dir) - } catch { - return mkdir.mkdirsSync(dir) +function once (fn) { + var f = function () { + if (f.called) return f.value + f.called = true + return f.value = fn.apply(this, arguments) + } + f.called = false + return f +} + +function onceStrict (fn) { + var f = function () { + if (f.called) + throw new Error(f.onceError) + f.called = true + return f.value = fn.apply(this, arguments) + } + var name = fn.name || 'Function wrapped with `once`' + f.onceError = name + " shouldn't be called more than once" + f.called = false + return f +} + + +/***/ }), +/* 50 */, +/* 51 */, +/* 52 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const noTreatment = __webpack_require__(981); +const mavenTreatment = __webpack_require__(517); + +function treatCommand(command) { + let libraryToExecute = noTreatment; + if (command.match(/.*mvn .*/)) { + libraryToExecute = mavenTreatment; + } + return libraryToExecute.treat(command); +} + +module.exports = { + treatCommand +}; + + +/***/ }), +/* 53 */, +/* 54 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Adapted from https://github.com/sindresorhus/make-dir +// Copyright (c) Sindre Sorhus (sindresorhus.com) +// Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: +// The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
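+//
+// Usage sketch for the helpers below (hedged; mirrors fs-extra's `mkdir -p`
+// semantics, with a plain number treated as the directory mode):
+//
+//   const { makeDir, makeDirSync } = module.exports;
+//   await makeDir("/tmp/a/b/c");    // creates any missing parent directories
+//   makeDirSync("/tmp/x/y", 0o755); // same, synchronously, with an explicit mode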
+ +const fs = __webpack_require__(869) +const path = __webpack_require__(622) +const atLeastNode = __webpack_require__(159) + +const useNativeRecursiveOption = atLeastNode('10.12.0') + +// https://github.com/nodejs/node/issues/8987 +// https://github.com/libuv/libuv/pull/1088 +const checkPath = pth => { + if (process.platform === 'win32') { + const pathHasInvalidWinCharacters = /[<>:"|?*]/.test(pth.replace(path.parse(pth).root, '')) + + if (pathHasInvalidWinCharacters) { + const error = new Error(`Path contains invalid characters: ${pth}`) + error.code = 'EINVAL' + throw error + } } +} - items.forEach(item => { - item = path.join(dir, item) - remove.removeSync(item) - }) +const processOptions = options => { + const defaults = { mode: 0o777 } + if (typeof options === 'number') options = { mode: options } + return { ...defaults, ...options } } -module.exports = { - emptyDirSync, - emptydirSync: emptyDirSync, - emptyDir, - emptydir: emptyDir +const permissionError = pth => { + // This replicates the exception of `fs.mkdir` with native the + // `recusive` option when run on an invalid drive under Windows. + const error = new Error(`operation not permitted, mkdir '${pth}'`) + error.code = 'EPERM' + error.errno = -4048 + error.path = pth + error.syscall = 'mkdir' + return error } +module.exports.makeDir = async (input, options) => { + checkPath(input) + options = processOptions(options) -/***/ }), -/* 616 */, -/* 617 */, -/* 618 */, -/* 619 */ -/***/ (function(module) { + if (useNativeRecursiveOption) { + const pth = path.resolve(input) -module.exports = require("constants"); + return fs.mkdir(pth, { + mode: options.mode, + recursive: true + }) + } -/***/ }), -/* 620 */, -/* 621 */ -/***/ (function(module) { + const make = async pth => { + try { + await fs.mkdir(pth, options.mode) + } catch (error) { + if (error.code === 'EPERM') { + throw error + } -"use strict"; + if (error.code === 'ENOENT') { + if (path.dirname(pth) === pth) { + throw permissionError(pth) + } -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); + if (error.message.includes('null bytes')) { + throw error + } - var r = range(a, b, str); + await make(path.dirname(pth)) + return make(pth) + } - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} + try { + const stats = await fs.stat(pth) + if (!stats.isDirectory()) { + // This error is never exposed to the user + // it is caught below, and the original error is thrown + throw new Error('The path is not a directory') + } + } catch { + throw error + } + } + } -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? 
m[0] : null; + return make(path.resolve(input)) } -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; +module.exports.makeDirSync = (input, options) => { + checkPath(input) + options = processOptions(options) - if (ai >= 0 && bi > 0) { - begs = []; - left = str.length; + if (useNativeRecursiveOption) { + const pth = path.resolve(input) - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } + return fs.mkdirSync(pth, { + mode: options.mode, + recursive: true + }) + } - bi = str.indexOf(b, i + 1); + const make = pth => { + try { + fs.mkdirSync(pth, options.mode) + } catch (error) { + if (error.code === 'EPERM') { + throw error } - i = ai < bi && ai >= 0 ? ai : bi; - } + if (error.code === 'ENOENT') { + if (path.dirname(pth) === pth) { + throw permissionError(pth) + } - if (begs.length) { - result = [ left, right ]; + if (error.message.includes('null bytes')) { + throw error + } + + make(path.dirname(pth)) + return make(pth) + } + + try { + if (!fs.statSync(pth).isDirectory()) { + // This error is never exposed to the user + // it is caught below, and the original error is thrown + throw new Error('The path is not a directory') + } + } catch { + throw error + } } } - return result; + return make(path.resolve(input)) } /***/ }), -/* 622 */ -/***/ (function(module) { - -module.exports = require("path"); - -/***/ }), -/* 623 */, -/* 624 */, -/* 625 */, -/* 626 */, -/* 627 */, -/* 628 */ +/* 55 */, +/* 56 */, +/* 57 */, +/* 58 */, +/* 59 */, +/* 60 */, +/* 61 */, +/* 62 */, +/* 63 */ /***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; - +/* @flow */ +/*:: -const { stringify } = __webpack_require__(477) -const { outputFileSync } = __webpack_require__(517) +type DotenvParseOptions = { + debug?: boolean +} -function outputJsonSync (file, data, options) { - const str = stringify(data, options) +// keys and values from src +type DotenvParseOutput = { [string]: string } - outputFileSync(file, str, options) +type DotenvConfigOptions = { + path?: string, // path to .env file + encoding?: string, // encoding of .env file + debug?: string // turn on logging for debugging purposes } -module.exports = outputJsonSync - +type DotenvConfigOutput = { + parsed?: DotenvParseOutput, + error?: Error +} -/***/ }), -/* 629 */, -/* 630 */ -/***/ (function(module) { +*/ -module.exports = require("perf_hooks"); +const fs = __webpack_require__(747) +const path = __webpack_require__(622) -/***/ }), -/* 631 */ -/***/ (function(module) { +function log (message /*: string */) { + console.log(`[dotenv][DEBUG] ${message}`) +} -module.exports = require("net"); +const NEWLINE = '\n' +const RE_INI_KEY_VAL = /^\s*([\w.-]+)\s*=\s*(.*)?\s*$/ +const RE_NEWLINES = /\\n/g +const NEWLINES_MATCH = /\n|\r|\r\n/ -/***/ }), -/* 632 */, -/* 633 */, -/* 634 */, -/* 635 */, -/* 636 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +// Parses src into an Object +function parse (src /*: string | Buffer */, options /*: ?DotenvParseOptions */) /*: DotenvParseOutput */ { + const debug = Boolean(options && options.debug) + const obj = {} -"use strict"; + // convert Buffers before splitting into lines and processing + src.toString().split(NEWLINES_MATCH).forEach(function (line, idx) { + // matching "KEY' and 
'VAL' in 'KEY=VAL' + const keyValueArr = line.match(RE_INI_KEY_VAL) + // matched? + if (keyValueArr != null) { + const key = keyValueArr[1] + // default undefined or missing values to empty string + let val = (keyValueArr[2] || '') + const end = val.length - 1 + const isDoubleQuoted = val[0] === '"' && val[end] === '"' + const isSingleQuoted = val[0] === "'" && val[end] === "'" + // if single or double quoted, remove quotes + if (isSingleQuoted || isDoubleQuoted) { + val = val.substring(1, end) -/*eslint-disable max-len,no-use-before-define*/ + // if double quoted, expand newlines + if (isDoubleQuoted) { + val = val.replace(RE_NEWLINES, NEWLINE) + } + } else { + // remove surrounding whitespace + val = val.trim() + } -var common = __webpack_require__(436); -var YAMLException = __webpack_require__(844); -var Mark = __webpack_require__(316); -var DEFAULT_SAFE_SCHEMA = __webpack_require__(829); -var DEFAULT_FULL_SCHEMA = __webpack_require__(451); + obj[key] = val + } else if (debug) { + log(`did not match key and value when parsing line ${idx + 1}: ${line}`) + } + }) + return obj +} -var _hasOwnProperty = Object.prototype.hasOwnProperty; +// Populates process.env from .env file +function config (options /*: ?DotenvConfigOptions */) /*: DotenvConfigOutput */ { + let dotenvPath = path.resolve(process.cwd(), '.env') + let encoding /*: string */ = 'utf8' + let debug = false + if (options) { + if (options.path != null) { + dotenvPath = options.path + } + if (options.encoding != null) { + encoding = options.encoding + } + if (options.debug != null) { + debug = true + } + } -var CONTEXT_FLOW_IN = 1; -var CONTEXT_FLOW_OUT = 2; -var CONTEXT_BLOCK_IN = 3; -var CONTEXT_BLOCK_OUT = 4; + try { + // specifying an encoding returns a string instead of a buffer + const parsed = parse(fs.readFileSync(dotenvPath, { encoding }), { debug }) + Object.keys(parsed).forEach(function (key) { + if (!Object.prototype.hasOwnProperty.call(process.env, key)) { + process.env[key] = parsed[key] + } else if (debug) { + log(`"${key}" is already defined in \`process.env\` and will not be overwritten`) + } + }) -var CHOMPING_CLIP = 1; -var CHOMPING_STRIP = 2; -var CHOMPING_KEEP = 3; + return { parsed } + } catch (e) { + return { error: e } + } +} +module.exports.config = config +module.exports.parse = parse -var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; -var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; -var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; -var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; -var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; +/***/ }), +/* 64 */, +/* 65 */, +/* 66 */, +/* 67 */, +/* 68 */, +/* 69 */, +/* 70 */, +/* 71 */, +/* 72 */, +/* 73 */, +/* 74 */, +/* 75 */, +/* 76 */, +/* 77 */, +/* 78 */, +/* 79 */ +/***/ (function(module, __unusedexports, __webpack_require__) { -function _class(obj) { return Object.prototype.toString.call(obj); } +const util = __webpack_require__(669); +const process = __webpack_require__(765); -function is_EOL(c) { - return (c === 0x0A/* LF */) || (c === 0x0D/* CR */); -} +class ClientError extends Error {} -function is_WHITE_SPACE(c) { - return (c === 0x09/* Tab */) || (c === 0x20/* Space */); -} +class TimeoutError extends Error {} -function is_WS_OR_EOL(c) { - return (c === 0x09/* Tab */) || - (c === 0x20/* Space */) || - (c === 0x0A/* LF */) || - (c === 0x0D/* CR */); +function log(prefix, obj) 
{ + if (process.env.NODE_ENV !== "test") { + const str = obj.map(o => (typeof o === "object" ? inspect(o) : o)); + if (prefix) { + console.log.apply(console, [prefix, ...str]); + } else { + console.log.apply(console, str); + } + } } -function is_FLOW_INDICATOR(c) { - return c === 0x2C/* , */ || - c === 0x5B/* [ */ || - c === 0x5D/* ] */ || - c === 0x7B/* { */ || - c === 0x7D/* } */; -} +const logger = { + level: "info", -function fromHexCode(c) { - var lc; + trace: (...str) => { + if (logger.level === "trace") { + log("[TRACE]", str); + } + }, - if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { - return c - 0x30; - } + debug: (...str) => { + if (logger.level === "trace" || logger.level === "debug") { + log("DEBUG", str); + } + }, - /*eslint-disable no-bitwise*/ - lc = c | 0x20; + info: (...str) => log("[INFO] ", str), + warn: (...str) => log("[WARN] ", str), - if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) { - return lc - 0x61 + 10; + error: (...str) => { + if (str.length === 1) { + if (str[0] instanceof Error) { + if (logger.level === "trace" || logger.level === "debug") { + log(null, [str[0].stack || str[0]]); + } else { + log("[ERROR] ", [str[0].message || str[0]]); + } + } + } else { + log("[ERROR] ", str); + } } +}; - return -1; +function inspect(obj) { + return util.inspect(obj, false, null, true); } -function escapedHexLen(c) { - if (c === 0x78/* x */) { return 2; } - if (c === 0x75/* u */) { return 4; } - if (c === 0x55/* U */) { return 8; } - return 0; -} +module.exports = { + ClientError, + TimeoutError, + logger +}; -function fromDecimalCode(c) { - if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { - return c - 0x30; - } - return -1; -} +/***/ }), +/* 80 */, +/* 81 */ +/***/ (function(module, __unusedexports, __webpack_require__) { -function simpleEscapeSequence(c) { - /* eslint-disable indent */ - return (c === 0x30/* 0 */) ? '\x00' : - (c === 0x61/* a */) ? '\x07' : - (c === 0x62/* b */) ? '\x08' : - (c === 0x74/* t */) ? '\x09' : - (c === 0x09/* Tab */) ? '\x09' : - (c === 0x6E/* n */) ? '\x0A' : - (c === 0x76/* v */) ? '\x0B' : - (c === 0x66/* f */) ? '\x0C' : - (c === 0x72/* r */) ? '\x0D' : - (c === 0x65/* e */) ? '\x1B' : - (c === 0x20/* Space */) ? ' ' : - (c === 0x22/* " */) ? '\x22' : - (c === 0x2F/* / */) ? '/' : - (c === 0x5C/* \ */) ? '\x5C' : - (c === 0x4E/* N */) ? '\x85' : - (c === 0x5F/* _ */) ? '\xA0' : - (c === 0x4C/* L */) ? '\u2028' : - (c === 0x50/* P */) ? '\u2029' : ''; -} +const { logger } = __webpack_require__(79); +const exec = __webpack_require__(986); -function charFromCodepoint(c) { - if (c <= 0xFFFF) { - return String.fromCharCode(c); +class ExitError extends Error { + constructor(message, code) { + super(message); + this.code = code; } - // Encode UTF-16 surrogate pair - // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF - return String.fromCharCode( - ((c - 0x010000) >> 10) + 0xD800, - ((c - 0x010000) & 0x03FF) + 0xDC00 - ); } -var simpleEscapeCheck = new Array(256); // integer, for fast access -var simpleEscapeMap = new Array(256); -for (var i = 0; i < 256; i++) { - simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 
1 : 0; - simpleEscapeMap[i] = simpleEscapeSequence(i); +async function execute(cwd, command) { + logger.info(`Execute command '${command}' in dir '${cwd}'`); + const options = {}; + options.cwd = cwd; + await exec.exec(command, [], options); } +module.exports = { + ExitError, + execute +}; -function State(input, options) { - this.input = input; - this.filename = options['filename'] || null; - this.schema = options['schema'] || DEFAULT_FULL_SCHEMA; - this.onWarning = options['onWarning'] || null; - this.legacy = options['legacy'] || false; - this.json = options['json'] || false; - this.listener = options['listener'] || null; +/***/ }), +/* 82 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - this.implicitTypes = this.schema.compiledImplicit; - this.typeMap = this.schema.compiledTypeMap; +"use strict"; - this.length = input.length; - this.position = 0; - this.line = 0; - this.lineStart = 0; - this.lineIndent = 0; - this.documents = []; +var Type = __webpack_require__(945); - /* - this.version; - this.checkLineBreaks; - this.tagMap; - this.anchorMap; - this.tag; - this.anchor; - this.kind; - this.result;*/ +var YAML_DATE_REGEXP = new RegExp( + '^([0-9][0-9][0-9][0-9])' + // [1] year + '-([0-9][0-9])' + // [2] month + '-([0-9][0-9])$'); // [3] day + +var YAML_TIMESTAMP_REGEXP = new RegExp( + '^([0-9][0-9][0-9][0-9])' + // [1] year + '-([0-9][0-9]?)' + // [2] month + '-([0-9][0-9]?)' + // [3] day + '(?:[Tt]|[ \\t]+)' + // ... + '([0-9][0-9]?)' + // [4] hour + ':([0-9][0-9])' + // [5] minute + ':([0-9][0-9])' + // [6] second + '(?:\\.([0-9]*))?' + // [7] fraction + '(?:[ \\t]*(Z|([-+])([0-9][0-9]?)' + // [8] tz [9] tz_sign [10] tz_hour + '(?::([0-9][0-9]))?))?$'); // [11] tz_minute +function resolveYamlTimestamp(data) { + if (data === null) return false; + if (YAML_DATE_REGEXP.exec(data) !== null) return true; + if (YAML_TIMESTAMP_REGEXP.exec(data) !== null) return true; + return false; } +function constructYamlTimestamp(data) { + var match, year, month, day, hour, minute, second, fraction = 0, + delta = null, tz_hour, tz_minute, date; -function generateError(state, message) { - return new YAMLException( - message, - new Mark(state.filename, state.input, state.position, state.line, (state.position - state.lineStart))); -} + match = YAML_DATE_REGEXP.exec(data); + if (match === null) match = YAML_TIMESTAMP_REGEXP.exec(data); -function throwError(state, message) { - throw generateError(state, message); -} + if (match === null) throw new Error('Date resolve error'); -function throwWarning(state, message) { - if (state.onWarning) { - state.onWarning.call(null, generateError(state, message)); - } -} + // match: [1] year [2] month [3] day + year = +(match[1]); + month = +(match[2]) - 1; // JS month starts with 0 + day = +(match[3]); -var directiveHandlers = { + if (!match[4]) { // no hour + return new Date(Date.UTC(year, month, day)); + } - YAML: function handleYamlDirective(state, name, args) { + // match: [4] hour [5] minute [6] second [7] fraction - var match, major, minor; + hour = +(match[4]); + minute = +(match[5]); + second = +(match[6]); - if (state.version !== null) { - throwError(state, 'duplication of %YAML directive'); + if (match[7]) { + fraction = match[7].slice(0, 3); + while (fraction.length < 3) { // milli-seconds + fraction += '0'; } + fraction = +fraction; + } - if (args.length !== 1) { - throwError(state, 'YAML directive accepts exactly one argument'); - } + // match: [8] tz [9] tz_sign [10] tz_hour [11] tz_minute - match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); + 
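+  // Worked example (hedged, borrowed from the YAML 1.1 timestamp examples):
+  // for "2001-12-14 21:59:43.10 -5" the regexp yields match[9] === "-" and
+  // match[10] === "5", so delta below works out to minus five hours in
+  // milliseconds and the date is shifted to the corresponding UTC instant.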
if (match[9]) { + tz_hour = +(match[10]); + tz_minute = +(match[11] || 0); + delta = (tz_hour * 60 + tz_minute) * 60000; // delta in mili-seconds + if (match[9] === '-') delta = -delta; + } - if (match === null) { - throwError(state, 'ill-formed argument of the YAML directive'); - } + date = new Date(Date.UTC(year, month, day, hour, minute, second, fraction)); - major = parseInt(match[1], 10); - minor = parseInt(match[2], 10); + if (delta) date.setTime(date.getTime() - delta); - if (major !== 1) { - throwError(state, 'unacceptable YAML version of the document'); - } + return date; +} - state.version = args[0]; - state.checkLineBreaks = (minor < 2); +function representYamlTimestamp(object /*, style*/) { + return object.toISOString(); +} - if (minor !== 1 && minor !== 2) { - throwWarning(state, 'unsupported YAML version of the document'); - } - }, +module.exports = new Type('tag:yaml.org,2002:timestamp', { + kind: 'scalar', + resolve: resolveYamlTimestamp, + construct: constructYamlTimestamp, + instanceOf: Date, + represent: representYamlTimestamp +}); - TAG: function handleTagDirective(state, name, args) { - var handle, prefix; +/***/ }), +/* 83 */, +/* 84 */, +/* 85 */, +/* 86 */, +/* 87 */ +/***/ (function(module) { - if (args.length !== 2) { - throwError(state, 'TAG directive accepts exactly two arguments'); - } +module.exports = require("os"); - handle = args[0]; - prefix = args[1]; +/***/ }), +/* 88 */, +/* 89 */, +/* 90 */, +/* 91 */, +/* 92 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - if (!PATTERN_TAG_HANDLE.test(handle)) { - throwError(state, 'ill-formed tag handle (first argument) of the TAG directive'); - } +"use strict"; - if (_hasOwnProperty.call(state.tagMap, handle)) { - throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); - } - if (!PATTERN_TAG_URI.test(prefix)) { - throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive'); - } +const fs = __webpack_require__(598) +const path = __webpack_require__(622) +const copy = __webpack_require__(774).copy +const remove = __webpack_require__(368).remove +const mkdirp = __webpack_require__(727).mkdirp +const pathExists = __webpack_require__(322).pathExists +const stat = __webpack_require__(425) - state.tagMap[handle] = prefix; +function move (src, dest, opts, cb) { + if (typeof opts === 'function') { + cb = opts + opts = {} } -}; + const overwrite = opts.overwrite || opts.clobber || false -function captureSegment(state, start, end, checkJson) { - var _position, _length, _character, _result; + stat.checkPaths(src, dest, 'move', (err, stats) => { + if (err) return cb(err) + const { srcStat } = stats + stat.checkParentPaths(src, srcStat, dest, 'move', err => { + if (err) return cb(err) + mkdirp(path.dirname(dest), err => { + if (err) return cb(err) + return doRename(src, dest, overwrite, cb) + }) + }) + }) +} - if (start < end) { - _result = state.input.slice(start, end); +function doRename (src, dest, overwrite, cb) { + if (overwrite) { + return remove(dest, err => { + if (err) return cb(err) + return rename(src, dest, overwrite, cb) + }) + } + pathExists(dest, (err, destExists) => { + if (err) return cb(err) + if (destExists) return cb(new Error('dest already exists.')) + return rename(src, dest, overwrite, cb) + }) +} - if (checkJson) { - for (_position = 0, _length = _result.length; _position < _length; _position += 1) { - _character = _result.charCodeAt(_position); - if (!(_character === 0x09 || - (0x20 <= _character && _character <= 0x10FFFF))) { - 
throwError(state, 'expected valid JSON character'); - } - } - } else if (PATTERN_NON_PRINTABLE.test(_result)) { - throwError(state, 'the stream contains non-printable characters'); - } +function rename (src, dest, overwrite, cb) { + fs.rename(src, dest, err => { + if (!err) return cb() + if (err.code !== 'EXDEV') return cb(err) + return moveAcrossDevice(src, dest, overwrite, cb) + }) +} - state.result += _result; +function moveAcrossDevice (src, dest, overwrite, cb) { + const opts = { + overwrite, + errorOnExist: true } + copy(src, dest, opts, err => { + if (err) return cb(err) + return remove(src, cb) + }) } -function mergeMappings(state, destination, source, overridableKeys) { - var sourceKeys, key, index, quantity; +module.exports = move - if (!common.isObject(source)) { - throwError(state, 'cannot merge mappings; the provided source object is unacceptable'); - } - sourceKeys = Object.keys(source); +/***/ }), +/* 93 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { - key = sourceKeys[index]; +"use strict"; - if (!_hasOwnProperty.call(destination, key)) { - destination[key] = source[key]; - overridableKeys[key] = true; - } - } -} -function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startPos) { - var index, quantity; - // The output is a plain object here, so keys can only be strings. - // We need to convert keyNode to a string, but doing so can hang the process - // (deeply nested arrays that explode exponentially using aliases). - if (Array.isArray(keyNode)) { - keyNode = Array.prototype.slice.call(keyNode); +var common = __webpack_require__(740); - for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { - if (Array.isArray(keyNode[index])) { - throwError(state, 'nested arrays are not supported inside keys'); - } - if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') { - keyNode[index] = '[object Object]'; - } - } - } +function Mark(name, buffer, position, line, column) { + this.name = name; + this.buffer = buffer; + this.position = position; + this.line = line; + this.column = column; +} - // Avoid code execution in load() via toString property - // (still use its own toString for arrays, timestamps, - // and whatever user schema extensions happen to have @@toStringTag) - if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') { - keyNode = '[object Object]'; - } +Mark.prototype.getSnippet = function getSnippet(indent, maxLength) { + var head, start, tail, end, snippet; + + if (!this.buffer) return null; - keyNode = String(keyNode); + indent = indent || 4; + maxLength = maxLength || 75; - if (_result === null) { - _result = {}; + head = ''; + start = this.position; + + while (start > 0 && '\x00\r\n\x85\u2028\u2029'.indexOf(this.buffer.charAt(start - 1)) === -1) { + start -= 1; + if (this.position - start > (maxLength / 2 - 1)) { + head = ' ... 
'; + start += 5; + break; + } } - if (keyTag === 'tag:yaml.org,2002:merge') { - if (Array.isArray(valueNode)) { - for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { - mergeMappings(state, _result, valueNode[index], overridableKeys); - } - } else { - mergeMappings(state, _result, valueNode, overridableKeys); - } - } else { - if (!state.json && - !_hasOwnProperty.call(overridableKeys, keyNode) && - _hasOwnProperty.call(_result, keyNode)) { - state.line = startLine || state.line; - state.position = startPos || state.position; - throwError(state, 'duplicated mapping key'); + tail = ''; + end = this.position; + + while (end < this.buffer.length && '\x00\r\n\x85\u2028\u2029'.indexOf(this.buffer.charAt(end)) === -1) { + end += 1; + if (end - this.position > (maxLength / 2 - 1)) { + tail = ' ... '; + end -= 5; + break; } - _result[keyNode] = valueNode; - delete overridableKeys[keyNode]; } - return _result; -} + snippet = this.buffer.slice(start, end); -function readLineBreak(state) { - var ch; + return common.repeat(' ', indent) + head + snippet + tail + '\n' + + common.repeat(' ', indent + this.position - start + head.length) + '^'; +}; - ch = state.input.charCodeAt(state.position); - if (ch === 0x0A/* LF */) { - state.position++; - } else if (ch === 0x0D/* CR */) { - state.position++; - if (state.input.charCodeAt(state.position) === 0x0A/* LF */) { - state.position++; - } - } else { - throwError(state, 'a line break is expected'); - } +Mark.prototype.toString = function toString(compact) { + var snippet, where = ''; - state.line += 1; - state.lineStart = state.position; -} + if (this.name) { + where += 'in "' + this.name + '" '; + } -function skipSeparationSpace(state, allowComments, checkIndent) { - var lineBreaks = 0, - ch = state.input.charCodeAt(state.position); + where += 'at line ' + (this.line + 1) + ', column ' + (this.column + 1); - while (ch !== 0) { - while (is_WHITE_SPACE(ch)) { - ch = state.input.charCodeAt(++state.position); - } + if (!compact) { + snippet = this.getSnippet(); - if (allowComments && ch === 0x23/* # */) { - do { - ch = state.input.charCodeAt(++state.position); - } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0); + if (snippet) { + where += ':\n' + snippet; } + } - if (is_EOL(ch)) { - readLineBreak(state); + return where; +}; - ch = state.input.charCodeAt(state.position); - lineBreaks++; - state.lineIndent = 0; - while (ch === 0x20/* Space */) { - state.lineIndent++; - ch = state.input.charCodeAt(++state.position); - } - } else { - break; - } - } +module.exports = Mark; - if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { - throwWarning(state, 'deficient indentation'); - } - return lineBreaks; -} +/***/ }), +/* 94 */, +/* 95 */ +/***/ (function(module, __unusedexports, __webpack_require__) { -function testDocumentSeparator(state) { - var _position = state.position, - ch; +"use strict"; - ch = state.input.charCodeAt(_position); - // Condition state.position === state.lineStart is tested - // in parent on each call, for efficiency. No needs to test here again. - if ((ch === 0x2D/* - */ || ch === 0x2E/* . 
*/) && - ch === state.input.charCodeAt(_position + 1) && - ch === state.input.charCodeAt(_position + 2)) { +const fs = __webpack_require__(598) +const path = __webpack_require__(622) +const copySync = __webpack_require__(640).copySync +const removeSync = __webpack_require__(368).removeSync +const mkdirpSync = __webpack_require__(727).mkdirpSync +const stat = __webpack_require__(425) - _position += 3; +function moveSync (src, dest, opts) { + opts = opts || {} + const overwrite = opts.overwrite || opts.clobber || false - ch = state.input.charCodeAt(_position); + const { srcStat } = stat.checkPathsSync(src, dest, 'move') + stat.checkParentPathsSync(src, srcStat, dest, 'move') + mkdirpSync(path.dirname(dest)) + return doRename(src, dest, overwrite) +} - if (ch === 0 || is_WS_OR_EOL(ch)) { - return true; - } +function doRename (src, dest, overwrite) { + if (overwrite) { + removeSync(dest) + return rename(src, dest, overwrite) } + if (fs.existsSync(dest)) throw new Error('dest already exists.') + return rename(src, dest, overwrite) +} - return false; +function rename (src, dest, overwrite) { + try { + fs.renameSync(src, dest) + } catch (err) { + if (err.code !== 'EXDEV') throw err + return moveAcrossDevice(src, dest, overwrite) + } } -function writeFoldedLines(state, count) { - if (count === 1) { - state.result += ' '; - } else if (count > 1) { - state.result += common.repeat('\n', count - 1); +function moveAcrossDevice (src, dest, overwrite) { + const opts = { + overwrite, + errorOnExist: true } + copySync(src, dest, opts) + return removeSync(src) } +module.exports = moveSync -function readPlainScalar(state, nodeIndent, withinFlowCollection) { - var preceding, - following, - captureStart, - captureEnd, - hasPendingContent, - _line, - _lineStart, - _lineIndent, - _kind = state.kind, - _result = state.result, - ch; - ch = state.input.charCodeAt(state.position); +/***/ }), +/* 96 */, +/* 97 */, +/* 98 */, +/* 99 */, +/* 100 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - if (is_WS_OR_EOL(ch) || - is_FLOW_INDICATOR(ch) || - ch === 0x23/* # */ || - ch === 0x26/* & */ || - ch === 0x2A/* * */ || - ch === 0x21/* ! */ || - ch === 0x7C/* | */ || - ch === 0x3E/* > */ || - ch === 0x27/* ' */ || - ch === 0x22/* " */ || - ch === 0x25/* % */ || - ch === 0x40/* @ */ || - ch === 0x60/* ` */) { - return false; - } +"use strict"; - if (ch === 0x3F/* ? 
*/ || ch === 0x2D/* - */) { - following = state.input.charCodeAt(state.position + 1); - if (is_WS_OR_EOL(following) || - withinFlowCollection && is_FLOW_INDICATOR(following)) { - return false; - } - } +var Type = __webpack_require__(945); - state.kind = 'scalar'; - state.result = ''; - captureStart = captureEnd = state.position; - hasPendingContent = false; +var _hasOwnProperty = Object.prototype.hasOwnProperty; - while (ch !== 0) { - if (ch === 0x3A/* : */) { - following = state.input.charCodeAt(state.position + 1); +function resolveYamlSet(data) { + if (data === null) return true; - if (is_WS_OR_EOL(following) || - withinFlowCollection && is_FLOW_INDICATOR(following)) { - break; - } + var key, object = data; - } else if (ch === 0x23/* # */) { - preceding = state.input.charCodeAt(state.position - 1); + for (key in object) { + if (_hasOwnProperty.call(object, key)) { + if (object[key] !== null) return false; + } + } - if (is_WS_OR_EOL(preceding)) { - break; - } + return true; +} - } else if ((state.position === state.lineStart && testDocumentSeparator(state)) || - withinFlowCollection && is_FLOW_INDICATOR(ch)) { - break; +function constructYamlSet(data) { + return data !== null ? data : {}; +} - } else if (is_EOL(ch)) { - _line = state.line; - _lineStart = state.lineStart; - _lineIndent = state.lineIndent; - skipSeparationSpace(state, false, -1); +module.exports = new Type('tag:yaml.org,2002:set', { + kind: 'mapping', + resolve: resolveYamlSet, + construct: constructYamlSet +}); - if (state.lineIndent >= nodeIndent) { - hasPendingContent = true; - ch = state.input.charCodeAt(state.position); - continue; - } else { - state.position = captureEnd; - state.line = _line; - state.lineStart = _lineStart; - state.lineIndent = _lineIndent; - break; - } - } - if (hasPendingContent) { - captureSegment(state, captureStart, captureEnd, false); - writeFoldedLines(state, state.line - _line); - captureStart = captureEnd = state.position; - hasPendingContent = false; - } +/***/ }), +/* 101 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { readDefinitionFile } = __webpack_require__(799); +const { treatProject } = __webpack_require__(443); +const { validateNode } = __webpack_require__(127); + +/** + * It will return back the definition tree plus dependencies as an object. + * + * @param {string} file - The definition file. It can be a URL or a in the filesystem. + * @param {Object} urlPlaceHolders the url place holders to replace url. This is needed in case either the definition file or the dependencies file are loaded from a URL + */ +async function getTree(file, urlPlaceHolders = {}) { + const definition = await readDefinitionFile(file, urlPlaceHolders); + return dependencyListToTree(definition.dependencies, definition); +} - if (!is_WHITE_SPACE(ch)) { - captureEnd = state.position + 1; - } +/** + * It will return back the definition tree plus dependencies as an object for a particular project. + * + * @param {string} file - The definition file. It can be a URL or a in the filesystem. + * @param {string} project - The project name to look for. + * @param {Object} urlPlaceHolders the url place holders to replace url. 
This is needed in case either the definition file or the dependencies file are loaded from a URL + */ +async function getTreeForProject(file, project, urlPlaceHolders = {}) { + return lookForProject( + await getTree(file, urlPlaceHolders), + project, + undefined + ); +} - ch = state.input.charCodeAt(++state.position); +/** + * It returns the leaf from the tree corresponding to the project name + * @param {Object} tree + * @param {String} project the project name + */ +function lookForProject(tree, project) { + if (tree && tree.length > 0) { + const leaf = tree.find(leaf => project === leaf.project); + return leaf + ? leaf + : tree + .map(leaf => lookForProject(leaf.children, project)) + .find(child => child); + } else { + return undefined; } +} - captureSegment(state, captureStart, captureEnd, false); +/** + * it treats the dependenciy list together with the build configuration to compose the build tree + * @param {Object} dependencyList the dependency list + * @param {Object} buildConfiguration the build configuration directly read from the yaml file + */ +function dependencyListToTree(dependencyList, buildConfiguration) { + const map = dependencyList.reduce((map, dependency, i) => { + map[dependency.project] = { index: i, node: undefined }; + dependency.children = []; + dependency.parents = []; + return map; + }, {}); - if (state.result) { - return true; - } + return dependencyList.reduce((roots, node) => { + validateNode(node); + map[node.project].node = { + ...node, + repo: { + group: node.project.split("/")[0], + name: node.project.split("/")[1] + }, + ...treatProject(node.project, buildConfiguration) + }; - state.kind = _kind; - state.result = _result; - return false; + if (node.dependencies && node.dependencies.length > 0) { + node.dependencies.forEach(dependency => { + dependencyList[map[dependency.project].index].children.push({ + ...map[node.project].node + }); + node.parents.push({ + ...map[dependency.project].node + }); + }); + } else { + roots.push(map[node.project].node); + } + return roots; + }, []); } -function readSingleQuotedScalar(state, nodeIndent) { - var ch, - captureStart, captureEnd; +module.exports = { + getTree, + getTreeForProject, + dependencyListToTree +}; - ch = state.input.charCodeAt(state.position); - if (ch !== 0x27/* ' */) { - return false; - } +/***/ }), +/* 102 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { - state.kind = 'scalar'; - state.result = ''; - state.position++; - captureStart = captureEnd = state.position; +"use strict"; - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - if (ch === 0x27/* ' */) { - captureSegment(state, captureStart, state.position, true); - ch = state.input.charCodeAt(++state.position); +// For internal use, subject to change. 
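+// A hedged example of the file-command mechanism implemented below: on a
+// GitHub Actions runner, issueCommand('ENV', 'FOO=bar') appends "FOO=bar" plus
+// os.EOL to the file named by $GITHUB_ENV ('ENV' and 'PATH' are the file
+// commands the runner is known to define; other command names are assumptions).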
+var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +const fs = __importStar(__webpack_require__(747)); +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(394); +function issueCommand(command, message) { + const filePath = process.env[`GITHUB_${command}`]; + if (!filePath) { + throw new Error(`Unable to find environment variable for file command ${command}`); + } + if (!fs.existsSync(filePath)) { + throw new Error(`Missing file at path: ${filePath}`); + } + fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, { + encoding: 'utf8' + }); +} +exports.issueCommand = issueCommand; +//# sourceMappingURL=file-command.js.map - if (ch === 0x27/* ' */) { - captureStart = state.position; - state.position++; - captureEnd = state.position; - } else { - return true; - } +/***/ }), +/* 103 */, +/* 104 */, +/* 105 */, +/* 106 */, +/* 107 */, +/* 108 */, +/* 109 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - } else if (is_EOL(ch)) { - captureSegment(state, captureStart, captureEnd, true); - writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); - captureStart = captureEnd = state.position; - } else if (state.position === state.lineStart && testDocumentSeparator(state)) { - throwError(state, 'unexpected end of the document within a single quoted scalar'); +const { ClientError, logger } = __webpack_require__(79); +const { + executeFromEvent: pullRequestEventFlow +} = __webpack_require__(701); +const { + executeFromEvent: fdbEventFlow +} = __webpack_require__(771); +const { + executeFromEvent: singleEventFlow +} = __webpack_require__(15); +const { + isPullRequestFlowType, + isFDFlowType, + isSingleFlowType, + getFlowType +} = __webpack_require__(933); +const { createOctokitInstance, getProcessEnvVariable } = __webpack_require__(867); +__webpack_require__(63).config(); - } else { - state.position++; - captureEnd = state.position; - } +async function main() { + const token = getProcessEnvVariable("GITHUB_TOKEN"); + const octokit = createOctokitInstance(token); + if (isPullRequestFlowType()) { + await pullRequestEventFlow(token, octokit, process.env); + } else if (isFDFlowType()) { + await fdbEventFlow(token, octokit, process.env); + } else if (isSingleFlowType()) { + await singleEventFlow(token, octokit, process.env); + } else { + throw new Error( + `flow type input value '${getFlowType()}' is not supported. 
Please check documentation.` + ); } +} - throwError(state, 'unexpected end of the stream within a single quoted scalar'); +if (require.main === require.cache[eval('__filename')]) { + main().catch(e => { + if (e instanceof ClientError) { + process.exitCode = 2; + logger.error(e); + } else { + process.exitCode = 1; + logger.error(e); + } + }); } -function readDoubleQuotedScalar(state, nodeIndent) { - var captureStart, - captureEnd, - hexLength, - hexResult, - tmp, - ch; +module.exports = { main }; - ch = state.input.charCodeAt(state.position); - if (ch !== 0x22/* " */) { - return false; - } +/***/ }), +/* 110 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - state.kind = 'scalar'; - state.result = ''; - state.position++; - captureStart = captureEnd = state.position; +"use strict"; - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - if (ch === 0x22/* " */) { - captureSegment(state, captureStart, state.position, true); - state.position++; - return true; - } else if (ch === 0x5C/* \ */) { - captureSegment(state, captureStart, state.position, true); - ch = state.input.charCodeAt(++state.position); +const fs = __webpack_require__(598) +const path = __webpack_require__(622) +const mkdirsSync = __webpack_require__(727).mkdirsSync +const utimesMillisSync = __webpack_require__(946).utimesMillisSync +const stat = __webpack_require__(425) - if (is_EOL(ch)) { - skipSeparationSpace(state, false, nodeIndent); +function copySync (src, dest, opts) { + if (typeof opts === 'function') { + opts = { filter: opts } + } - // TODO: rework to inline fn with no type cast? - } else if (ch < 256 && simpleEscapeCheck[ch]) { - state.result += simpleEscapeMap[ch]; - state.position++; + opts = opts || {} + opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now + opts.overwrite = 'overwrite' in opts ? !!opts.overwrite : opts.clobber // overwrite falls back to clobber - } else if ((tmp = escapedHexLen(ch)) > 0) { - hexLength = tmp; - hexResult = 0; + // Warn about using preserveTimestamps on 32-bit node + if (opts.preserveTimestamps && process.arch === 'ia32') { + console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n + see https://github.com/jprichardson/node-fs-extra/issues/269`) + } - for (; hexLength > 0; hexLength--) { - ch = state.input.charCodeAt(++state.position); + const { srcStat, destStat } = stat.checkPathsSync(src, dest, 'copy') + stat.checkParentPathsSync(src, srcStat, dest, 'copy') + return handleFilterAndCopy(destStat, src, dest, opts) +} - if ((tmp = fromHexCode(ch)) >= 0) { - hexResult = (hexResult << 4) + tmp; +function handleFilterAndCopy (destStat, src, dest, opts) { + if (opts.filter && !opts.filter(src, dest)) return + const destParent = path.dirname(dest) + if (!fs.existsSync(destParent)) mkdirsSync(destParent) + return startCopy(destStat, src, dest, opts) +} - } else { - throwError(state, 'expected hexadecimal character'); - } - } +function startCopy (destStat, src, dest, opts) { + if (opts.filter && !opts.filter(src, dest)) return + return getStats(destStat, src, dest, opts) +} - state.result += charFromCodepoint(hexResult); +function getStats (destStat, src, dest, opts) { + const statSync = opts.dereference ? 
fs.statSync : fs.lstatSync + const srcStat = statSync(src) - state.position++; + if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts) + else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts) + else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts) +} - } else { - throwError(state, 'unknown escape sequence'); - } +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) return copyFile(srcStat, src, dest, opts) + return mayCopyFile(srcStat, src, dest, opts) +} - captureStart = captureEnd = state.position; +function mayCopyFile (srcStat, src, dest, opts) { + if (opts.overwrite) { + fs.unlinkSync(dest) + return copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new Error(`'${dest}' already exists`) + } +} - } else if (is_EOL(ch)) { - captureSegment(state, captureStart, captureEnd, true); - writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); - captureStart = captureEnd = state.position; +function copyFile (srcStat, src, dest, opts) { + fs.copyFileSync(src, dest) + if (opts.preserveTimestamps) handleTimestamps(srcStat.mode, src, dest) + return setDestMode(dest, srcStat.mode) +} - } else if (state.position === state.lineStart && testDocumentSeparator(state)) { - throwError(state, 'unexpected end of the document within a double quoted scalar'); +function handleTimestamps (srcMode, src, dest) { + // Make sure the file is writable before setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) makeFileWritable(dest, srcMode) + return setDestTimestamps(src, dest) +} - } else { - state.position++; - captureEnd = state.position; - } - } +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} - throwError(state, 'unexpected end of the stream within a double quoted scalar'); +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) } -function readFlowCollection(state, nodeIndent) { - var readNext = true, - _line, - _tag = state.tag, - _result, - _anchor = state.anchor, - following, - terminator, - isPair, - isExplicitPair, - isMapping, - overridableKeys = {}, - keyNode, - keyTag, - valueNode, - ch; +function setDestMode (dest, srcMode) { + return fs.chmodSync(dest, srcMode) +} - ch = state.input.charCodeAt(state.position); +function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = fs.statSync(src) + return utimesMillisSync(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} - if (ch === 0x5B/* [ */) { - terminator = 0x5D;/* ] */ - isMapping = false; - _result = []; - } else if (ch === 0x7B/* { */) { - terminator = 0x7D;/* } */ - isMapping = true; - _result = {}; - } else { - return false; +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts) + if (destStat && !destStat.isDirectory()) { + throw new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`) } + return copyDir(src, dest, opts) +} - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; - } +function mkDirAndCopy (srcMode, src, dest, opts) { + fs.mkdirSync(dest) + copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} - ch = 
state.input.charCodeAt(++state.position); +function copyDir (src, dest, opts) { + fs.readdirSync(src).forEach(item => copyDirItem(item, src, dest, opts)) +} - while (ch !== 0) { - skipSeparationSpace(state, true, nodeIndent); +function copyDirItem (item, src, dest, opts) { + const srcItem = path.join(src, item) + const destItem = path.join(dest, item) + const { destStat } = stat.checkPathsSync(srcItem, destItem, 'copy') + return startCopy(destStat, srcItem, destItem, opts) +} - ch = state.input.charCodeAt(state.position); +function onLink (destStat, src, dest, opts) { + let resolvedSrc = fs.readlinkSync(src) + if (opts.dereference) { + resolvedSrc = path.resolve(process.cwd(), resolvedSrc) + } - if (ch === terminator) { - state.position++; - state.tag = _tag; - state.anchor = _anchor; - state.kind = isMapping ? 'mapping' : 'sequence'; - state.result = _result; - return true; - } else if (!readNext) { - throwError(state, 'missed comma between flow collection entries'); + if (!destStat) { + return fs.symlinkSync(resolvedSrc, dest) + } else { + let resolvedDest + try { + resolvedDest = fs.readlinkSync(dest) + } catch (err) { + // dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlinkSync(resolvedSrc, dest) + throw err + } + if (opts.dereference) { + resolvedDest = path.resolve(process.cwd(), resolvedDest) + } + if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`) } - keyTag = keyNode = valueNode = null; - isPair = isExplicitPair = false; + // prevent copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. + if (fs.statSync(dest).isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`) + } + return copyLink(resolvedSrc, dest) + } +} - if (ch === 0x3F/* ? */) { - following = state.input.charCodeAt(state.position + 1); +function copyLink (resolvedSrc, dest) { + fs.unlinkSync(dest) + return fs.symlinkSync(resolvedSrc, dest) +} - if (is_WS_OR_EOL(following)) { - isPair = isExplicitPair = true; - state.position++; - skipSeparationSpace(state, true, nodeIndent); - } - } +module.exports = copySync - _line = state.line; - composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); - keyTag = state.tag; - keyNode = state.result; - skipSeparationSpace(state, true, nodeIndent); - ch = state.input.charCodeAt(state.position); +/***/ }), +/* 111 */, +/* 112 */, +/* 113 */, +/* 114 */, +/* 115 */, +/* 116 */, +/* 117 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { - if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) { - isPair = true; - ch = state.input.charCodeAt(++state.position); - skipSeparationSpace(state, true, nodeIndent); - composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); - valueNode = state.result; - } +// Copyright Joyent, Inc. and other Node contributors. 
+// +// Permission is hereby granted, free of charge, to any person obtaining a +// copy of this software and associated documentation files (the +// "Software"), to deal in the Software without restriction, including +// without limitation the rights to use, copy, modify, merge, publish, +// distribute, sublicense, and/or sell copies of the Software, and to permit +// persons to whom the Software is furnished to do so, subject to the +// following conditions: +// +// The above copyright notice and this permission notice shall be included +// in all copies or substantial portions of the Software. +// +// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN +// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, +// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR +// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE +// USE OR OTHER DEALINGS IN THE SOFTWARE. - if (isMapping) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode); - } else if (isPair) { - _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode)); - } else { - _result.push(keyNode); - } +var pathModule = __webpack_require__(622); +var isWindows = process.platform === 'win32'; +var fs = __webpack_require__(747); + +// JavaScript implementation of realpath, ported from node pre-v6 + +var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); - skipSeparationSpace(state, true, nodeIndent); +function rethrow() { + // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and + // is fairly slow to generate. + var callback; + if (DEBUG) { + var backtrace = new Error; + callback = debugCallback; + } else + callback = missingCallback; - ch = state.input.charCodeAt(state.position); + return callback; - if (ch === 0x2C/* , */) { - readNext = true; - ch = state.input.charCodeAt(++state.position); - } else { - readNext = false; + function debugCallback(err) { + if (err) { + backtrace.message = err.message; + err = backtrace; + missingCallback(err); } } - throwError(state, 'unexpected end of the stream within a flow collection'); + function missingCallback(err) { + if (err) { + if (process.throwDeprecation) + throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs + else if (!process.noDeprecation) { + var msg = 'fs: missing callback ' + (err.stack || err.message); + if (process.traceDeprecation) + console.trace(msg); + else + console.error(msg); + } + } + } } -function readBlockScalar(state, nodeIndent) { - var captureStart, - folding, - chomping = CHOMPING_CLIP, - didReadContent = false, - detectedIndent = false, - textIndent = nodeIndent, - emptyLines = 0, - atMoreIndented = false, - tmp, - ch; - - ch = state.input.charCodeAt(state.position); - - if (ch === 0x7C/* | */) { - folding = false; - } else if (ch === 0x3E/* > */) { - folding = true; - } else { - return false; - } +function maybeCallback(cb) { + return typeof cb === 'function' ? cb : rethrow(); +} - state.kind = 'scalar'; - state.result = ''; +var normalize = pathModule.normalize; - while (ch !== 0) { - ch = state.input.charCodeAt(++state.position); +// Regexp that finds the next partion of a (partial) path +// result is [base_with_slash, base], e.g. 
['somedir/', 'somedir'] +if (isWindows) { + var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; +} else { + var nextPartRe = /(.*?)(?:[\/]+|$)/g; +} - if (ch === 0x2B/* + */ || ch === 0x2D/* - */) { - if (CHOMPING_CLIP === chomping) { - chomping = (ch === 0x2B/* + */) ? CHOMPING_KEEP : CHOMPING_STRIP; - } else { - throwError(state, 'repeat of a chomping mode identifier'); - } +// Regex to find the device root, including trailing slash. E.g. 'c:\\'. +if (isWindows) { + var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; +} else { + var splitRootRe = /^[\/]*/; +} - } else if ((tmp = fromDecimalCode(ch)) >= 0) { - if (tmp === 0) { - throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one'); - } else if (!detectedIndent) { - textIndent = nodeIndent + tmp - 1; - detectedIndent = true; - } else { - throwError(state, 'repeat of an indentation width identifier'); - } +exports.realpathSync = function realpathSync(p, cache) { + // make p is absolute + p = pathModule.resolve(p); - } else { - break; - } + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return cache[p]; } - if (is_WHITE_SPACE(ch)) { - do { ch = state.input.charCodeAt(++state.position); } - while (is_WHITE_SPACE(ch)); + var original = p, + seenLinks = {}, + knownHard = {}; - if (ch === 0x23/* # */) { - do { ch = state.input.charCodeAt(++state.position); } - while (!is_EOL(ch) && (ch !== 0)); - } - } + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; - while (ch !== 0) { - readLineBreak(state); - state.lineIndent = 0; + start(); - ch = state.input.charCodeAt(state.position); + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; - while ((!detectedIndent || state.lineIndent < textIndent) && - (ch === 0x20/* Space */)) { - state.lineIndent++; - ch = state.input.charCodeAt(++state.position); + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstatSync(base); + knownHard[base] = true; } + } - if (!detectedIndent && state.lineIndent > textIndent) { - textIndent = state.lineIndent; - } + // walk down the path, swapping out linked pathparts for their real + // values + // NB: p.length changes. + while (pos < p.length) { + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; - if (is_EOL(ch)) { - emptyLines++; + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { continue; } - // End of the scalar. - if (state.lineIndent < textIndent) { + var resolvedLink; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // some known symbolic link. no need to stat again. + resolvedLink = cache[base]; + } else { + var stat = fs.lstatSync(base); + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + continue; + } - // Perform the chomping. - if (chomping === CHOMPING_KEEP) { - state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); - } else if (chomping === CHOMPING_CLIP) { - if (didReadContent) { // i.e. only if the scalar is not empty. 
- state.result += '\n'; + // read the link if it wasn't read before + // dev/ino always return 0 on windows, so skip the check. + var linkTarget = null; + if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + linkTarget = seenLinks[id]; } } - - // Break this `while` cycle and go to the funciton's epilogue. - break; + if (linkTarget === null) { + fs.statSync(base); + linkTarget = fs.readlinkSync(base); + } + resolvedLink = pathModule.resolve(previous, linkTarget); + // track this, if given a cache. + if (cache) cache[base] = resolvedLink; + if (!isWindows) seenLinks[id] = linkTarget; } - // Folded style: use fancy rules to handle line breaks. - if (folding) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); + } - // Lines starting with white space characters (more-indented lines) are not folded. - if (is_WHITE_SPACE(ch)) { - atMoreIndented = true; - // except for the first content line (cf. Example 8.1) - state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + if (cache) cache[original] = p; - // End of more-indented block. - } else if (atMoreIndented) { - atMoreIndented = false; - state.result += common.repeat('\n', emptyLines + 1); + return p; +}; - // Just one line break - perceive as the same line. - } else if (emptyLines === 0) { - if (didReadContent) { // i.e. only if we have already read some scalar content. - state.result += ' '; - } - // Several line breaks - perceive as different lines. - } else { - state.result += common.repeat('\n', emptyLines); - } +exports.realpath = function realpath(p, cache, cb) { + if (typeof cb !== 'function') { + cb = maybeCallback(cache); + cache = null; + } + + // make p is absolute + p = pathModule.resolve(p); + + if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { + return process.nextTick(cb.bind(null, null, cache[p])); + } + + var original = p, + seenLinks = {}, + knownHard = {}; + + // current character position in p + var pos; + // the partial path so far, including a trailing slash if any + var current; + // the partial path without a trailing slash (except when pointing at a root) + var base; + // the partial path scanned in the previous round, with slash + var previous; + + start(); + + function start() { + // Skip over roots + var m = splitRootRe.exec(p); + pos = m[0].length; + current = m[0]; + base = m[0]; + previous = ''; - // Literal style: just add exact number of line breaks between content lines. + // On windows, check that the root exists. On unix there is no need. + if (isWindows && !knownHard[base]) { + fs.lstat(base, function(err) { + if (err) return cb(err); + knownHard[base] = true; + LOOP(); + }); } else { - // Keep all line breaks except the header line break. - state.result += common.repeat('\n', didReadContent ? 
1 + emptyLines : emptyLines); + process.nextTick(LOOP); } + } - didReadContent = true; - detectedIndent = true; - emptyLines = 0; - captureStart = state.position; - - while (!is_EOL(ch) && (ch !== 0)) { - ch = state.input.charCodeAt(++state.position); + // walk down the path, swapping out linked pathparts for their real + // values + function LOOP() { + // stop if scanned past end of path + if (pos >= p.length) { + if (cache) cache[original] = p; + return cb(null, p); } - captureSegment(state, captureStart, state.position, false); - } + // find the next part + nextPartRe.lastIndex = pos; + var result = nextPartRe.exec(p); + previous = current; + current += result[0]; + base = previous + result[1]; + pos = nextPartRe.lastIndex; - return true; -} + // continue if not a symlink + if (knownHard[base] || (cache && cache[base] === base)) { + return process.nextTick(LOOP); + } -function readBlockSequence(state, nodeIndent) { - var _line, - _tag = state.tag, - _anchor = state.anchor, - _result = [], - following, - detected = false, - ch; + if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { + // known symbolic link. no need to stat again. + return gotResolvedLink(cache[base]); + } - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; + return fs.lstat(base, gotStat); } - ch = state.input.charCodeAt(state.position); - - while (ch !== 0) { - - if (ch !== 0x2D/* - */) { - break; - } - - following = state.input.charCodeAt(state.position + 1); + function gotStat(err, stat) { + if (err) return cb(err); - if (!is_WS_OR_EOL(following)) { - break; + // if not a symlink, skip to the next path part + if (!stat.isSymbolicLink()) { + knownHard[base] = true; + if (cache) cache[base] = base; + return process.nextTick(LOOP); } - detected = true; - state.position++; - - if (skipSeparationSpace(state, true, -1)) { - if (state.lineIndent <= nodeIndent) { - _result.push(null); - ch = state.input.charCodeAt(state.position); - continue; + // stat & read the link if not read before + // call gotTarget as soon as the link target is known + // dev/ino always return 0 on windows, so skip the check. 
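+    // (seenLinks caches link targets under a `dev:ino` key, so a link that
+    // appears many times in a tree is only readlink'ed once; on Windows
+    // dev/ino are always 0 and the key would alias unrelated files, hence
+    // the platform guard below)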
+ if (!isWindows) { + var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); + if (seenLinks.hasOwnProperty(id)) { + return gotTarget(null, seenLinks[id], base); } } + fs.stat(base, function(err) { + if (err) return cb(err); - _line = state.line; - composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); - _result.push(state.result); - skipSeparationSpace(state, true, -1); + fs.readlink(base, function(err, target) { + if (!isWindows) seenLinks[id] = target; + gotTarget(err, target); + }); + }); + } - ch = state.input.charCodeAt(state.position); + function gotTarget(err, target, base) { + if (err) return cb(err); - if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { - throwError(state, 'bad indentation of a sequence entry'); - } else if (state.lineIndent < nodeIndent) { - break; - } + var resolvedLink = pathModule.resolve(previous, target); + if (cache) cache[base] = resolvedLink; + gotResolvedLink(resolvedLink); } - if (detected) { - state.tag = _tag; - state.anchor = _anchor; - state.kind = 'sequence'; - state.result = _result; - return true; + function gotResolvedLink(resolvedLink) { + // resolve the link, then start over + p = pathModule.resolve(resolvedLink, p.slice(pos)); + start(); } - return false; -} +}; -function readBlockMapping(state, nodeIndent, flowIndent) { - var following, - allowCompact, - _line, - _pos, - _tag = state.tag, - _anchor = state.anchor, - _result = {}, - overridableKeys = {}, - keyTag = null, - keyNode = null, - valueNode = null, - atExplicitKey = false, - detected = false, - ch; - if (state.anchor !== null) { - state.anchorMap[state.anchor] = _result; - } +/***/ }), +/* 118 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - ch = state.input.charCodeAt(state.position); +"use strict"; - while (ch !== 0) { - following = state.input.charCodeAt(state.position + 1); - _line = state.line; // Save the current line. - _pos = state.position; +const os = __webpack_require__(87); - // - // Explicit notation case. There are two separate blocks: - // first for the key (denoted by "?") and second for the value (denoted by ":") - // - if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) { +const nameMap = new Map([ + [20, ['Big Sur', '11']], + [19, ['Catalina', '10.15']], + [18, ['Mojave', '10.14']], + [17, ['High Sierra', '10.13']], + [16, ['Sierra', '10.12']], + [15, ['El Capitan', '10.11']], + [14, ['Yosemite', '10.10']], + [13, ['Mavericks', '10.9']], + [12, ['Mountain Lion', '10.8']], + [11, ['Lion', '10.7']], + [10, ['Snow Leopard', '10.6']], + [9, ['Leopard', '10.5']], + [8, ['Tiger', '10.4']], + [7, ['Panther', '10.3']], + [6, ['Jaguar', '10.2']], + [5, ['Puma', '10.1']] +]); - if (ch === 0x3F/* ? */) { - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); - keyTag = keyNode = valueNode = null; - } +const macosRelease = release => { + release = Number((release || os.release()).split('.')[0]); - detected = true; - atExplicitKey = true; - allowCompact = true; + const [name, version] = nameMap.get(release); - } else if (atExplicitKey) { - // i.e. 0x3A/* : */ === character after the explicit key. 
- atExplicitKey = false; - allowCompact = true; + return { + name, + version + }; +}; - } else { - throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line'); - } +module.exports = macosRelease; +// TODO: remove this in the next major version +module.exports.default = macosRelease; - state.position += 1; - ch = following; - // - // Implicit notation case. Flow-style node as the key first, then ":", and the value. - // - } else if (composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { +/***/ }), +/* 119 */, +/* 120 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - if (state.line === _line) { - ch = state.input.charCodeAt(state.position); +// Approach: +// +// 1. Get the minimatch set +// 2. For each pattern in the set, PROCESS(pattern, false) +// 3. Store matches per-set, then uniq them +// +// PROCESS(pattern, inGlobStar) +// Get the first [n] items from pattern that are all strings +// Join these together. This is PREFIX. +// If there is no more remaining, then stat(PREFIX) and +// add to matches if it succeeds. END. +// +// If inGlobStar and PREFIX is symlink and points to dir +// set ENTRIES = [] +// else readdir(PREFIX) as ENTRIES +// If fail, END +// +// with ENTRIES +// If pattern[n] is GLOBSTAR +// // handle the case where the globstar match is empty +// // by pruning it out, and testing the resulting pattern +// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) +// // handle other cases. +// for ENTRY in ENTRIES (not dotfiles) +// // attach globstar + tail onto the entry +// // Mark that this entry is a globstar match +// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) +// +// else // not globstar +// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) +// Test ENTRY against pattern[n] +// If fails, continue +// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) +// +// Caveat: +// Cache all stats and readdirs results to minimize syscall. Since all +// we ever care about is existence and directory-ness, we can just keep +// `true` for files, and [children,...] for directories, or `false` for +// things that don't exist. 
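+//
+// A worked sketch of the above (hypothetical pattern, not from this repo):
+// for 'src/**/*.js' the minimatch set is [['src', GLOBSTAR, <re for '*.js'>]].
+// PROCESS consumes the literal 'src' prefix, readdirs it, then both prunes
+// the globstar (PROCESS(['src', '*.js'], false)) and re-attaches it below
+// every child entry (PROCESS(['src', ENTRY, GLOBSTAR, '*.js'], true)),
+// which is how '**' ends up matching zero or more directories:
+//
+//   var glob = require('glob')
+//   glob('src/**/*.js', { nodir: true }, function (er, files) {
+//     if (er) throw er
+//     console.log(files) // every .js file at any depth under src/
+//   })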
- while (is_WHITE_SPACE(ch)) { - ch = state.input.charCodeAt(++state.position); - } +module.exports = glob - if (ch === 0x3A/* : */) { - ch = state.input.charCodeAt(++state.position); +var fs = __webpack_require__(747) +var rp = __webpack_require__(302) +var minimatch = __webpack_require__(595) +var Minimatch = minimatch.Minimatch +var inherits = __webpack_require__(689) +var EE = __webpack_require__(614).EventEmitter +var path = __webpack_require__(622) +var assert = __webpack_require__(357) +var isAbsolute = __webpack_require__(681) +var globSync = __webpack_require__(245) +var common = __webpack_require__(856) +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var inflight = __webpack_require__(634) +var util = __webpack_require__(669) +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored - if (!is_WS_OR_EOL(ch)) { - throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping'); - } +var once = __webpack_require__(49) - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); - keyTag = keyNode = valueNode = null; - } +function glob (pattern, options, cb) { + if (typeof options === 'function') cb = options, options = {} + if (!options) options = {} - detected = true; - atExplicitKey = false; - allowCompact = false; - keyTag = state.tag; - keyNode = state.result; + if (options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return globSync(pattern, options) + } - } else if (detected) { - throwError(state, 'can not read an implicit mapping pair; a colon is missed'); + return new Glob(pattern, options, cb) +} - } else { - state.tag = _tag; - state.anchor = _anchor; - return true; // Keep the result of `composeNode`. - } +glob.sync = globSync +var GlobSync = glob.GlobSync = globSync.GlobSync - } else if (detected) { - throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key'); +// old api surface +glob.glob = glob - } else { - state.tag = _tag; - state.anchor = _anchor; - return true; // Keep the result of `composeNode`. - } +function extend (origin, add) { + if (add === null || typeof add !== 'object') { + return origin + } - } else { - break; // Reading is done. Go to the epilogue. - } + var keys = Object.keys(add) + var i = keys.length + while (i--) { + origin[keys[i]] = add[keys[i]] + } + return origin +} - // - // Common reading code for both explicit and implicit notations. 
- // - if (state.line === _line || state.lineIndent > nodeIndent) { - if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { - if (atExplicitKey) { - keyNode = state.result; - } else { - valueNode = state.result; - } - } +glob.hasMagic = function (pattern, options_) { + var options = extend({}, options_) + options.noprocess = true - if (!atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _pos); - keyTag = keyNode = valueNode = null; - } + var g = new Glob(pattern, options) + var set = g.minimatch.set - skipSeparationSpace(state, true, -1); - ch = state.input.charCodeAt(state.position); - } + if (!pattern) + return false - if (state.lineIndent > nodeIndent && (ch !== 0)) { - throwError(state, 'bad indentation of a mapping entry'); - } else if (state.lineIndent < nodeIndent) { - break; - } + if (set.length > 1) + return true + + for (var j = 0; j < set[0].length; j++) { + if (typeof set[0][j] !== 'string') + return true } - // - // Epilogue. - // + return false +} - // Special case: last mapping's node contains only the key in explicit notation. - if (atExplicitKey) { - storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); +glob.Glob = Glob +inherits(Glob, EE) +function Glob (pattern, options, cb) { + if (typeof options === 'function') { + cb = options + options = null } - // Expose the resulting mapping. - if (detected) { - state.tag = _tag; - state.anchor = _anchor; - state.kind = 'mapping'; - state.result = _result; + if (options && options.sync) { + if (cb) + throw new TypeError('callback provided to sync glob') + return new GlobSync(pattern, options) } - return detected; -} + if (!(this instanceof Glob)) + return new Glob(pattern, options, cb) -function readTagProperty(state) { - var _position, - isVerbatim = false, - isNamed = false, - tagHandle, - tagName, - ch; + setopts(this, pattern, options) + this._didRealPath = false - ch = state.input.charCodeAt(state.position); + // process each pattern in the minimatch set + var n = this.minimatch.set.length - if (ch !== 0x21/* ! */) return false; + // The matches are stored as {: true,...} so that + // duplicates are automagically pruned. + // Later, we do an Object.keys() on these. + // Keep them as a list so we can fill in when nonull is set. + this.matches = new Array(n) - if (state.tag !== null) { - throwError(state, 'duplication of a tag property'); + if (typeof cb === 'function') { + cb = once(cb) + this.on('error', cb) + this.on('end', function (matches) { + cb(null, matches) + }) } - ch = state.input.charCodeAt(++state.position); - - if (ch === 0x3C/* < */) { - isVerbatim = true; - ch = state.input.charCodeAt(++state.position); + var self = this + this._processing = 0 - } else if (ch === 0x21/* ! 
*/) { - isNamed = true; - tagHandle = '!!'; - ch = state.input.charCodeAt(++state.position); + this._emitQueue = [] + this._processQueue = [] + this.paused = false - } else { - tagHandle = '!'; - } + if (this.noprocess) + return this - _position = state.position; + if (n === 0) + return done() - if (isVerbatim) { - do { ch = state.input.charCodeAt(++state.position); } - while (ch !== 0 && ch !== 0x3E/* > */); + var sync = true + for (var i = 0; i < n; i ++) { + this._process(this.minimatch.set[i], i, false, done) + } + sync = false - if (state.position < state.length) { - tagName = state.input.slice(_position, state.position); - ch = state.input.charCodeAt(++state.position); - } else { - throwError(state, 'unexpected end of the stream within a verbatim tag'); + function done () { + --self._processing + if (self._processing <= 0) { + if (sync) { + process.nextTick(function () { + self._finish() + }) + } else { + self._finish() + } } - } else { - while (ch !== 0 && !is_WS_OR_EOL(ch)) { + } +} - if (ch === 0x21/* ! */) { - if (!isNamed) { - tagHandle = state.input.slice(_position - 1, state.position + 1); +Glob.prototype._finish = function () { + assert(this instanceof Glob) + if (this.aborted) + return - if (!PATTERN_TAG_HANDLE.test(tagHandle)) { - throwError(state, 'named tag handle cannot contain such characters'); - } + if (this.realpath && !this._didRealpath) + return this._realpath() - isNamed = true; - _position = state.position + 1; - } else { - throwError(state, 'tag suffix cannot contain exclamation marks'); - } - } + common.finish(this) + this.emit('end', this.found) +} - ch = state.input.charCodeAt(++state.position); - } +Glob.prototype._realpath = function () { + if (this._didRealpath) + return - tagName = state.input.slice(_position, state.position); + this._didRealpath = true - if (PATTERN_FLOW_INDICATORS.test(tagName)) { - throwError(state, 'tag suffix cannot contain flow indicator characters'); - } - } + var n = this.matches.length + if (n === 0) + return this._finish() + + var self = this + for (var i = 0; i < this.matches.length; i++) + this._realpathSet(i, next) - if (tagName && !PATTERN_TAG_URI.test(tagName)) { - throwError(state, 'tag name cannot contain such characters: ' + tagName); + function next () { + if (--n === 0) + self._finish() } +} - if (isVerbatim) { - state.tag = tagName; +Glob.prototype._realpathSet = function (index, cb) { + var matchset = this.matches[index] + if (!matchset) + return cb() - } else if (_hasOwnProperty.call(state.tagMap, tagHandle)) { - state.tag = state.tagMap[tagHandle] + tagName; + var found = Object.keys(matchset) + var self = this + var n = found.length - } else if (tagHandle === '!') { - state.tag = '!' + tagName; + if (n === 0) + return cb() - } else if (tagHandle === '!!') { - state.tag = 'tag:yaml.org,2002:' + tagName; + var set = this.matches[index] = Object.create(null) + found.forEach(function (p, i) { + // If there's a problem with the stat, then it means that + // one or more of the links in the realpath couldn't be + // resolved. just return the abs value in that case. 
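+      // (e.g. a match that is a dangling symlink: realpath may fail with a
+      // syscall of 'stat', and the unresolved absolute path is kept so the
+      // match is not silently dropped)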
+ p = self._makeAbs(p) + rp.realpath(p, self.realpathCache, function (er, real) { + if (!er) + set[real] = true + else if (er.syscall === 'stat') + set[p] = true + else + self.emit('error', er) // srsly wtf right here - } else { - throwError(state, 'undeclared tag handle "' + tagHandle + '"'); - } + if (--n === 0) { + self.matches[index] = set + cb() + } + }) + }) +} - return true; +Glob.prototype._mark = function (p) { + return common.mark(this, p) } -function readAnchorProperty(state) { - var _position, - ch; +Glob.prototype._makeAbs = function (f) { + return common.makeAbs(this, f) +} - ch = state.input.charCodeAt(state.position); +Glob.prototype.abort = function () { + this.aborted = true + this.emit('abort') +} - if (ch !== 0x26/* & */) return false; +Glob.prototype.pause = function () { + if (!this.paused) { + this.paused = true + this.emit('pause') + } +} - if (state.anchor !== null) { - throwError(state, 'duplication of an anchor property'); +Glob.prototype.resume = function () { + if (this.paused) { + this.emit('resume') + this.paused = false + if (this._emitQueue.length) { + var eq = this._emitQueue.slice(0) + this._emitQueue.length = 0 + for (var i = 0; i < eq.length; i ++) { + var e = eq[i] + this._emitMatch(e[0], e[1]) + } + } + if (this._processQueue.length) { + var pq = this._processQueue.slice(0) + this._processQueue.length = 0 + for (var i = 0; i < pq.length; i ++) { + var p = pq[i] + this._processing-- + this._process(p[0], p[1], p[2], p[3]) + } + } } +} - ch = state.input.charCodeAt(++state.position); - _position = state.position; +Glob.prototype._process = function (pattern, index, inGlobStar, cb) { + assert(this instanceof Glob) + assert(typeof cb === 'function') - while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { - ch = state.input.charCodeAt(++state.position); - } + if (this.aborted) + return - if (state.position === _position) { - throwError(state, 'name of an anchor node must contain at least one character'); + this._processing++ + if (this.paused) { + this._processQueue.push([pattern, index, inGlobStar, cb]) + return } - state.anchor = state.input.slice(_position, state.position); - return true; -} - -function readAlias(state) { - var _position, alias, - ch; + //console.error('PROCESS %d', this._processing, pattern) - ch = state.input.charCodeAt(state.position); + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. - if (ch !== 0x2A/* * */) return false; + // see if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index, cb) + return - ch = state.input.charCodeAt(++state.position); - _position = state.position; + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break - while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) { - ch = state.input.charCodeAt(++state.position); + default: + // pattern has some string bits in the front. 
+ // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break } - if (state.position === _position) { - throwError(state, 'name of an alias node must contain at least one character'); - } + var remain = pattern.slice(n) - alias = state.input.slice(_position, state.position); + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix - if (!state.anchorMap.hasOwnProperty(alias)) { - throwError(state, 'unidentified alias "' + alias + '"'); - } + var abs = this._makeAbs(read) - state.result = state.anchorMap[alias]; - skipSeparationSpace(state, true, -1); - return true; + //if ignored, skip _processing + if (childrenIgnored(this, read)) + return cb() + + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) } -function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) { - var allowBlockStyles, - allowBlockScalars, - allowBlockCollections, - indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this parentIndent) { - indentStatus = 1; - } else if (state.lineIndent === parentIndent) { - indentStatus = 0; - } else if (state.lineIndent < parentIndent) { - indentStatus = -1; + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) + + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e } - } - } - - if (indentStatus === 1) { - while (readTagProperty(state) || readAnchorProperty(state)) { - if (skipSeparationSpace(state, true, -1)) { - atNewLine = true; - allowBlockCollections = allowBlockStyles; - if (state.lineIndent > parentIndent) { - indentStatus = 1; - } else if (state.lineIndent === parentIndent) { - indentStatus = 0; - } else if (state.lineIndent < parentIndent) { - indentStatus = -1; - } - } else { - allowBlockCollections = false; + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) } + this._emitMatch(index, e) } + // This was the last one, and no stats were needed + return cb() } - if (allowBlockCollections) { - allowBlockCollections = atNewLine || allowCompact; + // now test all matched entries as stand-ins for that part + // of the pattern. 
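+  // e.g. (hypothetical): for 'src/*/*.js', remain starts as
+  // [<re for '*'>, <re for '*.js'>] and matchedEntries is ['a', 'b'];
+  // the loop below then recurses on ['src/a', <re for '*.js'>] and
+  // ['src/b', <re for '*.js'>]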
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) { + if (prefix !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + this._process([e].concat(remain), index, inGlobStar, cb) } + cb() +} - if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { - if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { - flowIndent = parentIndent; - } else { - flowIndent = parentIndent + 1; - } +Glob.prototype._emitMatch = function (index, e) { + if (this.aborted) + return - blockIndent = state.position - state.lineStart; + if (isIgnored(this, e)) + return - if (indentStatus === 1) { - if (allowBlockCollections && - (readBlockSequence(state, blockIndent) || - readBlockMapping(state, blockIndent, flowIndent)) || - readFlowCollection(state, flowIndent)) { - hasContent = true; - } else { - if ((allowBlockScalars && readBlockScalar(state, flowIndent)) || - readSingleQuotedScalar(state, flowIndent) || - readDoubleQuotedScalar(state, flowIndent)) { - hasContent = true; + if (this.paused) { + this._emitQueue.push([index, e]) + return + } - } else if (readAlias(state)) { - hasContent = true; + var abs = isAbsolute(e) ? e : this._makeAbs(e) - if (state.tag !== null || state.anchor !== null) { - throwError(state, 'alias node should not have any properties'); - } + if (this.mark) + e = this._mark(e) - } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { - hasContent = true; + if (this.absolute) + e = abs - if (state.tag === null) { - state.tag = '?'; - } - } + if (this.matches[index][e]) + return - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } - } - } else if (indentStatus === 0) { - // Special case: block sequences are allowed to have same indentation level as the parent. - // http://www.yaml.org/spec/1.2/spec.html#id2799784 - hasContent = allowBlockCollections && readBlockSequence(state, blockIndent); - } + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return } - if (state.tag !== null && state.tag !== '!') { - if (state.tag === '?') { - // Implicit resolving is not allowed for non-scalar types, and '?' - // non-specific tag is only automatically assigned to plain scalars. - // - // We only need to check kind conformity in case user explicitly assigns '?' - // tag, for example like this: "! [0]" - // - if (state.result !== null && state.kind !== 'scalar') { - throwError(state, 'unacceptable node kind for ! 
tag; it should be "scalar", not "' + state.kind + '"'); - } + this.matches[index][e] = true - for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { - type = state.implicitTypes[typeIndex]; + var st = this.statCache[abs] + if (st) + this.emit('stat', e, st) - if (type.resolve(state.result)) { // `state.result` updated in resolver if matched - state.result = type.construct(state.result); - state.tag = type.tag; - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } - break; - } - } - } else if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { - type = state.typeMap[state.kind || 'fallback'][state.tag]; + this.emit('match', e) +} - if (state.result !== null && type.kind !== state.kind) { - throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); - } +Glob.prototype._readdirInGlobStar = function (abs, cb) { + if (this.aborted) + return - if (!type.resolve(state.result)) { // `state.result` updated in resolver if matched - throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); - } else { - state.result = type.construct(state.result); - if (state.anchor !== null) { - state.anchorMap[state.anchor] = state.result; - } - } - } else { - throwError(state, 'unknown tag !<' + state.tag + '>'); - } - } + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false, cb) - if (state.listener !== null) { - state.listener('close', state); + var lstatkey = 'lstat\0' + abs + var self = this + var lstatcb = inflight(lstatkey, lstatcb_) + + if (lstatcb) + fs.lstat(abs, lstatcb) + + function lstatcb_ (er, lstat) { + if (er && er.code === 'ENOENT') + return cb() + + var isSym = lstat && lstat.isSymbolicLink() + self.symlinks[abs] = isSym + + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. 
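+    // (lstat never follows the link itself, so a symlink that points at a
+    // directory is recorded in self.symlinks and still goes through the
+    // _readdir branch below)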
+ if (!isSym && lstat && !lstat.isDirectory()) { + self.cache[abs] = 'FILE' + cb() + } else + self._readdir(abs, false, cb) } - return state.tag !== null || state.anchor !== null || hasContent; } -function readDocument(state) { - var documentStart = state.position, - _position, - directiveName, - directiveArgs, - hasDirectives = false, - ch; +Glob.prototype._readdir = function (abs, inGlobStar, cb) { + if (this.aborted) + return - state.version = null; - state.checkLineBreaks = state.legacy; - state.tagMap = {}; - state.anchorMap = {}; + cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) + if (!cb) + return - while ((ch = state.input.charCodeAt(state.position)) !== 0) { - skipSeparationSpace(state, true, -1); + //console.error('RD %j %j', +inGlobStar, abs) + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs, cb) - ch = state.input.charCodeAt(state.position); + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return cb() - if (state.lineIndent > 0 || ch !== 0x25/* % */) { - break; - } + if (Array.isArray(c)) + return cb(null, c) + } - hasDirectives = true; - ch = state.input.charCodeAt(++state.position); - _position = state.position; + var self = this + fs.readdir(abs, readdirCb(this, abs, cb)) +} - while (ch !== 0 && !is_WS_OR_EOL(ch)) { - ch = state.input.charCodeAt(++state.position); - } +function readdirCb (self, abs, cb) { + return function (er, entries) { + if (er) + self._readdirError(abs, er, cb) + else + self._readdirEntries(abs, entries, cb) + } +} - directiveName = state.input.slice(_position, state.position); - directiveArgs = []; +Glob.prototype._readdirEntries = function (abs, entries, cb) { + if (this.aborted) + return - if (directiveName.length < 1) { - throwError(state, 'directive name must not be less than one character in length'); + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true } + } - while (ch !== 0) { - while (is_WHITE_SPACE(ch)) { - ch = state.input.charCodeAt(++state.position); - } + this.cache[abs] = entries + return cb(null, entries) +} - if (ch === 0x23/* # */) { - do { ch = state.input.charCodeAt(++state.position); } - while (ch !== 0 && !is_EOL(ch)); - break; - } +Glob.prototype._readdirError = function (f, er, cb) { + if (this.aborted) + return - if (is_EOL(ch)) break; + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + this.emit('error', error) + this.abort() + } + break - _position = state.position; + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break - while (ch !== 0 && !is_WS_OR_EOL(ch)) { - ch = state.input.charCodeAt(++state.position); + default: // some unusual error. Treat as failure. 
+ this.cache[this._makeAbs(f)] = false + if (this.strict) { + this.emit('error', er) + // If the error is handled, then we abort + // if not, we threw out of here + this.abort() } + if (!this.silent) + console.error('glob error', er) + break + } - directiveArgs.push(state.input.slice(_position, state.position)); - } + return cb() +} - if (ch !== 0) readLineBreak(state); +Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { + var self = this + this._readdir(abs, inGlobStar, function (er, entries) { + self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) + }) +} - if (_hasOwnProperty.call(directiveHandlers, directiveName)) { - directiveHandlers[directiveName](state, directiveName, directiveArgs); - } else { - throwWarning(state, 'unknown document directive "' + directiveName + '"'); - } - } - skipSeparationSpace(state, true, -1); +Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { + //console.error('pgs2', prefix, remain[0], entries) - if (state.lineIndent === 0 && - state.input.charCodeAt(state.position) === 0x2D/* - */ && - state.input.charCodeAt(state.position + 1) === 0x2D/* - */ && - state.input.charCodeAt(state.position + 2) === 0x2D/* - */) { - state.position += 3; - skipSeparationSpace(state, true, -1); + // no entries means not a dir, so it can never have matches + // foo.txt/** doesn't match foo.txt + if (!entries) + return cb() - } else if (hasDirectives) { - throwError(state, 'directives end mark is expected'); - } + // test without the globstar, and with every child both below + // and replacing the globstar. + var remainWithoutGlobStar = remain.slice(1) + var gspref = prefix ? [ prefix ] : [] + var noGlobStar = gspref.concat(remainWithoutGlobStar) - composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); - skipSeparationSpace(state, true, -1); + // the noGlobStar pattern exits the inGlobStar state + this._process(noGlobStar, index, false, cb) - if (state.checkLineBreaks && - PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { - throwWarning(state, 'non-ASCII line breaks are interpreted as content'); - } + var isSym = this.symlinks[abs] + var len = entries.length - state.documents.push(state.result); + // If it's a symlink, and we're in a globstar, then stop + if (isSym && inGlobStar) + return cb() - if (state.position === state.lineStart && testDocumentSeparator(state)) { + for (var i = 0; i < len; i++) { + var e = entries[i] + if (e.charAt(0) === '.' && !this.dot) + continue - if (state.input.charCodeAt(state.position) === 0x2E/* . */) { - state.position += 3; - skipSeparationSpace(state, true, -1); - } - return; - } + // these two cases enter the inGlobStar state + var instead = gspref.concat(entries[i], remainWithoutGlobStar) + this._process(instead, index, true, cb) - if (state.position < (state.length - 1)) { - throwError(state, 'end of the stream or a document separator is expected'); - } else { - return; + var below = gspref.concat(entries[i], remain) + this._process(below, index, true, cb) } + + cb() } +Glob.prototype._processSimple = function (prefix, index, cb) { + // XXX review this. Shouldn't it be doing the mounting etc + // before doing stat? kinda weird? 
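+  // (a "simple" pattern here is one with no magic left, e.g. the literal
+  // 'README.md': it is stat'ed once and, when it exists, emitted as its
+  // own single match)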
+ var self = this + this._stat(prefix, function (er, exists) { + self._processSimple2(prefix, index, er, exists, cb) + }) +} +Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { -function loadDocuments(input, options) { - input = String(input); - options = options || {}; + //console.error('ps2', prefix, exists) - if (input.length !== 0) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) - // Add tailing `\n` if not exists - if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ && - input.charCodeAt(input.length - 1) !== 0x0D/* CR */) { - input += '\n'; - } + // If it doesn't exist, then just mark the lack of results + if (!exists) + return cb() - // Strip BOM - if (input.charCodeAt(0) === 0xFEFF) { - input = input.slice(1); + if (prefix && isAbsolute(prefix) && !this.nomount) { + var trail = /[\/\\]$/.test(prefix) + if (prefix.charAt(0) === '/') { + prefix = path.join(this.root, prefix) + } else { + prefix = path.resolve(this.root, prefix) + if (trail) + prefix += '/' } } - var state = new State(input, options); + if (process.platform === 'win32') + prefix = prefix.replace(/\\/g, '/') - var nullpos = input.indexOf('\0'); + // Mark this as a match + this._emitMatch(index, prefix) + cb() +} - if (nullpos !== -1) { - state.position = nullpos; - throwError(state, 'null byte is not allowed in input'); - } +// Returns either 'DIR', 'FILE', or false +Glob.prototype._stat = function (f, cb) { + var abs = this._makeAbs(f) + var needDir = f.slice(-1) === '/' - // Use 0 as string terminator. That significantly simplifies bounds check. - state.input += '\0'; + if (f.length > this.maxLength) + return cb() - while (state.input.charCodeAt(state.position) === 0x20/* Space */) { - state.lineIndent += 1; - state.position += 1; - } + if (!this.stat && ownProp(this.cache, abs)) { + var c = this.cache[abs] - while (state.position < (state.length - 1)) { - readDocument(state); - } + if (Array.isArray(c)) + c = 'DIR' - return state.documents; -} + // It exists, but maybe not how we need it + if (!needDir || c === 'DIR') + return cb(null, c) + if (needDir && c === 'FILE') + return cb() -function loadAll(input, iterator, options) { - if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') { - options = iterator; - iterator = null; + // otherwise we have to stat, because maybe c=true + // if we know it exists, but not what it is. } - var documents = loadDocuments(input, options); - - if (typeof iterator !== 'function') { - return documents; + var exists + var stat = this.statCache[abs] + if (stat !== undefined) { + if (stat === false) + return cb(null, stat) + else { + var type = stat.isDirectory() ? 'DIR' : 'FILE' + if (needDir && type === 'FILE') + return cb() + else + return cb(null, type, stat) + } } - for (var index = 0, length = documents.length; index < length; index += 1) { - iterator(documents[index]); + var self = this + var statcb = inflight('stat\0' + abs, lstatcb_) + if (statcb) + fs.lstat(abs, statcb) + + function lstatcb_ (er, lstat) { + if (lstat && lstat.isSymbolicLink()) { + // If it's a symlink, then treat it as the target, unless + // the target does not exist, then treat it as a file. 
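+      // (e.g. a dangling `ln -s gone link`: lstat succeeds, the follow-up
+      // stat errors, and the lstat result is passed on instead, so the
+      // entry is cached as 'FILE' rather than surfacing the error)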
+ return fs.stat(abs, function (er, stat) { + if (er) + self._stat2(f, abs, null, lstat, cb) + else + self._stat2(f, abs, er, stat, cb) + }) + } else { + self._stat2(f, abs, er, lstat, cb) + } } } - -function load(input, options) { - var documents = loadDocuments(input, options); - - if (documents.length === 0) { - /*eslint-disable no-undefined*/ - return undefined; - } else if (documents.length === 1) { - return documents[0]; +Glob.prototype._stat2 = function (f, abs, er, stat, cb) { + if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { + this.statCache[abs] = false + return cb() } - throw new YAMLException('expected a single document in the stream, but found more'); -} + var needDir = f.slice(-1) === '/' + this.statCache[abs] = stat -function safeLoadAll(input, iterator, options) { - if (typeof iterator === 'object' && iterator !== null && typeof options === 'undefined') { - options = iterator; - iterator = null; - } + if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) + return cb(null, false, stat) + + var c = true + if (stat) + c = stat.isDirectory() ? 'DIR' : 'FILE' + this.cache[abs] = this.cache[abs] || c - return loadAll(input, iterator, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); + if (needDir && c === 'FILE') + return cb() + + return cb(null, c, stat) } -function safeLoad(input, options) { - return load(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); -} +/***/ }), +/* 121 */, +/* 122 */, +/* 123 */, +/* 124 */, +/* 125 */, +/* 126 */, +/* 127 */ +/***/ (function(module, __unusedexports, __webpack_require__) { +const assert = __webpack_require__(357); -module.exports.loadAll = loadAll; -module.exports.load = load; -module.exports.safeLoadAll = safeLoadAll; -module.exports.safeLoad = safeLoad; +const allowedVersions = ["2.0"]; +function validateDefinition(definition) { + assert( + definition, + "definition file is empty or couldn't be loaded, please check 'definition-file' input" + ); + validateVersion(definition.version, "definition"); +} -/***/ }), -/* 637 */, -/* 638 */, -/* 639 */, -/* 640 */, -/* 641 */, -/* 642 */, -/* 643 */, -/* 644 */, -/* 645 */ -/***/ (function(module) { +function validateDependencies(dependencies) { + validateVersion(dependencies.version, "dependencies"); +} -function getNodeTriggeringJob(context, nodeChain) { - return nodeChain.find( - e => - e.project === context.config.github.repository || - e.project === context.config.github.project +function validateVersion(version, fileName) { + assert( + version, + `version is not defined on ${fileName} file. Please add version: x, where x is one of these values ${allowedVersions}` + ); + assert( + allowedVersions.includes(version), + `version ${version} is not allowed in ${fileName} file. Allowed versions: ${allowedVersions}` ); } -module.exports = { getNodeTriggeringJob }; +function validateNode(node) { + assert( + node, + "node is undefined. Please check your definition file. For example, projects declared as a dependency for another project has to be already defined (I mean from order point of view)." + ); + assert( + node.project, + "project can't be undefined, please properly define your file." + ); + assert( + node.project.split("/").length === 2, + `project has to defined following \`group/project\` pattern (i.e. \`kiegroup/drools\`). 
"${node.project}" instead` + ); +} + +module.exports = { validateDefinition, validateDependencies, validateNode }; /***/ }), -/* 646 */, -/* 647 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +/* 128 */, +/* 129 */ +/***/ (function(module) { -"use strict"; - -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var __asyncValues = (this && this.__asyncValues) || function (o) { - if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); - var m = o[Symbol.asyncIterator], i; - return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); - function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } - function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } -}; -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; - result["default"] = mod; - return result; -}; -Object.defineProperty(exports, "__esModule", { value: true }); -const fs = __importStar(__webpack_require__(747)); -const zlib = __importStar(__webpack_require__(761)); -const util_1 = __webpack_require__(669); -const stat = util_1.promisify(fs.stat); -/** - * Creates a Gzip compressed file of an original file at the provided temporary filepath location - * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified - * @param {string} tempFilePath the location of where the Gzip file will be created - * @returns the size of gzip file that gets created - */ -function createGZipFileOnDisk(originalFilePath, tempFilePath) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve, reject) => { - const inputStream = fs.createReadStream(originalFilePath); - const gzip = zlib.createGzip(); - const outputStream = fs.createWriteStream(tempFilePath); - inputStream.pipe(gzip).pipe(outputStream); - outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () { - // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload - const size = (yield stat(tempFilePath)).size; - resolve(size); - })); - outputStream.on('error', error => { - // eslint-disable-next-line no-console - console.log(error); - reject; - }); - }); - }); -} -exports.createGZipFileOnDisk = createGZipFileOnDisk; -/** - * Creates a GZip file in memory using a buffer. 
Should be used for smaller files to reduce disk I/O - * @param originalFilePath the path to the original file that is being GZipped - * @returns a buffer with the GZip file - */ -function createGZipFileInBuffer(originalFilePath) { - return __awaiter(this, void 0, void 0, function* () { - return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - var e_1, _a; - const inputStream = fs.createReadStream(originalFilePath); - const gzip = zlib.createGzip(); - inputStream.pipe(gzip); - // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 - const chunks = []; - try { - for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { - const chunk = gzip_1_1.value; - chunks.push(chunk); - } - } - catch (e_1_1) { e_1 = { error: e_1_1 }; } - finally { - try { - if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); - } - finally { if (e_1) throw e_1.error; } - } - resolve(Buffer.concat(chunks)); - })); - }); -} -exports.createGZipFileInBuffer = createGZipFileInBuffer; -//# sourceMappingURL=upload-gzip.js.map +module.exports = require("child_process"); /***/ }), -/* 648 */, -/* 649 */, -/* 650 */, -/* 651 */, -/* 652 */, -/* 653 */, -/* 654 */ +/* 130 */, +/* 131 */, +/* 132 */, +/* 133 */, +/* 134 */, +/* 135 */, +/* 136 */, +/* 137 */ /***/ (function(module, __unusedexports, __webpack_require__) { -"use strict"; - - -var Type = __webpack_require__(547); - -module.exports = new Type('tag:yaml.org,2002:seq', { - kind: 'sequence', - construct: function (data) { return data !== null ? data : []; } -}); +const { + checkoutDefinitionTree, + getPlaceHolders +} = __webpack_require__(330); +const { executeBuild } = __webpack_require__(11); +const { + getOrderedListForProject +} = __webpack_require__(352); +const { printCheckoutInformation } = __webpack_require__(656); +const { logger } = __webpack_require__(79); +const core = __webpack_require__(470); +const { + archiveArtifacts +} = __webpack_require__(503); -/***/ }), -/* 655 */, -/* 656 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +async function start(context, isArchiveArtifacts = true) { + core.startGroup( + `[Full Downstream Flow] Checking out ${context.config.github.groupProject} and its dependencies` + ); + const nodeChain = await getOrderedListForProject( + context.config.github.inputs.definitionFile, + context.config.github.repository, + await getPlaceHolders(context, context.config.github.inputs.definitionFile) + ); -const { logger } = __webpack_require__(79); + logger.info( + `Tree for project ${ + context.config.github.repository + }. Chain: ${nodeChain.map(node => "\n" + node.project)}` + ); + const checkoutInfo = await checkoutDefinitionTree(context, nodeChain); + core.endGroup(); -const groupBy = (checkoutInfo, key) => { - return Object.values(checkoutInfo).reduce((acc, checkInfo) => { - if (checkInfo) { - (acc[checkInfo[key]] = acc[checkInfo[key]] || []).push(checkInfo); - } - return acc; - }, {}); -}; + core.startGroup(`[Full Downstream Flow] Checkout Summary...`); + printCheckoutInformation(checkoutInfo); + core.endGroup(); -function printCheckoutInformation(checkoutInfo) { - if (checkoutInfo && Object.keys(checkoutInfo).length) { - logger.info("----------------------------------------------"); - Object.entries(checkoutInfo).forEach(([project, checkInfo]) => - logger.info( - checkInfo - ? 
`${checkInfo.group}/${checkInfo.project}:${checkInfo.branch}.${ - checkInfo.merge - ? ` It has Been merged with ${checkInfo.targetGroup}/${checkInfo.project}:${checkInfo.targetBranch}` - : "" - }` - : `${project}: No checkout information` - ) + const executionResult = await executeBuild( + context.config.rootFolder, + nodeChain, + context.config.github.repository + ) + .then(() => true) + .catch(e => e); + + if (isArchiveArtifacts) { + core.startGroup(`[Full Downstream Flow] Archiving artifacts...`); + await archiveArtifacts( + nodeChain.find(node => node.project === context.config.github.repository), + nodeChain, + executionResult === true ? ["success", "always"] : ["failure", "always"] ); - logger.info("----------------------------------------------"); - Object.entries(groupBy(checkoutInfo, "branch")).forEach( - ([branch, checkoutInfoList]) => { - logger.info( - `Projects taken from branch "${branch}":${checkoutInfoList.map( - checkInfo => ` - ${checkInfo.group}/${checkInfo.project}${ - checkInfo.merge - ? `. Merged with ${checkInfo.targetGroup}/${checkInfo.project}:${checkInfo.targetBranch}` - : "" - }` - )}` - ); - } + core.endGroup(); + } else { + logger.info("Archive artifact won't be executed"); + } + + if (executionResult !== true) { + logger.error(executionResult); + throw new Error( + `Command executions have failed, please review latest execution ${executionResult}` ); - logger.info("----------------------------------------------"); } } module.exports = { - printCheckoutInformation + start }; /***/ }), -/* 657 */, -/* 658 */, -/* 659 */, -/* 660 */, -/* 661 */, -/* 662 */, -/* 663 */, -/* 664 */, -/* 665 */, -/* 666 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -let _fs -try { - _fs = __webpack_require__(598) -} catch (_) { - _fs = __webpack_require__(747) -} -const universalify = __webpack_require__(0) -const { stringify, stripBom } = __webpack_require__(477) +/* 138 */, +/* 139 */, +/* 140 */, +/* 141 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { -async function _readFile (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } +"use strict"; - const fs = options.fs || _fs - const shouldThrow = 'throws' in options ? options.throws : true +var net = __webpack_require__(631); +var tls = __webpack_require__(16); +var http = __webpack_require__(605); +var https = __webpack_require__(211); +var events = __webpack_require__(614); +var assert = __webpack_require__(357); +var util = __webpack_require__(669); - let data = await universalify.fromCallback(fs.readFile)(file, options) - data = stripBom(data) +exports.httpOverHttp = httpOverHttp; +exports.httpsOverHttp = httpsOverHttp; +exports.httpOverHttps = httpOverHttps; +exports.httpsOverHttps = httpsOverHttps; - let obj - try { - obj = JSON.parse(data, options ? options.reviver : null) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } - return obj +function httpOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + return agent; } -const readFile = universalify.fromPromise(_readFile) - -function readFileSync (file, options = {}) { - if (typeof options === 'string') { - options = { encoding: options } - } - - const fs = options.fs || _fs - - const shouldThrow = 'throws' in options ? 
options.throws : true - - try { - let content = fs.readFileSync(file, options) - content = stripBom(content) - return JSON.parse(content, options.reviver) - } catch (err) { - if (shouldThrow) { - err.message = `${file}: ${err.message}` - throw err - } else { - return null - } - } +function httpsOverHttp(options) { + var agent = new TunnelingAgent(options); + agent.request = http.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -async function _writeFile (file, obj, options = {}) { - const fs = options.fs || _fs - - const str = stringify(obj, options) +function httpOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + return agent; +} - await universalify.fromCallback(fs.writeFile)(file, str, options) +function httpsOverHttps(options) { + var agent = new TunnelingAgent(options); + agent.request = https.request; + agent.createSocket = createSecureSocket; + agent.defaultPort = 443; + return agent; } -const writeFile = universalify.fromPromise(_writeFile) -function writeFileSync (file, obj, options = {}) { - const fs = options.fs || _fs +function TunnelingAgent(options) { + var self = this; + self.options = options || {}; + self.proxyOptions = self.options.proxy || {}; + self.maxSockets = self.options.maxSockets || http.Agent.defaultMaxSockets; + self.requests = []; + self.sockets = []; - const str = stringify(obj, options) - // not sure if fs.writeFileSync returns anything, but just in case - return fs.writeFileSync(file, str, options) + self.on('free', function onFree(socket, host, port, localAddress) { + var options = toOptions(host, port, localAddress); + for (var i = 0, len = self.requests.length; i < len; ++i) { + var pending = self.requests[i]; + if (pending.host === options.host && pending.port === options.port) { + // Detect the request to connect same origin server, + // reuse the connection. + self.requests.splice(i, 1); + pending.request.onSocket(socket); + return; + } + } + socket.destroy(); + self.removeSocket(socket); + }); } +util.inherits(TunnelingAgent, events.EventEmitter); -const jsonfile = { - readFile, - readFileSync, - writeFile, - writeFileSync -} +TunnelingAgent.prototype.addRequest = function addRequest(req, host, port, localAddress) { + var self = this; + var options = mergeOptions({request: req}, self.options, toOptions(host, port, localAddress)); -module.exports = jsonfile + if (self.sockets.length >= this.maxSockets) { + // We are over limit so we'll add it to the queue. + self.requests.push(options); + return; + } + // If we are under maxSockets create a new one. 
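+  // e.g. (hypothetical values) with proxy {host: 'proxy.local', port: 8080}
+  // and a request to example.com:443, createSocket sends
+  // `CONNECT example.com:443 HTTP/1.1` to proxy.local:8080 and hands the
+  // raw socket back once the proxy answers 200 (see onConnect below)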
+ self.createSocket(options, function(socket) { + socket.on('free', onFree); + socket.on('close', onCloseOrRemove); + socket.on('agentRemove', onCloseOrRemove); + req.onSocket(socket); -/***/ }), -/* 667 */, -/* 668 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + function onFree() { + self.emit('free', socket, options); + } -const { - getDefinitionFile, - getStartingProject -} = __webpack_require__(933); + function onCloseOrRemove(err) { + self.removeSocket(socket); + socket.removeListener('free', onFree); + socket.removeListener('close', onCloseOrRemove); + socket.removeListener('agentRemove', onCloseOrRemove); + } + }); +}; -function getInputs() { - return { - definitionFile: getDefinitionFile(), - startingProject: getStartingProject() - }; -} +TunnelingAgent.prototype.createSocket = function createSocket(options, cb) { + var self = this; + var placeholder = {}; + self.sockets.push(placeholder); -async function createCommonConfig(eventData, rootFolder, env) { - async function parseGitHub(eventData, env) { - return { - serverUrl: env["GITHUB_SERVER_URL"] - ? env["GITHUB_SERVER_URL"].replace(/\/$/, "") - : undefined, // https://github.com - action: env["GITHUB_ACTION"], // Ginxogithub-action-build-chain - sourceGroup: eventData.sourceGroup, - author: eventData.author, - actor: env["GITHUB_ACTOR"], // Ginxo - sourceBranch: env["GITHUB_HEAD_REF"], // Ginxo-patch-1 - targetBranch: env["GITHUB_BASE_REF"], // master - jobId: env["GITHUB_JOB"], // build-chain - sourceRepository: eventData.sourceRepository, - repository: env["GITHUB_REPOSITORY"], // Ginxo/lienzo-tests - group: env["GITHUB_REPOSITORY"].split("/")[0], // Ginxo - project: env["GITHUB_REPOSITORY"].split("/")[1], // lienzo-tests - groupProject: env["GITHUB_REPOSITORY"], - workflowName: env["GITHUB_WORKFLOW"], // Build Chain - ref: env["GITHUB_REF"], // refs/pull/1/merge' - inputs: getInputs() - }; + var connectOptions = mergeOptions({}, self.proxyOptions, { + method: 'CONNECT', + path: options.host + ':' + options.port, + agent: false, + headers: { + host: options.host + ':' + options.port + } + }); + if (options.localAddress) { + connectOptions.localAddress = options.localAddress; + } + if (connectOptions.proxyAuth) { + connectOptions.headers = connectOptions.headers || {}; + connectOptions.headers['Proxy-Authorization'] = 'Basic ' + + new Buffer(connectOptions.proxyAuth).toString('base64'); } - return { - github: await parseGitHub(eventData, env), - rootFolder: rootFolder === undefined ? "" : rootFolder - }; -} -module.exports = { - createCommonConfig, - getInputs -}; + debug('making CONNECT request'); + var connectReq = self.request(connectOptions); + connectReq.useChunkedEncodingByDefault = false; // for v0.6 + connectReq.once('response', onResponse); // for v0.6 + connectReq.once('upgrade', onUpgrade); // for v0.6 + connectReq.once('connect', onConnect); // for v0.7 or later + connectReq.once('error', onError); + connectReq.end(); + function onResponse(res) { + // Very hacky. This is necessary to avoid http-parser leaks. + res.upgrade = true; + } -/***/ }), -/* 669 */ -/***/ (function(module) { + function onUpgrade(res, socket, head) { + // Hacky. 
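    // (deferring with nextTick lets the current http-parser callback unwind
    // before the socket is handed over to onConnect)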
+ process.nextTick(function() { + onConnect(res, socket, head); + }); + } -module.exports = require("util"); + function onConnect(res, socket, head) { + connectReq.removeAllListeners(); + socket.removeAllListeners(); -/***/ }), -/* 670 */, -/* 671 */, -/* 672 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { + if (res.statusCode !== 200) { + debug('tunneling socket could not be established, statusCode=%d', + res.statusCode); + socket.destroy(); + var error = new Error('tunneling socket could not be established, ' + + 'statusCode=' + res.statusCode); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + if (head.length > 0) { + debug('got illegal response body from proxy'); + socket.destroy(); + var error = new Error('got illegal response body from proxy'); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + return; + } + debug('tunneling connection has established'); + self.sockets[self.sockets.indexOf(placeholder)] = socket; + return cb(socket); + } -"use strict"; + function onError(cause) { + connectReq.removeAllListeners(); -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); + debug('tunneling socket could not be established, cause=%s\n', + cause.message, cause.stack); + var error = new Error('tunneling socket could not be established, ' + + 'cause=' + cause.message); + error.code = 'ECONNRESET'; + options.request.emit('error', error); + self.removeSocket(placeholder); + } }; -var _a; -Object.defineProperty(exports, "__esModule", { value: true }); -const assert_1 = __webpack_require__(357); -const fs = __webpack_require__(747); -const path = __webpack_require__(622); -_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; -exports.IS_WINDOWS = process.platform === 'win32'; -function exists(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exports.stat(fsPath); - } - catch (err) { - if (err.code === 'ENOENT') { - return false; - } - throw err; - } - return true; - }); -} -exports.exists = exists; -function isDirectory(fsPath, useStat = false) { - return __awaiter(this, void 0, void 0, function* () { - const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); - return stats.isDirectory(); - }); -} -exports.isDirectory = isDirectory; -/** - * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: - * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). 
- */ -function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); - } - if (exports.IS_WINDOWS) { - return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello - ); // e.g. C: or C:\hello - } - return p.startsWith('/'); -} -exports.isRooted = isRooted; -/** - * Recursively create a directory at `fsPath`. - * - * This implementation is optimistic, meaning it attempts to create the full - * path first, and backs up the path stack from there. - * - * @param fsPath The path to create - * @param maxDepth The maximum recursion depth - * @param depth The current recursion depth - */ -function mkdirP(fsPath, maxDepth = 1000, depth = 1) { - return __awaiter(this, void 0, void 0, function* () { - assert_1.ok(fsPath, 'a path argument must be provided'); - fsPath = path.resolve(fsPath); - if (depth >= maxDepth) - return exports.mkdir(fsPath); - try { - yield exports.mkdir(fsPath); - return; - } - catch (err) { - switch (err.code) { - case 'ENOENT': { - yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); - yield exports.mkdir(fsPath); - return; - } - default: { - let stats; - try { - stats = yield exports.stat(fsPath); - } - catch (err2) { - throw err; - } - if (!stats.isDirectory()) - throw err; - } - } - } - }); -} -exports.mkdirP = mkdirP; -/** - * Best effort attempt to determine whether a file exists and is executable. - * @param filePath file path to check - * @param extensions additional file extensions to try - * @return if file exists and is executable, returns the file path. otherwise empty string. - */ -function tryGetExecutablePath(filePath, extensions) { - return __awaiter(this, void 0, void 0, function* () { - let stats = undefined; - try { - // test file exists - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // on Windows, test for valid extension - const upperExt = path.extname(filePath).toUpperCase(); - if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - // try each extension - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = undefined; - try { - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // preserve the case of the actual file (since an extension was appended) - try { - const directory = path.dirname(filePath); - const upperName = path.basename(filePath).toUpperCase(); - for (const actualName of yield exports.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName); - break; - } - } - } - catch (err) { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); - } - return filePath; - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - } - return ''; + +TunnelingAgent.prototype.removeSocket = function 
removeSocket(socket) { + var pos = this.sockets.indexOf(socket) + if (pos === -1) { + return; + } + this.sockets.splice(pos, 1); + + var pending = this.requests.shift(); + if (pending) { + // If we have pending requests and a socket gets closed a new one + // needs to be created to take over in the pool for the one that closed. + this.createSocket(pending, function(socket) { + pending.request.onSocket(socket); }); -} -exports.tryGetExecutablePath = tryGetExecutablePath; -function normalizeSeparators(p) { - p = p || ''; - if (exports.IS_WINDOWS) { - // convert slashes on Windows - p = p.replace(/\//g, '\\'); - // remove redundant slashes - return p.replace(/\\\\+/g, '\\'); - } - // remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -// on Mac/Linux, test the execute bit -// R W X R W X R W X -// 256 128 64 32 16 8 4 2 1 -function isUnixExecutable(stats) { - return ((stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid())); -} -//# sourceMappingURL=io-util.js.map + } +}; -/***/ }), -/* 673 */, -/* 674 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +function createSecureSocket(options, cb) { + var self = this; + TunnelingAgent.prototype.createSocket.call(self, options, function(socket) { + var hostHeader = options.request.getHeader('host'); + var tlsOptions = mergeOptions({}, self.options, { + socket: socket, + servername: hostHeader ? hostHeader.replace(/:.*$/, '') : options.host + }); -var wrappy = __webpack_require__(293) -var reqs = Object.create(null) -var once = __webpack_require__(49) + // 0 is dummy port for v0.6 + var secureSocket = tls.connect(0, tlsOptions); + self.sockets[self.sockets.indexOf(socket)] = secureSocket; + cb(secureSocket); + }); +} -module.exports = wrappy(inflight) -function inflight (key, cb) { - if (reqs[key]) { - reqs[key].push(cb) - return null - } else { - reqs[key] = [cb] - return makeres(key) +function toOptions(host, port, localAddress) { + if (typeof host === 'string') { // since v0.10 + return { + host: host, + port: port, + localAddress: localAddress + }; } + return host; // for v0.11 or later } -function makeres (key) { - return once(function RES () { - var cbs = reqs[key] - var len = cbs.length - var args = slice(arguments) - - // XXX It's somewhat ambiguous whether a new callback added in this - // pass should be queued for later execution if something in the - // list of callbacks throws, or if it should just be discarded. - // However, it's such an edge case that it hardly matters, and either - // choice is likely as surprising as the other. - // As it happens, we do go ahead and schedule it for later execution. - try { - for (var i = 0; i < len; i++) { - cbs[i].apply(null, args) - } - } finally { - if (cbs.length > len) { - // added more in the interim. - // de-zalgo, just in case, but don't call again. 
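        // (i.e. callbacks registered while RES was running are dispatched on a
        // fresh tick below, so a caller never sees them fire synchronously)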
- cbs.splice(0, len) - process.nextTick(function () { - RES.apply(null, args) - }) - } else { - delete reqs[key] +function mergeOptions(target) { + for (var i = 1, len = arguments.length; i < len; ++i) { + var overrides = arguments[i]; + if (typeof overrides === 'object') { + var keys = Object.keys(overrides); + for (var j = 0, keyLen = keys.length; j < keyLen; ++j) { + var k = keys[j]; + if (overrides[k] !== undefined) { + target[k] = overrides[k]; + } } } - }) + } + return target; } -function slice (args) { - var length = args.length - var array = [] - for (var i = 0; i < length; i++) array[i] = args[i] - return array +var debug; +if (process.env.NODE_DEBUG && /\btunnel\b/.test(process.env.NODE_DEBUG)) { + debug = function() { + var args = Array.prototype.slice.call(arguments); + if (typeof args[0] === 'string') { + args[0] = 'TUNNEL: ' + args[0]; + } else { + args.unshift('TUNNEL:'); + } + console.error.apply(console, args); + } +} else { + debug = function() {}; } +exports.debug = debug; // for test /***/ }), -/* 675 */, -/* 676 */ +/* 142 */, +/* 143 */, +/* 144 */, +/* 145 */ /***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; +const pump = __webpack_require__(453); +const bufferStream = __webpack_require__(966); + +class MaxBufferError extends Error { + constructor() { + super('maxBuffer exceeded'); + this.name = 'MaxBufferError'; + } +} + +function getStream(inputStream, options) { + if (!inputStream) { + return Promise.reject(new Error('Expected a stream')); + } -var Type = __webpack_require__(547); + options = Object.assign({maxBuffer: Infinity}, options); -function resolveYamlMerge(data) { - return data === '<<' || data === null; + const {maxBuffer} = options; + + let stream; + return new Promise((resolve, reject) => { + const rejectPromise = error => { + if (error) { // A null check + error.bufferedData = stream.getBufferedValue(); + } + reject(error); + }; + + stream = pump(inputStream, bufferStream(options), error => { + if (error) { + rejectPromise(error); + return; + } + + resolve(); + }); + + stream.on('data', () => { + if (stream.getBufferedLength() > maxBuffer) { + rejectPromise(new MaxBufferError()); + } + }); + }).then(() => stream.getBufferedValue()); } -module.exports = new Type('tag:yaml.org,2002:merge', { - kind: 'scalar', - resolve: resolveYamlMerge -}); +module.exports = getStream; +module.exports.buffer = (stream, options) => getStream(stream, Object.assign({}, options, {encoding: 'buffer'})); +module.exports.array = (stream, options) => getStream(stream, Object.assign({}, options, {array: true})); +module.exports.MaxBufferError = MaxBufferError; /***/ }), -/* 677 */, -/* 678 */, -/* 679 */, -/* 680 */, -/* 681 */ -/***/ (function(module) { +/* 146 */, +/* 147 */, +/* 148 */, +/* 149 */ +/***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; -function posix(path) { - return path.charAt(0) === '/'; +const u = __webpack_require__(676).fromCallback +const path = __webpack_require__(622) +const fs = __webpack_require__(598) +const mkdir = __webpack_require__(727) + +function createFile (file, callback) { + function makeFile () { + fs.writeFile(file, '', err => { + if (err) return callback(err) + callback() + }) + } + + fs.stat(file, (err, stats) => { // eslint-disable-line handle-callback-err + if (!err && stats.isFile()) return callback() + const dir = path.dirname(file) + fs.stat(dir, (err, stats) => { + if (err) { + // if the directory doesn't exist, make it + if (err.code === 'ENOENT') { + return mkdir.mkdirs(dir, 
err => { + if (err) return callback(err) + makeFile() + }) + } + return callback(err) + } + + if (stats.isDirectory()) makeFile() + else { + // parent is not a directory + // This is just to cause an internal ENOTDIR error to be thrown + fs.readdir(dir, err => { + if (err) return callback(err) + }) + } + }) + }) } -function win32(path) { - // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 - var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - var result = splitDeviceRe.exec(path); - var device = result[1] || ''; - var isUnc = Boolean(device && device.charAt(1) !== ':'); +function createFileSync (file) { + let stats + try { + stats = fs.statSync(file) + } catch {} + if (stats && stats.isFile()) return - // UNC paths are always absolute - return Boolean(result[2] || isUnc); + const dir = path.dirname(file) + try { + if (!fs.statSync(dir).isDirectory()) { + // parent is not a directory + // This is just to cause an internal ENOTDIR error to be thrown + fs.readdirSync(dir) + } + } catch (err) { + // If the stat call above failed because the directory doesn't exist, create it + if (err && err.code === 'ENOENT') mkdir.mkdirsSync(dir) + else throw err + } + + fs.writeFileSync(file, '') +} + +module.exports = { + createFile: u(createFile), + createFileSync } -module.exports = process.platform === 'win32' ? win32 : posix; -module.exports.posix = posix; -module.exports.win32 = win32; - /***/ }), -/* 682 */, -/* 683 */, -/* 684 */, -/* 685 */, -/* 686 */, -/* 687 */, -/* 688 */, -/* 689 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +/* 150 */, +/* 151 */, +/* 152 */, +/* 153 */, +/* 154 */, +/* 155 */, +/* 156 */, +/* 157 */, +/* 158 */, +/* 159 */ +/***/ (function(module) { -try { - var util = __webpack_require__(669); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __webpack_require__(315); +module.exports = r => { + const n = process.versions.node.split('.').map(x => parseInt(x, 10)) + r = r.split('.').map(x => parseInt(x, 10)) + return n[0] > r[0] || (n[0] === r[0] && (n[1] > r[1] || (n[1] === r[1] && n[2] >= r[2]))) } /***/ }), -/* 690 */, -/* 691 */, -/* 692 */ -/***/ (function(__unusedmodule, exports) { +/* 160 */, +/* 161 */, +/* 162 */, +/* 163 */, +/* 164 */, +/* 165 */, +/* 166 */, +/* 167 */, +/* 168 */ +/***/ (function(module) { "use strict"; +const alias = ['stdin', 'stdout', 'stderr']; -Object.defineProperty(exports, '__esModule', { value: true }); +const hasAlias = opts => alias.some(x => Boolean(opts[x])); -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) +module.exports = opts => { + if (!opts) { + return null; + } - /* istanbul ignore next */ + if (opts.stdio && hasAlias(opts)) { + throw new Error(`It's not possible to provide \`stdio\` in combination with one of ${alias.map(x => `\`${x}\``).join(', ')}`); + } - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } + if (typeof opts.stdio === 'string') { + return opts.stdio; + } - this.name = 'Deprecation'; - } + const stdio = opts.stdio || []; -} + if (!Array.isArray(stdio)) { + throw new TypeError(`Expected \`stdio\` to be of type \`string\` or \`Array\`, got \`${typeof stdio}\``); + } -exports.Deprecation = Deprecation; + const result = []; + const len = Math.max(stdio.length, 
alias.length); + + for (let i = 0; i < len; i++) { + let value = null; + + if (stdio[i] !== undefined) { + value = stdio[i]; + } else if (opts[alias[i]] !== undefined) { + value = opts[alias[i]]; + } + + result[i] = value; + } + + return result; +}; /***/ }), -/* 693 */, -/* 694 */, -/* 695 */ +/* 169 */, +/* 170 */, +/* 171 */ /***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; -const { stringify } = __webpack_require__(477) -const { outputFile } = __webpack_require__(517) - -async function outputJson (file, data, options = {}) { - const str = stringify(data, options) +const u = __webpack_require__(676).fromPromise +const jsonFile = __webpack_require__(469) - await outputFile(file, str, options) -} +jsonFile.outputJson = u(__webpack_require__(695)) +jsonFile.outputJsonSync = __webpack_require__(628) +// aliases +jsonFile.outputJSON = jsonFile.outputJson +jsonFile.outputJSONSync = jsonFile.outputJsonSync +jsonFile.writeJSON = jsonFile.writeJson +jsonFile.writeJSONSync = jsonFile.writeJsonSync +jsonFile.readJSON = jsonFile.readJson +jsonFile.readJSONSync = jsonFile.readJsonSync -module.exports = outputJson +module.exports = jsonFile /***/ }), -/* 696 */, -/* 697 */ -/***/ (function(module) { +/* 172 */, +/* 173 */, +/* 174 */, +/* 175 */, +/* 176 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; - -module.exports = (promise, onFinally) => { - onFinally = onFinally || (() => {}); - - return promise.then( - val => new Promise(resolve => { - resolve(onFinally()); - }).then(() => val), - err => new Promise(resolve => { - resolve(onFinally()); - }).then(() => { - throw err; - }) - ); -}; - + +Object.defineProperty(exports, "__esModule", { value: true }); +const core_1 = __webpack_require__(470); +/** + * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded + * + * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable + * The total status of the upload/download gets displayed according to this value + * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function + */ +class StatusReporter { + constructor(displayFrequencyInMilliseconds) { + this.totalNumberOfFilesToProcess = 0; + this.processedCount = 0; + this.largeFiles = new Map(); + this.totalFileStatus = undefined; + this.largeFileStatus = undefined; + this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds; + } + setTotalNumberOfFilesToProcess(fileTotal) { + this.totalNumberOfFilesToProcess = fileTotal; + } + start() { + // displays information about the total upload/download status + this.totalFileStatus = setInterval(() => { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); + core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); + }, this.displayFrequencyInMilliseconds); + // displays extra information about any large files that take a significant amount of time to upload or download every 1 second + this.largeFileStatus = setInterval(() => { + for (const value of Array.from(this.largeFiles.values())) { + core_1.info(value); + } + // delete all entries in the map after displaying the information so it will not be displayed again unless explicitly added + 
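            // (e.g. an in-flight chunked upload re-adds itself every second via
            // updateLargeFileStatus, so "Uploading big.bin (42.1%)" keeps
            // reappearing here until that file finishes; "big.bin" is a made-up
            // name, purely illustrative)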
this.largeFiles.clear(); + }, 1000); + } + // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload + updateLargeFileStatus(fileName, numerator, denominator) { + // display 1 decimal place without any rounding + const percentage = this.formatPercentage(numerator, denominator); + const displayInformation = `Uploading ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`; + // any previously added display information should be overwritten for the specific large file because a map is being used + this.largeFiles.set(fileName, displayInformation); + } + stop() { + if (this.totalFileStatus) { + clearInterval(this.totalFileStatus); + } + if (this.largeFileStatus) { + clearInterval(this.largeFileStatus); + } + } + incrementProcessedCount() { + this.processedCount++; + } + formatPercentage(numerator, denominator) { + // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed + return ((numerator / denominator) * 100).toFixed(4).toString(); + } +} +exports.StatusReporter = StatusReporter; +//# sourceMappingURL=status-reporter.js.map /***/ }), -/* 698 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -const { logger } = __webpack_require__(79); -const { - prepareEnv, - createGithubInformationObject, - getEvent -} = __webpack_require__(8); -const { start } = __webpack_require__(754); -const { createCommonConfig } = __webpack_require__(668); -const { getProcessEnvVariable } = __webpack_require__(867); -const fse = __webpack_require__(232); - -/** - * Executes single flow - * @param {String} token the token to communicate to github - * @param {Object} octokit octokit instance - * @param {Object} env proces.env - * @param {Object} pullRequestData information from pull request event - * @param {String} rootFolder path to store flow data/projects - * @param {Boolean} isArchiveArtifacts - */ -async function execute( - token, - octokit, - env, - eventData, - rootFolder, - isArchiveArtifacts -) { - const githubInformation = createGithubInformationObject(eventData, env); - const config = await createCommonConfig(githubInformation, rootFolder, env); - const context = { token, octokit, config }; - await start(context, isArchiveArtifacts); -} +/* 177 */, +/* 178 */, +/* 179 */, +/* 180 */, +/* 181 */, +/* 182 */, +/* 183 */, +/* 184 */, +/* 185 */, +/* 186 */, +/* 187 */, +/* 188 */, +/* 189 */, +/* 190 */, +/* 191 */, +/* 192 */, +/* 193 */, +/* 194 */, +/* 195 */, +/* 196 */, +/* 197 */ +/***/ (function(module, __unusedexports, __webpack_require__) { -/** - * Prepares execution when this is triggered from a github action's event - * @param {String} token the token to communicate to github - * @param {Object} octokit octokit instance - * @param {Object} env proces.env - */ -async function executeFromEvent(token, octokit, env) { - const eventDataStr = await fse.readFile( - getProcessEnvVariable("GITHUB_EVENT_PATH"), - "utf8" - ); - const eventData = JSON.parse(eventDataStr); - await execute(token, octokit, env, eventData, undefined, true); -} +module.exports = isexe +isexe.sync = sync -/** - * Prepares execution when this is triggered from command line - * @param {String} token the token to communicate to github - * @param {Object} octokit octokit instance - * @param {Object} env proces.env - * @param {String} rootFolder path to store flow data/projects - * @param {String} eventUrl event url - */ -async function executeLocally(token, 
octokit, env, rootFolder, eventUrl) { - logger.info(`Executing pull request flow for ${eventUrl} in ${rootFolder}`); +var fs = __webpack_require__(747) - const eventData = await getEvent(octokit, eventUrl); - prepareEnv(env, eventUrl, eventData); - await execute(token, octokit, env, eventData, rootFolder, false); +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, options)) + }) } -module.exports = { executeLocally, executeFromEvent }; - +function sync (path, options) { + return checkStat(fs.statSync(path), options) +} -/***/ }), -/* 699 */, -/* 700 */, -/* 701 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +function checkStat (stat, options) { + return stat.isFile() && checkMode(stat, options) +} -const { logger } = __webpack_require__(79); -const { - prepareEnv, - createGithubInformationObject, - getEvent -} = __webpack_require__(8); -const { start } = __webpack_require__(785); -const { createCommonConfig } = __webpack_require__(668); -const { getProcessEnvVariable } = __webpack_require__(867); -const fse = __webpack_require__(232); +function checkMode (stat, options) { + var mod = stat.mode + var uid = stat.uid + var gid = stat.gid -/** - * Executes pull request flow - * @param {String} token the token to communicate to github - * @param {Object} octokit octokit instance - * @param {Object} env proces.env - * @param {Object} pullRequestData information from pull request event - * @param {String} rootFolder path to store flow data/projects - * @param {Boolean} isArchiveArtifacts - */ -async function execute( - token, - octokit, - env, - eventData, - rootFolder, - isArchiveArtifacts -) { - const githubInformation = createGithubInformationObject(eventData, env); - const config = await createCommonConfig(githubInformation, rootFolder, env); - const context = { token, octokit, config }; - await start(context, isArchiveArtifacts); -} + var myUid = options.uid !== undefined ? + options.uid : process.getuid && process.getuid() + var myGid = options.gid !== undefined ? 
+ options.gid : process.getgid && process.getgid() -/** - * Prepares execution when this is triggered from a github action's event - * @param {String} token the token to communicate to github - * @param {Object} octokit octokit instance - * @param {Object} env proces.env - */ -async function executeFromEvent(token, octokit, env) { - const eventDataStr = await fse.readFile( - getProcessEnvVariable("GITHUB_EVENT_PATH"), - "utf8" - ); - const eventData = JSON.parse(eventDataStr); - await execute(token, octokit, env, eventData, undefined, true); -} + var u = parseInt('100', 8) + var g = parseInt('010', 8) + var o = parseInt('001', 8) + var ug = u | g -/** - * Prepares execution when this is triggered from command line - * @param {String} token the token to communicate to github - * @param {Object} octokit octokit instance - * @param {Object} env proces.env - * @param {String} rootFolder path to store flow data/projects - * @param {String} eventUrl event url - */ -async function executeLocally(token, octokit, env, rootFolder, eventUrl) { - logger.info(`Executing pull request flow for ${eventUrl} in ${rootFolder}`); + var ret = (mod & o) || + (mod & g) && gid === myGid || + (mod & u) && uid === myUid || + (mod & ug) && myUid === 0 - const eventData = await getEvent(octokit, eventUrl); - prepareEnv(env, eventUrl, eventData); - await execute(token, octokit, env, eventData, rootFolder, false); + return ret } -module.exports = { executeLocally, executeFromEvent }; +/***/ }), +/* 198 */, +/* 199 */, +/* 200 */, +/* 201 */, +/* 202 */, +/* 203 */, +/* 204 */, +/* 205 */, +/* 206 */, +/* 207 */, +/* 208 */, +/* 209 */, +/* 210 */, +/* 211 */ +/***/ (function(module) { + +module.exports = require("https"); /***/ }), -/* 702 */, -/* 703 */, -/* 704 */, -/* 705 */, -/* 706 */, -/* 707 */, -/* 708 */, -/* 709 */, -/* 710 */, -/* 711 */, -/* 712 */, -/* 713 */, -/* 714 */, -/* 715 */, -/* 716 */, -/* 717 */, -/* 718 */, -/* 719 */, -/* 720 */, -/* 721 */, -/* 722 */, -/* 723 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +/* 212 */, +/* 213 */, +/* 214 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { "use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const artifact_client_1 = __webpack_require__(359); +/** + * Constructs an ArtifactClient + */ +function create() { + return artifact_client_1.DefaultArtifactClient.create(); +} +exports.create = create; +//# sourceMappingURL=artifact-client.js.map +/***/ }), +/* 215 */, +/* 216 */, +/* 217 */, +/* 218 */, +/* 219 */, +/* 220 */, +/* 221 */, +/* 222 */, +/* 223 */, +/* 224 */, +/* 225 */, +/* 226 */ +/***/ (function(__unusedmodule, exports) { -const u = __webpack_require__(0).fromCallback -const rimraf = __webpack_require__(474) +"use strict"; -module.exports = { - remove: u(rimraf), - removeSync: rimraf.sync +Object.defineProperty(exports, "__esModule", { value: true }); +class BasicCredentialHandler { + constructor(username, password) { + this.username = username; + this.password = password; + } + prepareRequest(options) { + options.headers['Authorization'] = + 'Basic ' + + Buffer.from(this.username + ':' + this.password).toString('base64'); + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BasicCredentialHandler = BasicCredentialHandler; +class BearerCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements 
pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = 'Bearer ' + this.token; + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } +} +exports.BearerCredentialHandler = BearerCredentialHandler; +class PersonalAccessTokenCredentialHandler { + constructor(token) { + this.token = token; + } + // currently implements pre-authorization + // TODO: support preAuth = false where it hooks on 401 + prepareRequest(options) { + options.headers['Authorization'] = + 'Basic ' + Buffer.from('PAT:' + this.token).toString('base64'); + } + // This handler cannot handle 401 + canHandleAuthentication(response) { + return false; + } + handleAuthentication(httpClient, requestInfo, objs) { + return null; + } } +exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler; /***/ }), -/* 724 */, -/* 725 */, -/* 726 */, -/* 727 */ +/* 227 */, +/* 228 */ /***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; -var shebangRegex = __webpack_require__(816); -module.exports = function (str) { - var match = str.match(shebangRegex); +var Type = __webpack_require__(945); - if (!match) { - return null; - } +function resolveYamlBoolean(data) { + if (data === null) return false; - var arr = match[0].replace(/#! ?/, '').split(' '); - var bin = arr[0].split('/').pop(); - var arg = arr[1]; + var max = data.length; - return (bin === 'env' ? - arg : - bin + (arg ? ' ' + arg : '') - ); -}; + return (max === 4 && (data === 'true' || data === 'True' || data === 'TRUE')) || + (max === 5 && (data === 'false' || data === 'False' || data === 'FALSE')); +} +function constructYamlBoolean(data) { + return data === 'true' || + data === 'True' || + data === 'TRUE'; +} -/***/ }), -/* 728 */ -/***/ (function(__unusedmodule, exports) { +function isBoolean(object) { + return Object.prototype.toString.call(object) === '[object Boolean]'; +} -"use strict"; +module.exports = new Type('tag:yaml.org,2002:bool', { + kind: 'scalar', + resolve: resolveYamlBoolean, + construct: constructYamlBoolean, + predicate: isBoolean, + represent: { + lowercase: function (object) { return object ? 'true' : 'false'; }, + uppercase: function (object) { return object ? 'TRUE' : 'FALSE'; }, + camelcase: function (object) { return object ? 'True' : 'False'; } + }, + defaultStyle: 'lowercase' +}); -Object.defineProperty(exports, "__esModule", { value: true }); -class SearchState { - constructor(path, level) { - this.path = path; - this.level = level; - } -} -exports.SearchState = SearchState; -//# sourceMappingURL=internal-search-state.js.map /***/ }), -/* 729 */, -/* 730 */, -/* 731 */, -/* 732 */, -/* 733 */, -/* 734 */, -/* 735 */, -/* 736 */, -/* 737 */, -/* 738 */, -/* 739 */, -/* 740 */, -/* 741 */ +/* 229 */, +/* 230 */, +/* 231 */, +/* 232 */ /***/ (function(module, __unusedexports, __webpack_require__) { -const http = __webpack_require__(605); -const https = __webpack_require__(211); +"use strict"; -function requestUrl(url) { - return new Promise((resolve, reject) => { - (url.startsWith("https://") ? 
https : http).get(url, response => { - let chunks_of_data = []; - response.on("data", fragments => chunks_of_data.push(fragments)); - response.on("end", () => - resolve(Buffer.concat(chunks_of_data).toString()) - ); - response.on("error", error => reject(error)); - }); - }); -} -async function getUrlContent(url) { - try { - return await requestUrl(url); - } catch (error) { - throw new Error(`Error getting ${url}. Error: ${error}`); - } +module.exports = { + // Export promiseified graceful-fs: + ...__webpack_require__(869), + // Export extra methods: + ...__webpack_require__(640), + ...__webpack_require__(774), + ...__webpack_require__(615), + ...__webpack_require__(472), + ...__webpack_require__(171), + ...__webpack_require__(727), + ...__webpack_require__(959), + ...__webpack_require__(353), + ...__webpack_require__(294), + ...__webpack_require__(322), + ...__webpack_require__(368) } -module.exports = { - getUrlContent -}; +// Export fs.promises as a getter property so that we don't trigger +// ExperimentalWarning before fs.promises is actually accessed. +const fs = __webpack_require__(747) +if (Object.getOwnPropertyDescriptor(fs, 'promises')) { + Object.defineProperty(module.exports, 'promises', { + get () { return fs.promises } + }) +} /***/ }), -/* 742 */ +/* 233 */, +/* 234 */, +/* 235 */, +/* 236 */, +/* 237 */, +/* 238 */, +/* 239 */, +/* 240 */, +/* 241 */, +/* 242 */, +/* 243 */, +/* 244 */, +/* 245 */ /***/ (function(module, __unusedexports, __webpack_require__) { +module.exports = globSync +globSync.GlobSync = GlobSync + var fs = __webpack_require__(747) -var core -if (process.platform === 'win32' || global.TESTING_WINDOWS) { - core = __webpack_require__(818) -} else { - core = __webpack_require__(197) +var rp = __webpack_require__(302) +var minimatch = __webpack_require__(595) +var Minimatch = minimatch.Minimatch +var Glob = __webpack_require__(120).Glob +var util = __webpack_require__(669) +var path = __webpack_require__(622) +var assert = __webpack_require__(357) +var isAbsolute = __webpack_require__(681) +var common = __webpack_require__(856) +var alphasort = common.alphasort +var alphasorti = common.alphasorti +var setopts = common.setopts +var ownProp = common.ownProp +var childrenIgnored = common.childrenIgnored +var isIgnored = common.isIgnored + +function globSync (pattern, options) { + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') + + return new GlobSync(pattern, options).found } -module.exports = isexe -isexe.sync = sync +function GlobSync (pattern, options) { + if (!pattern) + throw new Error('must provide pattern') -function isexe (path, options, cb) { - if (typeof options === 'function') { - cb = options - options = {} - } + if (typeof options === 'function' || arguments.length === 3) + throw new TypeError('callback provided to sync glob\n'+ + 'See: https://github.com/isaacs/node-glob/issues/167') - if (!cb) { - if (typeof Promise !== 'function') { - throw new TypeError('callback not provided') - } + if (!(this instanceof GlobSync)) + return new GlobSync(pattern, options) - return new Promise(function (resolve, reject) { - isexe(path, options || {}, function (er, is) { - if (er) { - reject(er) - } else { - resolve(is) - } - }) - }) + setopts(this, pattern, options) + + if (this.noprocess) + return this + + var n = this.minimatch.set.length + this.matches = new Array(n) + for (var i = 0; i < n; i ++) { + 
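    // (minimatch.set holds one expanded pattern per brace alternative; each
    // pass below fills this.matches[i] for that alternative, synchronously)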
this._process(this.minimatch.set[i], i, false) } + this._finish() +} - core(path, options || {}, function (er, is) { - // ignore EACCES because that just means we aren't allowed to run it - if (er) { - if (er.code === 'EACCES' || options && options.ignoreErrors) { - er = null - is = false +GlobSync.prototype._finish = function () { + assert(this instanceof GlobSync) + if (this.realpath) { + var self = this + this.matches.forEach(function (matchset, index) { + var set = self.matches[index] = Object.create(null) + for (var p in matchset) { + try { + p = self._makeAbs(p) + var real = rp.realpathSync(p, self.realpathCache) + set[real] = true + } catch (er) { + if (er.syscall === 'stat') + set[self._makeAbs(p)] = true + else + throw er + } } - } - cb(er, is) - }) + }) + } + common.finish(this) } -function sync (path, options) { - // my kingdom for a filtered catch - try { - return core.sync(path, options || {}) - } catch (er) { - if (options && options.ignoreErrors || er.code === 'EACCES') { - return false - } else { - throw er - } + +GlobSync.prototype._process = function (pattern, index, inGlobStar) { + assert(this instanceof GlobSync) + + // Get the first [n] parts of pattern that are all strings. + var n = 0 + while (typeof pattern[n] === 'string') { + n ++ + } + // now n is the index of the first one that is *not* a string. + + // See if there's anything else + var prefix + switch (n) { + // if not, then this is rather simple + case pattern.length: + this._processSimple(pattern.join('/'), index) + return + + case 0: + // pattern *starts* with some non-trivial item. + // going to readdir(cwd), but not include the prefix in matches. + prefix = null + break + + default: + // pattern has some string bits in the front. + // whatever it starts with, whether that's 'absolute' like /foo/bar, + // or 'relative' like '../baz' + prefix = pattern.slice(0, n).join('/') + break } -} + var remain = pattern.slice(n) -/***/ }), -/* 743 */, -/* 744 */, -/* 745 */, -/* 746 */, -/* 747 */ -/***/ (function(module) { + // get the list of entries. + var read + if (prefix === null) + read = '.' + else if (isAbsolute(prefix) || isAbsolute(pattern.join('/'))) { + if (!prefix || !isAbsolute(prefix)) + prefix = '/' + prefix + read = prefix + } else + read = prefix -module.exports = require("fs"); + var abs = this._makeAbs(read) -/***/ }), -/* 748 */, -/* 749 */, -/* 750 */, -/* 751 */, -/* 752 */, -/* 753 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { + //if ignored, skip processing + if (childrenIgnored(this, read)) + return -"use strict"; + var isGlobStar = remain[0] === minimatch.GLOBSTAR + if (isGlobStar) + this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) + else + this._processReaddir(prefix, read, abs, remain, index, inGlobStar) +} -Object.defineProperty(exports, '__esModule', { value: true }); +GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { + var entries = this._readdir(abs, inGlobStar) -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + // if the abs isn't a dir, then nothing can match! + if (!entries) + return -var endpoint = __webpack_require__(385); -var universalUserAgent = __webpack_require__(392); -var isPlainObject = __webpack_require__(356); -var nodeFetch = _interopDefault(__webpack_require__(454)); -var requestError = __webpack_require__(463); + // It will only match dot entries if it starts with a dot, or if + // dot is set. 
Stuff like @(.foo|.bar) isn't allowed. + var pn = remain[0] + var negate = !!this.minimatch.negate + var rawGlob = pn._glob + var dotOk = this.dot || rawGlob.charAt(0) === '.' -const VERSION = "5.4.9"; + var matchedEntries = [] + for (var i = 0; i < entries.length; i++) { + var e = entries[i] + if (e.charAt(0) !== '.' || dotOk) { + var m + if (negate && !prefix) { + m = !e.match(pn) + } else { + m = e.match(pn) + } + if (m) + matchedEntries.push(e) + } + } -function getBufferResponse(response) { - return response.arrayBuffer(); -} + var len = matchedEntries.length + // If there are no matched entries, then nothing matches. + if (len === 0) + return -function fetchWrapper(requestOptions) { - if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } + // if this is the last remaining pattern bit, then no need for + // an additional stat *unless* the user has specified mark or + // stat explicitly. We know they exist, since readdir returned + // them. - let headers = {}; - let status; - let url; - const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; - return fetch(requestOptions.url, Object.assign({ - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - redirect: requestOptions.redirect - }, requestOptions.request)).then(response => { - url = response.url; - status = response.status; + if (remain.length === 1 && !this.mark && !this.stat) { + if (!this.matches[index]) + this.matches[index] = Object.create(null) - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + if (prefix) { + if (prefix.slice(-1) !== '/') + e = prefix + '/' + e + else + e = prefix + e + } + + if (e.charAt(0) === '/' && !this.nomount) { + e = path.join(this.root, e) + } + this._emitMatch(index, e) } + // This was the last one, and no stats were needed + return + } - if (status === 204 || status === 205) { - return; - } // GitHub API returns 200 for HEAD requests + // now test all matched entries as stand-ins for that part + // of the pattern. 
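  // (a sketch: for pattern parts ['src', '*', 'index.js'], once prefix 'src'
  // has matched entries ['a', 'b'], the loop below re-queues
  // ['src', 'a', 'index.js'] and ['src', 'b', 'index.js']; the names are made
  // up purely to illustrate the recursion)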
+ remain.shift() + for (var i = 0; i < len; i ++) { + var e = matchedEntries[i] + var newPattern + if (prefix) + newPattern = [prefix, e] + else + newPattern = [e] + this._process(newPattern.concat(remain), index, inGlobStar) + } +} - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } +GlobSync.prototype._emitMatch = function (index, e) { + if (isIgnored(this, e)) + return - throw new requestError.RequestError(response.statusText, status, { - headers, - request: requestOptions - }); - } + var abs = this._makeAbs(e) - if (status === 304) { - throw new requestError.RequestError("Not modified", status, { - headers, - request: requestOptions - }); - } + if (this.mark) + e = this._mark(e) - if (status >= 400) { - return response.text().then(message => { - const error = new requestError.RequestError(message, status, { - headers, - request: requestOptions - }); + if (this.absolute) { + e = abs + } - try { - let responseBody = JSON.parse(error.message); - Object.assign(error, responseBody); - let errors = responseBody.errors; // Assumption `errors` would always be in Array format + if (this.matches[index][e]) + return - error.message = error.message + ": " + errors.map(JSON.stringify).join(", "); - } catch (e) {// ignore, see octokit/rest.js#684 - } + if (this.nodir) { + var c = this.cache[abs] + if (c === 'DIR' || Array.isArray(c)) + return + } - throw error; - }); - } + this.matches[index][e] = true - const contentType = response.headers.get("content-type"); + if (this.stat) + this._stat(e) +} - if (/application\/json/.test(contentType)) { - return response.json(); - } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } +GlobSync.prototype._readdirInGlobStar = function (abs) { + // follow all symlinked directories forever + // just proceed as if this is a non-globstar situation + if (this.follow) + return this._readdir(abs, false) - return getBufferResponse(response); - }).then(data => { - return { - status, - url, - headers, - data - }; - }).catch(error => { - if (error instanceof requestError.RequestError) { - throw error; + var entries + var lstat + var stat + try { + lstat = fs.lstatSync(abs) + } catch (er) { + if (er.code === 'ENOENT') { + // lstat failed, doesn't exist + return null } + } - throw new requestError.RequestError(error.message, 500, { - headers, - request: requestOptions - }); - }); -} + var isSym = lstat && lstat.isSymbolicLink() + this.symlinks[abs] = isSym -function withDefaults(oldEndpoint, newDefaults) { - const endpoint = oldEndpoint.defaults(newDefaults); + // If it's not a symlink or a dir, then it's definitely a regular file. + // don't bother doing a readdir in that case. 
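  // (symlinks still fall through to _readdir: they may point at directories,
  // and the symlinks cache recorded above is what later stops globstar from
  // looping through a symlink cycle)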
+ if (!isSym && lstat && !lstat.isDirectory()) + this.cache[abs] = 'FILE' + else + entries = this._readdir(abs, false) + + return entries +} - const newApi = function (route, parameters) { - const endpointOptions = endpoint.merge(route, parameters); +GlobSync.prototype._readdir = function (abs, inGlobStar) { + var entries - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint.parse(endpointOptions)); - } + if (inGlobStar && !ownProp(this.symlinks, abs)) + return this._readdirInGlobStar(abs) - const request = (route, parameters) => { - return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); - }; + if (ownProp(this.cache, abs)) { + var c = this.cache[abs] + if (!c || c === 'FILE') + return null - Object.assign(request, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); - return endpointOptions.request.hook(request, endpointOptions); - }; + if (Array.isArray(c)) + return c + } - return Object.assign(newApi, { - endpoint, - defaults: withDefaults.bind(null, endpoint) - }); + try { + return this._readdirEntries(abs, fs.readdirSync(abs)) + } catch (er) { + this._readdirError(abs, er) + return null + } } -const request = withDefaults(endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` +GlobSync.prototype._readdirEntries = function (abs, entries) { + // if we haven't asked to stat everything, then just + // assume that everything in there exists, so we can avoid + // having to stat it a second time. + if (!this.mark && !this.stat) { + for (var i = 0; i < entries.length; i ++) { + var e = entries[i] + if (abs === '/') + e = abs + e + else + e = abs + '/' + e + this.cache[e] = true + } } -}); - -exports.request = request; -//# sourceMappingURL=index.js.map + this.cache[abs] = entries -/***/ }), -/* 754 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + // mark and cache dir-ness + return entries +} -const { - checkoutDefinitionTree, - getPlaceHolders -} = __webpack_require__(330); -const { executeBuild } = __webpack_require__(11); -const { getTreeForProject } = __webpack_require__(594); -const { printCheckoutInformation } = __webpack_require__(656); -const { logger } = __webpack_require__(79); -const core = __webpack_require__(470); -const { - archiveArtifacts -} = __webpack_require__(503); +GlobSync.prototype._readdirError = function (f, er) { + // handle errors, and cache the information + switch (er.code) { + case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 + case 'ENOTDIR': // totally normal. means it *does* exist. + var abs = this._makeAbs(f) + this.cache[abs] = 'FILE' + if (abs === this.cwdAbs) { + var error = new Error(er.code + ' invalid cwd ' + this.cwd) + error.path = this.cwd + error.code = er.code + throw error + } + break -async function start(context, isArchiveArtifacts = true) { - core.startGroup( - `[Single Flow] Checking out ${context.config.github.groupProject} and its dependencies` - ); - const definitionTree = await getTreeForProject( - context.config.github.inputs.definitionFile, - context.config.github.repository, - await getPlaceHolders(context, context.config.github.inputs.definitionFile) - ); - const nodeChain = [definitionTree]; + case 'ENOENT': // not terribly unusual + case 'ELOOP': + case 'ENAMETOOLONG': + case 'UNKNOWN': + this.cache[this._makeAbs(f)] = false + break - logger.info( - `Single flow for project ${ - context.config.github.inputs.startingProject - }. 
Nodes: ${nodeChain.map(node => "\n" + node.project)}`
  );
  const checkoutInfo = await checkoutDefinitionTree(context, nodeChain);
  core.endGroup();

  core.startGroup(`[Single Flow] Checkout Summary...`);
  printCheckoutInformation(checkoutInfo);
  core.endGroup();

  const executionResult = await executeBuild(
    context.config.rootFolder,
    nodeChain,
    context.config.github.repository
  )
    .then(() => true)
    .catch(e => e);

  if (isArchiveArtifacts) {
    core.startGroup(`[Single Flow] Archiving artifacts...`);
    await archiveArtifacts(
      nodeChain.find(node => node.project === context.config.github.repository),
      nodeChain,
      executionResult === true ? ["success", "always"] : ["failure", "always"]
    );
    core.endGroup();
  } else {
    logger.info("Archive artifact won't be executed");
  }

  if (executionResult !== true) {
    logger.error(executionResult);
    throw new Error(
      `Command executions have failed, please review latest execution ${executionResult}`
    );
  }
}

module.exports = {
  start
};


/***/ }),
/* 755 */
/***/ (function(module, __unusedexports, __webpack_require__) {

const assert = __webpack_require__(357);

const allowedVersions = ["2.0"];

function validateDefinition(definition) {
  assert(
    definition,
    "definition file is empty or couldn't be loaded, please check 'definition-file' input"
  );
  validateVersion(definition.version, "definition");
}

function validateDependencies(dependencies) {
  validateVersion(dependencies.version, "dependencies");
}

function validateVersion(version, fileName) {
  assert(
    version,
    `version is not defined in the ${fileName} file. Please add version: x, where x is one of these values: ${allowedVersions}`
  );
  assert(
    allowedVersions.includes(version),
    `version ${version} is not allowed in the ${fileName} file. Allowed versions: ${allowedVersions}`
  );
}

function validateNode(node) {
  assert(
    node,
    "node is undefined. Please check your definition file. 
For example, projects declared as a dependency of another project have to be defined before they are referenced (the order of declaration matters)."
  );
  assert(
    node.project,
    "project can't be undefined, please define it properly in your file."
  );
  assert(
    node.project.split("/").length === 2,
    `project has to be defined following the \`group/project\` pattern (i.e. \`kiegroup/drools\`). "${node.project}" instead`
  );
}

module.exports = { validateDefinition, validateDependencies, validateNode };


/***/ }),
/* 756 */,
/* 757 */,
/* 758 */,
/* 759 */,
/* 760 */,
/* 761 */
/***/ (function(module) {

module.exports = require("zlib");

/***/ }),
/* 762 */,
/* 763 */
/***/ (function(module) {

module.exports = removeHook

function removeHook (state, name, method) {
  if (!state.registry[name]) {
    return
  }

  var index = state.registry[name]
    .map(function (registered) { return registered.orig })
    .indexOf(method)

  if (index === -1) {
    return
  }

  state.registry[name].splice(index, 1)
}


/***/ }),
/* 764 */,
/* 765 */
/***/ (function(module) {

module.exports = require("process");

/***/ }),
/* 766 */,
/* 767 */,
/* 768 */
/***/ (function(module) {

"use strict";

module.exports = function (x) {
	var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt();
	var cr = typeof x === 'string' ? 
'\r' : '\r'.charCodeAt();
+  if (needDir && c === 'FILE')
+    return false

-	if (x[x.length - 1] === lf) {
-		x = x.slice(0, x.length - 1);
-	}
+  return c
+}

-	if (x[x.length - 1] === cr) {
-		x = x.slice(0, x.length - 1);
-	}
+GlobSync.prototype._mark = function (p) {
+  return common.mark(this, p)
+}

-	return x;
-};
+GlobSync.prototype._makeAbs = function (f) {
+  return common.makeAbs(this, f)
+}


 /***/ }),
-/* 769 */,
-/* 770 */,
-/* 771 */
-/***/ (function(module, __unusedexports, __webpack_require__) {
-
-const { logger } = __webpack_require__(79);
-const {
-  prepareEnv,
-  createGithubInformationObject,
-  getEvent
-} = __webpack_require__(8);
-const { start } = __webpack_require__(137);
-const { createCommonConfig } = __webpack_require__(668);
-const { getProcessEnvVariable } = __webpack_require__(867);
-const fse = __webpack_require__(232);
-
-/**
- * Executes full downstream flow
- * @param {String} token the token used to communicate with GitHub
- * @param {Object} octokit octokit instance
- * @param {Object} env process.env
- * @param {Object} eventData information from the pull request event
- * @param {String} rootFolder path to store flow data/projects
- * @param {Boolean} isArchiveArtifacts
- */
-async function execute(
-  token,
-  octokit,
-  env,
-  eventData,
-  rootFolder,
-  isArchiveArtifacts
-) {
-  const githubInformation = createGithubInformationObject(eventData, env);
-  const config = await createCommonConfig(githubInformation, rootFolder, env);
-  const context = { token, octokit, config };
-  await start(context, isArchiveArtifacts);
-}
-
-/**
- * Prepares execution when this is triggered from a GitHub action's event
- * @param {String} token the token used to communicate with GitHub
- * @param {Object} octokit octokit instance
- * @param {Object} env process.env
- */
-async function executeFromEvent(token, octokit, env) {
-  const eventDataStr = await fse.readFile(
-    getProcessEnvVariable("GITHUB_EVENT_PATH"),
-    "utf8"
-  );
-  const eventData = JSON.parse(eventDataStr);
-  await execute(token, octokit, env, eventData, undefined, true);
-}
-
-/**
- * Prepares execution when this is triggered from the command line
- * @param {String} token the token used to communicate with GitHub
- * @param {Object} octokit octokit instance
- * @param {Object} env process.env
- * @param {String} rootFolder path to store flow data/projects
- * @param {String} eventUrl event url
- */
-async function executeLocally(token, octokit, env, rootFolder, eventUrl) {
-  logger.info(`Executing pull request flow for ${eventUrl} in ${rootFolder}`);
+/* 246 */,
+/* 247 */,
+/* 248 */,
+/* 249 */,
+/* 250 */
+/***/ (function(__unusedmodule, exports) {
+
+"use strict";
+
+
+Object.defineProperty(exports, '__esModule', { value: true });
+
+const Endpoints = {
+  actions: {
+    addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
+    cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
+    createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
+    createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
+      renamedParameters: {
+        name: "secret_name"
+      }
+    }],
+    createOrUpdateSecretForRepo: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, {
+      renamed: ["actions", "createOrUpdateRepoSecret"],
+      renamedParameters: {
+        name: "secret_name"
+      }
+    }],
+    createRegistrationToken: ["POST /repos/{owner}/{repo}/actions/runners/registration-token", {}, {
+      renamed: ["actions", "createRegistrationTokenForRepo"]
+    }],
+    
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], + createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], + createRemoveToken: ["POST /repos/{owner}/{repo}/actions/runners/remove-token", {}, { + renamed: ["actions", "createRemoveTokenForRepo"] + }], + createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], + createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], + deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], + deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamedParameters: { + name: "secret_name" + } + }], + deleteSecretFromRepo: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamed: ["actions", "deleteRepoSecret"], + renamedParameters: { + name: "secret_name" + } + }], + deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], + deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], + deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], + downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], + downloadWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { + renamed: ["actions", "downloadJobLogsForWorkflowRun"] + }], + downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], + getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], + getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], + getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], + getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], + getPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key", {}, { + renamed: ["actions", "getRepoPublicKey"] + }], + getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], + getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamedParameters: { + name: "secret_name" + } + }], + getSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { + renamed: ["actions", "getRepoSecret"], + renamedParameters: { + name: "secret_name" + } + }], + getSelfHostedRunner: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { + renamed: ["actions", "getSelfHostedRunnerForRepo"] + }], + getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], + getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], + getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], + getWorkflowJob: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}", {}, { + renamed: ["actions", "getJobForWorkflowRun"] + }], + getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], + getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], + getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], + listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], + listDownloadsForSelfHostedRunnerApplication: ["GET /repos/{owner}/{repo}/actions/runners/downloads", {}, { + renamed: ["actions", "listRunnerApplicationsForRepo"] + }], + listJobsForWorkflowRun: ["GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], + listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], + listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], + listRepoWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/runs", {}, { + renamed: ["actions", "listWorkflowRunsForRepo"] + }], + listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], + listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], + listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], + listSecretsForRepo: ["GET /repos/{owner}/{repo}/actions/secrets", {}, { + renamed: ["actions", "listRepoSecrets"] + }], + listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], + listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], + listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], + listWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { + renamed: ["actions", "downloadWorkflowJobLogs"] + }], + listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], + listWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", {}, { + renamed: ["actions", "downloadWorkflowRunLogs"] + }], + listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], + listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], + reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], + removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], + removeSelfHostedRunner: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { + renamed: ["actions", "deleteSelfHostedRunnerFromRepo"] + }], + setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] + }, + activity: { + checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], + checkStarringRepo: ["GET /user/starred/{owner}/{repo}", {}, { + renamed: ["activity", "checkRepoIsStarredByAuthenticatedUser"] + }], + deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], + deleteThreadSubscription: ["DELETE /notifications/threads/{thread_id}/subscription"], + getFeeds: ["GET /feeds"], + getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], + getThread: ["GET /notifications/threads/{thread_id}"], + getThreadSubscription: ["PUT /notifications", {}, { + renamed: ["activity", "getThreadSubscriptionForAuthenticatedUser"] + }], + getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], + listEventsForAuthenticatedUser: ["GET /users/{username}/events"], + listEventsForOrg: ["GET /users/{username}/events/orgs/{org}", {}, { + renamed: ["activity", "listOrgEventsForAuthenticatedUser"] + }], + listEventsForUser: ["GET /users/{username}/events", {}, { + renamed: ["activity", "listEventsForAuthenticatedUser"] + }], + listFeeds: ["GET /feeds", {}, { + renamed: ["activity", "getFeeds"] + }], + listNotifications: ["GET /notifications", {}, { + renamed: ["activity", "listNotificationsForAuthenticatedUser"] + }], + listNotificationsForAuthenticatedUser: ["GET /notifications"], + listNotificationsForRepo: ["GET /repos/{owner}/{repo}/notifications", {}, { + renamed: ["activity", "listRepoNotificationsForAuthenticatedUser"] + }], + listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], + listPublicEvents: ["GET /events"], + listPublicEventsForOrg: 
["GET /orgs/{org}/events", {}, { + renamed: ["activity", "listPublicOrgEvents"] + }], + listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], + listPublicEventsForUser: ["GET /users/{username}/events/public"], + listPublicOrgEvents: ["GET /orgs/{org}/events"], + listReceivedEventsForUser: ["GET /users/{username}/received_events"], + listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], + listRepoEvents: ["GET /repos/{owner}/{repo}/events"], + listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], + listReposStarredByAuthenticatedUser: ["GET /user/starred"], + listReposStarredByUser: ["GET /users/{username}/starred"], + listReposWatchedByUser: ["GET /users/{username}/subscriptions"], + listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], + listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], + listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], + markAsRead: ["PUT /notifications", {}, { + renamed: ["activity", "markNotificationsAsRead"] + }], + markNotificationsAsRead: ["PUT /notifications"], + markNotificationsAsReadForRepo: ["PUT /repos/{owner}/{repo}/notifications", {}, { + renamed: ["activity", "markRepoNotificationsAsRead"] + }], + markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], + markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], + setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], + setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], + starRepo: ["PUT /user/starred/{owner}/{repo}", {}, { + renamed: ["activity", "starRepoForAuthenticatedUser"] + }], + starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], + unstarRepo: ["DELETE /user/starred/{owner}/{repo}", {}, { + renamed: ["activity", "unstarRepoForAuthenticatedUser"] + }], + unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] + }, + apps: { + addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + checkAccountIsAssociatedWithAny: ["GET /marketplace_listing/accounts/{account_id}", {}, { + renamed: ["apps", "getSubscriptionPlanForAccount"] + }], + checkAccountIsAssociatedWithAnyStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}", {}, { + renamed: ["apps", "getSubscriptionPlanForAccountStubbed"] + }], + checkToken: ["POST /applications/{client_id}/token"], + createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { + mediaType: { + previews: ["corsair"] + } + }], + createFromManifest: ["POST /app-manifests/{code}/conversions"], + createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { + mediaType: { + previews: ["machine-man"] + } + }], + createInstallationToken: ["POST /app/installations/{installation_id}/access_tokens", { + mediaType: { + previews: ["machine-man"] + } + }, { + renamed: ["apps", "createInstallationAccessToken"] + }], + deleteAuthorization: ["DELETE /applications/{client_id}/grant"], + deleteInstallation: ["DELETE /app/installations/{installation_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + deleteToken: ["DELETE /applications/{client_id}/token"], + getAuthenticated: ["GET /app", { + mediaType: { + previews: ["machine-man"] + } + }], + getBySlug: ["GET /apps/{app_slug}", { + mediaType: { + previews: ["machine-man"] + } + }], + getInstallation: ["GET /app/installations/{installation_id}", { + 
mediaType: { + previews: ["machine-man"] + } + }], + getOrgInstallation: ["GET /orgs/{org}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], + getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], + getUserInstallation: ["GET /users/{username}/installation", { + mediaType: { + previews: ["machine-man"] + } + }], + listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], + listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], + listAccountsUserOrOrgOnPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts", {}, { + renamed: ["apps", "listAccountsForPlan"] + }], + listAccountsUserOrOrgOnPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", {}, { + renamed: ["apps", "listAccountsForPlanStubbed"] + }], + listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallations: ["GET /app/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listInstallationsForAuthenticatedUser: ["GET /user/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listMarketplacePurchasesForAuthenticatedUser: ["GET /user/marketplace_purchases", {}, { + renamed: ["apps", "listSubscriptionsForAuthenticatedUser"] + }], + listMarketplacePurchasesForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed", {}, { + renamed: ["apps", "listSubscriptionsForAuthenticatedUserStubbed"] + }], + listPlans: ["GET /marketplace_listing/plans"], + listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], + listRepos: ["GET /installation/repositories", { + mediaType: { + previews: ["machine-man"] + } + }, { + renamed: ["apps", "listReposAccessibleToInstallation"] + }], + listReposAccessibleToInstallation: ["GET /installation/repositories", { + mediaType: { + previews: ["machine-man"] + } + }], + listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], + listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], + removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { + mediaType: { + previews: ["machine-man"] + } + }], + resetToken: ["PATCH /applications/{client_id}/token"], + revokeInstallationAccessToken: ["DELETE /installation/token"], + revokeInstallationToken: ["DELETE /installation/token", {}, { + renamed: ["apps", "revokeInstallationAccessToken"] + }], + suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], + unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] + }, + checks: { + create: ["POST /repos/{owner}/{repo}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + createSuite: ["POST /repos/{owner}/{repo}/check-suites", { + mediaType: { + previews: ["antiope"] + } + }], + get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }], + getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { + mediaType: { + previews: ["antiope"] + } + }], + listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { + mediaType: { + previews: ["antiope"] + } + }], + listForRef: ["GET 
/repos/{owner}/{repo}/commits/{ref}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { + mediaType: { + previews: ["antiope"] + } + }], + listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { + mediaType: { + previews: ["antiope"] + } + }], + rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { + mediaType: { + previews: ["antiope"] + } + }], + setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { + mediaType: { + previews: ["antiope"] + } + }], + update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { + mediaType: { + previews: ["antiope"] + } + }] + }, + codeScanning: { + getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], + listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] + }, + codesOfConduct: { + getAllCodesOfConduct: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getConductCode: ["GET /codes_of_conduct/{key}", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }], + listConductCodes: ["GET /codes_of_conduct", { + mediaType: { + previews: ["scarlet-witch"] + } + }, { + renamed: ["codesOfConduct", "getAllCodesOfConduct"] + }] + }, + emojis: { + get: ["GET /emojis"] + }, + gists: { + checkIsStarred: ["GET /gists/{gist_id}/star"], + create: ["POST /gists"], + createComment: ["POST /gists/{gist_id}/comments"], + delete: ["DELETE /gists/{gist_id}"], + deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], + fork: ["POST /gists/{gist_id}/forks"], + get: ["GET /gists/{gist_id}"], + getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], + getRevision: ["GET /gists/{gist_id}/{sha}"], + list: ["GET /gists"], + listComments: ["GET /gists/{gist_id}/comments"], + listCommits: ["GET /gists/{gist_id}/commits"], + listForUser: ["GET /users/{username}/gists"], + listForks: ["GET /gists/{gist_id}/forks"], + listPublic: ["GET /gists/public"], + listPublicForUser: ["GET /users/{username}/gists", {}, { + renamed: ["gists", "listForUser"] + }], + listStarred: ["GET /gists/starred"], + star: ["PUT /gists/{gist_id}/star"], + unstar: ["DELETE /gists/{gist_id}/star"], + update: ["PATCH /gists/{gist_id}"], + updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] + }, + git: { + createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], + createCommit: ["POST /repos/{owner}/{repo}/git/commits"], + createRef: ["POST /repos/{owner}/{repo}/git/refs"], + createTag: ["POST /repos/{owner}/{repo}/git/tags"], + createTree: ["POST /repos/{owner}/{repo}/git/trees"], + deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], + getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], + getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], + getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], + getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], + getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], + listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], + updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] + }, + gitignore: { + getAllTemplates: ["GET /gitignore/templates"], + getTemplate: ["GET /gitignore/templates/{name}"], + listTemplates: ["GET /gitignore/templates", {}, { + renamed: ["gitignore", "getAllTemplates"] + }] + }, + interactions: 
{ + addOrUpdateRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }, { + renamed: ["interactions", "setRestrictionsForOrg"] + }], + addOrUpdateRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }, { + renamed: ["interactions", "setRestrictionsForRepo"] + }], + getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }], + setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { + mediaType: { + previews: ["sombra"] + } + }] + }, + issues: { + addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], + addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], + checkAssignee: ["GET /repos/{owner}/{repo}/assignees/{assignee}", {}, { + renamed: ["issues", "checkUserCanBeAssigned"] + }], + checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], + create: ["POST /repos/{owner}/{repo}/issues"], + createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], + createLabel: ["POST /repos/{owner}/{repo}/labels"], + createMilestone: ["POST /repos/{owner}/{repo}/milestones"], + deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], + deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], + deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], + get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], + getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], + getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], + getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], + getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], + list: ["GET /issues"], + listAssignees: ["GET /repos/{owner}/{repo}/assignees"], + listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], + listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], + listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], + listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { + mediaType: { + previews: ["mockingbird"] + } + }], + listForAuthenticatedUser: ["GET /user/issues"], + listForOrg: ["GET /orgs/{org}/issues"], + listForRepo: ["GET /repos/{owner}/{repo}/issues"], + listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], + listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], + listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], + listMilestones: ["GET /repos/{owner}/{repo}/milestones"], + listMilestonesForRepo: ["GET /repos/{owner}/{repo}/milestones", {}, { + renamed: ["issues", "listMilestones"] + }], + lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], + removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], + removeAssignees: ["DELETE 
/repos/{owner}/{repo}/issues/{issue_number}/assignees"], + removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], + removeLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { + renamed: ["issues", "removeAllLabels"] + }], + replaceAllLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { + renamed: ["issues", "setLabels"] + }], + replaceLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { + renamed: ["issues", "replaceAllLabels"] + }], + setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], + unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], + update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], + updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], + updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], + updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] + }, + licenses: { + get: ["GET /licenses/{license}"], + getAllCommonlyUsed: ["GET /licenses"], + getForRepo: ["GET /repos/{owner}/{repo}/license"], + listCommonlyUsed: ["GET /licenses", {}, { + renamed: ["licenses", "getAllCommonlyUsed"] + }] + }, + markdown: { + render: ["POST /markdown"], + renderRaw: ["POST /markdown/raw", { + headers: { + "content-type": "text/plain; charset=utf-8" + } + }] + }, + meta: { + get: ["GET /meta"] + }, + migrations: { + cancelImport: ["DELETE /repos/{owner}/{repo}/import"], + deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { + mediaType: { + previews: ["wyandotte"] + } + }], + getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], + getImportProgress: ["GET /repos/{owner}/{repo}/import", {}, { + renamed: ["migrations", "getImportStatus"] + }], + getImportStatus: ["GET /repos/{owner}/{repo}/import"], + getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], + getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForAuthenticatedUser: ["GET /user/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listForOrg: ["GET /orgs/{org}/migrations", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + listReposForUser: ["GET /user/{migration_id}/repositories", { + mediaType: { + previews: ["wyandotte"] + } + }], + mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], + setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], + startForAuthenticatedUser: ["POST /user/migrations"], + startForOrg: ["POST /orgs/{org}/migrations"], + startImport: ["PUT /repos/{owner}/{repo}/import"], + unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + unlockRepoForOrg: ["DELETE 
/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { + mediaType: { + previews: ["wyandotte"] + } + }], + updateImport: ["PATCH /repos/{owner}/{repo}/import"] + }, + orgs: { + addOrUpdateMembership: ["PUT /orgs/{org}/memberships/{username}", {}, { + renamed: ["orgs", "setMembershipForUser"] + }], + blockUser: ["PUT /orgs/{org}/blocks/{username}"], + checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], + checkMembership: ["GET /orgs/{org}/members/{username}", {}, { + renamed: ["orgs", "checkMembershipForUser"] + }], + checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], + checkPublicMembership: ["GET /orgs/{org}/public_members/{username}", {}, { + renamed: ["orgs", "checkPublicMembershipForUser"] + }], + checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], + concealMembership: ["DELETE /orgs/{org}/public_members/{username}", {}, { + renamed: ["orgs", "removePublicMembershipForAuthenticatedUser"] + }], + convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], + createHook: ["POST /orgs/{org}/hooks", {}, { + renamed: ["orgs", "createWebhook"] + }], + createInvitation: ["POST /orgs/{org}/invitations"], + createWebhook: ["POST /orgs/{org}/hooks"], + deleteHook: ["DELETE /orgs/{org}/hooks/{hook_id}", {}, { + renamed: ["orgs", "deleteWebhook"] + }], + deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], + get: ["GET /orgs/{org}"], + getHook: ["GET /orgs/{org}/hooks/{hook_id}", {}, { + renamed: ["orgs", "getWebhook"] + }], + getMembership: ["GET /orgs/{org}/memberships/{username}", {}, { + renamed: ["orgs", "getMembershipForUser"] + }], + getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], + getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], + getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], + list: ["GET /organizations"], + listAppInstallations: ["GET /orgs/{org}/installations", { + mediaType: { + previews: ["machine-man"] + } + }], + listBlockedUsers: ["GET /orgs/{org}/blocks"], + listForAuthenticatedUser: ["GET /user/orgs"], + listForUser: ["GET /users/{username}/orgs"], + listHooks: ["GET /orgs/{org}/hooks", {}, { + renamed: ["orgs", "listWebhooks"] + }], + listInstallations: ["GET /orgs/{org}/installations", { + mediaType: { + previews: ["machine-man"] + } + }, { + renamed: ["orgs", "listAppInstallations"] + }], + listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], + listMembers: ["GET /orgs/{org}/members"], + listMemberships: ["GET /user/memberships/orgs", {}, { + renamed: ["orgs", "listMembershipsForAuthenticatedUser"] + }], + listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], + listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], + listPendingInvitations: ["GET /orgs/{org}/invitations"], + listPublicMembers: ["GET /orgs/{org}/public_members"], + listWebhooks: ["GET /orgs/{org}/hooks"], + pingHook: ["POST /orgs/{org}/hooks/{hook_id}/pings", {}, { + renamed: ["orgs", "pingWebhook"] + }], + pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], + publicizeMembership: ["PUT /orgs/{org}/public_members/{username}", {}, { + renamed: ["orgs", "setPublicMembershipForAuthenticatedUser"] + }], + removeMember: ["DELETE /orgs/{org}/members/{username}"], + removeMembership: ["DELETE /orgs/{org}/memberships/{username}", {}, { + renamed: ["orgs", "removeMembershipForUser"] + }], + removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], + removeOutsideCollaborator: ["DELETE 
/orgs/{org}/outside_collaborators/{username}"], + removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], + setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], + setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], + unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], + update: ["PATCH /orgs/{org}"], + updateHook: ["PATCH /orgs/{org}/hooks/{hook_id}", {}, { + renamed: ["orgs", "updateWebhook"] + }], + updateMembership: ["PATCH /user/memberships/orgs/{org}", {}, { + renamed: ["orgs", "updateMembershipForAuthenticatedUser"] + }], + updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], + updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + }, + projects: { + addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + createCard: ["POST /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + createColumn: ["POST /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + createForAuthenticatedUser: ["POST /user/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForOrg: ["POST /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + createForRepo: ["POST /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + delete: ["DELETE /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteCard: ["DELETE /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + deleteColumn: ["DELETE /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + get: ["GET /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getCard: ["GET /projects/columns/cards/{card_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getColumn: ["GET /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }], + getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }], + listCards: ["GET /projects/columns/{column_id}/cards", { + mediaType: { + previews: ["inertia"] + } + }], + listCollaborators: ["GET /projects/{project_id}/collaborators", { + mediaType: { + previews: ["inertia"] + } + }], + listColumns: ["GET /projects/{project_id}/columns", { + mediaType: { + previews: ["inertia"] + } + }], + listForOrg: ["GET /orgs/{org}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForRepo: ["GET /repos/{owner}/{repo}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listForUser: ["GET /users/{username}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + moveCard: ["POST /projects/columns/cards/{card_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + moveColumn: ["POST /projects/columns/{column_id}/moves", { + mediaType: { + previews: ["inertia"] + } + }], + removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { + mediaType: { + previews: ["inertia"] + } + }], + reviewUserPermissionLevel: ["GET /projects/{project_id}/collaborators/{username}/permission", { + mediaType: { + previews: ["inertia"] + } + }, { + renamed: ["projects", "getPermissionForUser"] + }], + update: ["PATCH /projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + updateCard: ["PATCH /projects/columns/cards/{card_id}", { + mediaType: { + previews: 
["inertia"] + } + }], + updateColumn: ["PATCH /projects/columns/{column_id}", { + mediaType: { + previews: ["inertia"] + } + }] + }, + pulls: { + checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + create: ["POST /repos/{owner}/{repo}/pulls"], + createComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { + renamed: ["pulls", "createReviewComment"] + }], + createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], + createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + createReviewCommentReply: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", {}, { + renamed: ["pulls", "createReplyForReviewComment"] + }], + createReviewRequest: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { + renamed: ["pulls", "requestReviewers"] + }], + deleteComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { + renamed: ["pulls", "deleteReviewComment"] + }], + deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + deleteReviewRequest: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { + renamed: ["pulls", "removeRequestedReviewers"] + }], + dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], + get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], + getComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { + renamed: ["pulls", "getReviewComment"] + }], + getCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", {}, { + renamed: ["pulls", "listCommentsForReview"] + }], + getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], + list: ["GET /repos/{owner}/{repo}/pulls"], + listComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { + renamed: ["pulls", "listReviewComments"] + }], + listCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments", {}, { + renamed: ["pulls", "listReviewCommentsForRepo"] + }], + listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], + listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], + listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], + listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], + listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], + listReviewRequests: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { + renamed: ["pulls", "listRequestedReviewers"] + }], + listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], + merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], + removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], + submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], + update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], + updateBranch: ["PUT 
/repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { + mediaType: { + previews: ["lydian"] + } + }], + updateComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { + renamed: ["pulls", "updateReviewComment"] + }], + updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], + updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] + }, + rateLimit: { + get: ["GET /rate_limit"] + }, + reactions: { + createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + delete: ["DELETE /reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }, { + renamed: ["reactions", "deleteLegacy"] + }], + deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + deleteLegacy: ["DELETE /reactions/{reaction_id}", { + mediaType: { + previews: ["squirrel-girl"] + } + }, { + deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" + }], + listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionCommentInOrg: ["GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }], + listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { + mediaType: { + previews: ["squirrel-girl"] + } + }] + }, + repos: { + acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], + addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], + addDeployKey: ["POST /repos/{owner}/{repo}/keys", {}, { + renamed: ["repos", "createDeployKey"] + }], + addProtectedBranchAdminEnforcement: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { + renamed: ["repos", "setAdminBranchProtection"] + }], + addProtectedBranchAppRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps", + renamed: ["repos", "addAppAccessRestrictions"] + }], + addProtectedBranchRequiredSignatures: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }, { + renamed: ["repos", "createCommitSignatureProtection"] + }], + addProtectedBranchRequiredStatusChecksContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts", + renamed: ["repos", "addStatusCheckContexts"] + }], + addProtectedBranchTeamRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams", + renamed: ["repos", "addTeamAccessRestrictions"] + }], + addProtectedBranchUserRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users", + renamed: ["repos", "addUserAccessRestrictions"] + }], + addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], + checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], + createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], + createDeployKey: ["POST /repos/{owner}/{repo}/keys"], + createDeployment: ["POST /repos/{owner}/{repo}/deployments"], + createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], + createForAuthenticatedUser: ["POST /user/repos"], + createFork: ["POST /repos/{owner}/{repo}/forks"], + createHook: ["POST /repos/{owner}/{repo}/hooks", {}, { + renamed: ["repos", "createWebhook"] + }], + createInOrg: ["POST /orgs/{org}/repos"], + createOrUpdateFile: ["PUT 
/repos/{owner}/{repo}/contents/{path}", {}, { + renamed: ["repos", "createOrUpdateFileContents"] + }], + createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], + createPagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + createRelease: ["POST /repos/{owner}/{repo}/releases"], + createStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}", {}, { + renamed: ["repos", "createCommitStatus"] + }], + createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { + mediaType: { + previews: ["baptiste"] + } + }], + createWebhook: ["POST /repos/{owner}/{repo}/hooks"], + declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], + delete: ["DELETE /repos/{owner}/{repo}"], + deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], + deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], + deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], + deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], + deleteDownload: ["DELETE /repos/{owner}/{repo}/downloads/{download_id}"], + deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], + deleteHook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}", {}, { + renamed: ["repos", "deleteWebhook"] + }], + deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], + deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }], + deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], + deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], + deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], + disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + disablePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }, { + renamed: ["repos", "deletePagesSite"] + }], + disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], + enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { + mediaType: { + previews: ["london"] + } + }], + enablePagesSite: ["POST /repos/{owner}/{repo}/pages", { + mediaType: { + previews: ["switcheroo"] + } + }, { + renamed: ["repos", "createPagesSite"] + }], + enableVulnerabilityAlerts: ["PUT /repos/{owner}/{repo}/vulnerability-alerts", { + mediaType: { + previews: ["dorian"] + } + }], + get: ["GET /repos/{owner}/{repo}"], + getAccessRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], + getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + getAllStatusCheckContexts: ["GET 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], + getAllTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], + getArchiveLink: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}", {}, { + renamed: ["repos", "downloadArchive"] + }], + getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], + getBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection"], + getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], + getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], + getCollaboratorPermissionLevel: ["GET /repos/{owner}/{repo}/collaborators/{username}/permission"], + getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], + getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], + getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], + getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], + getCommitSignatureProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }], + getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], + getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], + getContents: ["GET /repos/{owner}/{repo}/contents/{path}", {}, { + renamed: ["repos", "getContent"] + }], + getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], + getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], + getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], + getDeploymentStatus: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}"], + getDownload: ["GET /repos/{owner}/{repo}/downloads/{download_id}"], + getHook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}", {}, { + renamed: ["repos", "getWebhook"] + }], + getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], + getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], + getPages: ["GET /repos/{owner}/{repo}/pages"], + getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], + getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], + getProtectedBranchAdminEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { + renamed: ["repos", "getAdminBranchProtection"] + }], + getProtectedBranchPullRequestReviewEnforcement: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { + renamed: ["repos", "getPullRequestReviewProtection"] + }], + getProtectedBranchRequiredSignatures: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }, { + renamed: ["repos", "getCommitSignatureProtection"] + }], + getProtectedBranchRequiredStatusChecks: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "getStatusChecksProtection"] + }], + getProtectedBranchRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { + renamed: ["repos", "getAccessRestrictions"] + }], + getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], + getReadme: ["GET /repos/{owner}/{repo}/readme"], + getRelease: ["GET 
/repos/{owner}/{repo}/releases/{release_id}"], + getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], + getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], + getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], + getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], + getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], + getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], + getViews: ["GET /repos/{owner}/{repo}/traffic/views"], + getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], + list: ["GET /user/repos", {}, { + renamed: ["repos", "listForAuthenticatedUser"] + }], + listAssetsForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets", {}, { + renamed: ["repos", "listReleaseAssets"] + }], + listBranches: ["GET /repos/{owner}/{repo}/branches"], + listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { + mediaType: { + previews: ["groot"] + } + }], + listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], + listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], + listCommitComments: ["GET /repos/{owner}/{repo}/comments", {}, { + renamed: ["repos", "listCommitCommentsForRepo"] + }], + listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], + listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], + listCommits: ["GET /repos/{owner}/{repo}/commits"], + listContributors: ["GET /repos/{owner}/{repo}/contributors"], + listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], + listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], + listDeployments: ["GET /repos/{owner}/{repo}/deployments"], + listDownloads: ["GET /repos/{owner}/{repo}/downloads"], + listForAuthenticatedUser: ["GET /user/repos"], + listForOrg: ["GET /orgs/{org}/repos"], + listForUser: ["GET /users/{username}/repos"], + listForks: ["GET /repos/{owner}/{repo}/forks"], + listHooks: ["GET /repos/{owner}/{repo}/hooks", {}, { + renamed: ["repos", "listWebhooks"] + }], + listInvitations: ["GET /repos/{owner}/{repo}/invitations"], + listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], + listLanguages: ["GET /repos/{owner}/{repo}/languages"], + listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], + listProtectedBranchRequiredStatusChecksContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + renamed: ["repos", "getAllStatusCheckContexts"] + }], + listPublic: ["GET /repositories"], + listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { + mediaType: { + previews: ["groot"] + } + }], + listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], + listReleases: ["GET /repos/{owner}/{repo}/releases"], + listStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses", {}, { + renamed: ["repos", "listCommitStatusesForRef"] + }], + listTags: ["GET /repos/{owner}/{repo}/tags"], + listTeams: ["GET /repos/{owner}/{repo}/teams"], + listTopics: ["GET /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }, { + renamed: ["repos", "getAllTopics"] + }], + listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], + 
merge: ["POST /repos/{owner}/{repo}/merges"], + pingHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings", {}, { + renamed: ["repos", "pingWebhook"] + }], + pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], + removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + removeBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection", {}, { + renamed: ["repos", "deleteBranchProtection"] + }], + removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], + removeDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}", {}, { + renamed: ["repos", "deleteDeployKey"] + }], + removeProtectedBranchAdminEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { + renamed: ["repos", "deleteAdminBranchProtection"] + }], + removeProtectedBranchAppRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps", + renamed: ["repos", "removeAppAccessRestrictions"] + }], + removeProtectedBranchPullRequestReviewEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { + renamed: ["repos", "deletePullRequestReviewProtection"] + }], + removeProtectedBranchRequiredSignatures: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { + mediaType: { + previews: ["zzzax"] + } + }, { + renamed: ["repos", "deleteCommitSignatureProtection"] + }], + removeProtectedBranchRequiredStatusChecks: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "removeStatusChecksProtection"] + }], + removeProtectedBranchRequiredStatusChecksContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts", + renamed: ["repos", "removeStatusCheckContexts"] + }], + removeProtectedBranchRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { + renamed: ["repos", "deleteAccessRestrictions"] + }], + removeProtectedBranchTeamRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams", + renamed: ["repos", "removeTeamAccessRestrictions"] + }], + removeProtectedBranchUserRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users", + renamed: ["repos", "removeUserAccessRestrictions"] + }], + removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }], + replaceProtectedBranchAppRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps", + renamed: ["repos", "setAppAccessRestrictions"] + }], + replaceProtectedBranchRequiredStatusChecksContexts: ["PUT 
/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts", + renamed: ["repos", "setStatusCheckContexts"] + }], + replaceProtectedBranchTeamRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams", + renamed: ["repos", "setTeamAccessRestrictions"] + }], + replaceProtectedBranchUserRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users", + renamed: ["repos", "setUserAccessRestrictions"] + }], + replaceTopics: ["PUT /repos/{owner}/{repo}/topics", { + mediaType: { + previews: ["mercy"] + } + }, { + renamed: ["repos", "replaceAllTopics"] + }], + requestPageBuild: ["POST /repos/{owner}/{repo}/pages/builds", {}, { + renamed: ["repos", "requestPagesBuild"] + }], + requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], + retrieveCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", {}, { + renamed: ["repos", "getCommunityProfileMetrics"] + }], + setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], + setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { + mapToData: "apps" + }], + setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { + mapToData: "contexts" + }], + setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { + mapToData: "teams" + }], + setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { + mapToData: "users" + }], + testPushHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests", {}, { + renamed: ["repos", "testPushWebhook"] + }], + testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], + transfer: ["POST /repos/{owner}/{repo}/transfer"], + update: ["PATCH /repos/{owner}/{repo}"], + updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], + updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], + updateHook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}", {}, { + renamed: ["repos", "updateWebhook"] + }], + updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], + updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], + updateProtectedBranchPullRequestReviewEnforcement: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { + renamed: ["repos", "updatePullRequestReviewProtection"] + }], + updateProtectedBranchRequiredStatusChecks: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { + renamed: ["repos", "updateStatusChecksProtection"] + }], + updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], + updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], + updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], + updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], + updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], + uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { + baseUrl: "https://uploads.github.com" + }] + }, + search: { + code: ["GET /search/code"], + commits: ["GET /search/commits", { + 
mediaType: { + previews: ["cloak"] + } + }], + issuesAndPullRequests: ["GET /search/issues"], + labels: ["GET /search/labels"], + repos: ["GET /search/repositories"], + topics: ["GET /search/topics"], + users: ["GET /search/users"] + }, + teams: { + addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], + addOrUpdateMembershipInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { + renamed: ["teams", "addOrUpdateMembershipForUserInOrg"] + }], + addOrUpdateProjectInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }, { + renamed: ["teams", "addOrUpdateProjectPermissionsInOrg"] + }], + addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + addOrUpdateRepoInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { + renamed: ["teams", "addOrUpdateRepoPermissionsInOrg"] + }], + addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + checkManagesRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { + renamed: ["teams", "checkPermissionsForRepoInOrg"] + }], + checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }], + checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + create: ["POST /orgs/{org}/teams"], + createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], + deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], + getByName: ["GET /orgs/{org}/teams/{team_slug}"], + getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], + getMembershipInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { + renamed: ["teams", "getMembershipForUserInOrg"] + }], + list: ["GET /orgs/{org}/teams"], + listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], + listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], + listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], + listForAuthenticatedUser: ["GET /user/teams"], + listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], + listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], + listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { + mediaType: { + previews: ["inertia"] + } + }], + listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], + removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], + removeMembershipInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { + renamed: ["teams", "removeMembershipForUserInOrg"] + }], + removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], + removeRepoInOrg: ["DELETE 
/orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], + reviewProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { + mediaType: { + previews: ["inertia"] + } + }, { + renamed: ["teams", "checkPermissionsForProjectInOrg"] + }], + updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], + updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], + updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + }, + users: { + addEmailForAuthenticated: ["POST /user/emails"], + addEmails: ["POST /user/emails", {}, { + renamed: ["users", "addEmailsForAuthenticated"] + }], + block: ["PUT /user/blocks/{username}"], + checkBlocked: ["GET /user/blocks/{username}"], + checkFollowing: ["GET /user/following/{username}", {}, { + renamed: ["users", "checkPersonIsFollowedByAuthenticated"] + }], + checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], + checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], + createGpgKey: ["POST /user/gpg_keys", {}, { + renamed: ["users", "createGpgKeyForAuthenticated"] + }], + createGpgKeyForAuthenticated: ["POST /user/gpg_keys"], + createPublicKey: ["POST /user/keys", {}, { + renamed: ["users", "createPublicSshKeyForAuthenticated"] + }], + createPublicSshKeyForAuthenticated: ["POST /user/keys"], + deleteEmailForAuthenticated: ["DELETE /user/emails"], + deleteEmails: ["DELETE /user/emails", {}, { + renamed: ["users", "deleteEmailsForAuthenticated"] + }], + deleteGpgKey: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "deleteGpgKeyForAuthenticated"] + }], + deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"], + deletePublicKey: ["DELETE /user/keys/{key_id}", {}, { + renamed: ["users", "deletePublicSshKeyForAuthenticated"] + }], + deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], + follow: ["PUT /user/following/{username}"], + getAuthenticated: ["GET /user"], + getByUsername: ["GET /users/{username}"], + getContextForUser: ["GET /users/{username}/hovercard"], + getGpgKey: ["GET /user/gpg_keys/{gpg_key_id}", {}, { + renamed: ["users", "getGpgKeyForAuthenticated"] + }], + getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], + getPublicKey: ["GET /user/keys/{key_id}", {}, { + renamed: ["users", "getPublicSshKeyForAuthenticated"] + }], + getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], + list: ["GET /users"], + listBlocked: ["GET /user/blocks", {}, { + renamed: ["users", "listBlockedByAuthenticated"] + }], + listBlockedByAuthenticated: ["GET /user/blocks"], + listEmails: ["GET /user/emails", {}, { + renamed: ["users", "listEmailsForAuthenticated"] + }], + listEmailsForAuthenticated: ["GET /user/emails"], + listFollowedByAuthenticated: ["GET /user/following"], + listFollowersForAuthenticatedUser: ["GET /user/followers"], + listFollowersForUser: ["GET /users/{username}/followers"], + listFollowingForAuthenticatedUser: ["GET /user/following", {}, { + renamed: ["users", "listFollowedByAuthenticated"] + }], + listFollowingForUser: ["GET /users/{username}/following"], + listGpgKeys: ["GET /user/gpg_keys", {}, { + renamed: ["users", "listGpgKeysForAuthenticated"] + }], + listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], + listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], + listPublicEmails: ["GET /user/public_emails", {}, { + renamed: ["users", "listPublicEmailsForAuthenticatedUser"] + }], + listPublicEmailsForAuthenticated: ["GET 
/user/public_emails"], + listPublicKeys: ["GET /user/keys", {}, { + renamed: ["users", "listPublicSshKeysForAuthenticated"] + }], + listPublicKeysForUser: ["GET /users/{username}/keys"], + listPublicSshKeysForAuthenticated: ["GET /user/keys"], + setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], + togglePrimaryEmailVisibility: ["PATCH /user/email/visibility", {}, { + renamed: ["users", "setPrimaryEmailVisibilityForAuthenticated"] + }], + unblock: ["DELETE /user/blocks/{username}"], + unfollow: ["DELETE /user/following/{username}"], + updateAuthenticated: ["PATCH /user"] + } +}; - const eventData = await getEvent(octokit, eventUrl); - prepareEnv(env, eventUrl, eventData); - await execute(token, octokit, env, eventData, rootFolder, false); -} +const VERSION = "3.17.0"; -module.exports = { executeLocally, executeFromEvent }; +function endpointsToMethods(octokit, endpointsMap) { + const newMethods = {}; + + for (const [scope, endpoints] of Object.entries(endpointsMap)) { + for (const [methodName, endpoint] of Object.entries(endpoints)) { + const [route, defaults, decorations] = endpoint; + const [method, url] = route.split(/ /); + const endpointDefaults = Object.assign({ + method, + url + }, defaults); + if (!newMethods[scope]) { + newMethods[scope] = {}; + } -/***/ }), -/* 772 */, -/* 773 */, -/* 774 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + const scopeMethods = newMethods[scope]; -"use strict"; + if (decorations) { + scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); + continue; + } + scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); + } + } -const u = __webpack_require__(0).fromCallback -module.exports = { - copy: u(__webpack_require__(595)) + return newMethods; } +function decorate(octokit, scope, methodName, defaults, decorations) { + const requestWithDefaults = octokit.request.defaults(defaults); -/***/ }), -/* 775 */, -/* 776 */, -/* 777 */, -/* 778 */, -/* 779 */, -/* 780 */, -/* 781 */, -/* 782 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + function withDecorations(...args) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` -"use strict"; + if (decorations.mapToData) { + options = Object.assign({}, options, { + data: options[decorations.mapToData], + [decorations.mapToData]: undefined + }); + return requestWithDefaults(options); + } // NOTE: there are currently no deprecations. But we keep the code + // below for future reference -var Type = __webpack_require__(547); + if (decorations.renamed) { + const [newScope, newMethodName] = decorations.renamed; + octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); + } -module.exports = new Type('tag:yaml.org,2002:str', { - kind: 'scalar', - construct: function (data) { return data !== null ? 
data : ''; } -}); + if (decorations.deprecated) { + octokit.log.warn(decorations.deprecated); + } + if (decorations.renamedParameters) { + // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + const options = requestWithDefaults.endpoint.merge(...args); -/***/ }), -/* 783 */, -/* 784 */, -/* 785 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + for (const [name, alias] of Object.entries(decorations.renamedParameters)) { + // There is currently no deprecated parameter that is optional, + // so we never hit the else branch below at this point. -const { - checkoutDefinitionTree, - getPlaceHolders -} = __webpack_require__(330); -const { executeBuild } = __webpack_require__(11); -const { - getTreeForProject, - parentChainFromNode -} = __webpack_require__(594); -const { printCheckoutInformation } = __webpack_require__(656); -const { logger } = __webpack_require__(79); -const core = __webpack_require__(470); -const { - archiveArtifacts -} = __webpack_require__(503); + /* istanbul ignore else */ + if (name in options) { + octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); -async function start(context, isArchiveArtifacts = true) { - core.startGroup( - `[Pull Request Flow] Checking out ${context.config.github.groupProject} and its dependencies` - ); - const definitionTree = await getTreeForProject( - context.config.github.inputs.definitionFile, - context.config.github.repository, - await getPlaceHolders(context, context.config.github.inputs.definitionFile) - ); - const nodeChain = await parentChainFromNode(definitionTree); + if (!(alias in options)) { + options[alias] = options[name]; + } - logger.info( - `Tree for project ${ - context.config.github.repository - }. Dependencies: ${nodeChain.map(node => "\n" + node.project)}` - ); - const checkoutInfo = await checkoutDefinitionTree(context, nodeChain); - core.endGroup(); + delete options[name]; + } + } - core.startGroup(`[Pull Request Flow] Checkout Summary...`); - printCheckoutInformation(checkoutInfo); - core.endGroup(); + return requestWithDefaults(options); + } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const executionResult = await executeBuild( - context.config.rootFolder, - nodeChain, - context.config.github.repository - ) - .then(() => true) - .catch(e => e); - if (isArchiveArtifacts) { - core.startGroup(`[Pull Request Flow] Archiving artifacts...`); - await archiveArtifacts( - nodeChain.find(node => node.project === context.config.github.repository), - nodeChain, - executionResult === true ? ["success", "always"] : ["failure", "always"] - ); - core.endGroup(); - } else { - logger.info("Archive artifact won't be executed"); + return requestWithDefaults(...args); } - if (executionResult !== true) { - logger.error(executionResult); - throw new Error( - `Command executions have failed, please review latest execution ${executionResult}` - ); - } + return Object.assign(withDecorations, requestWithDefaults); } -module.exports = { - start -}; - +/** + * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary + * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is + * done, we will remove the registerEndpoints methods and return the methods + * directly as with the other plugins. At that point we will also remove the + * legacy workarounds and deprecations. 
+ * + * See the plan at + * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 + */ -/***/ }), -/* 786 */, -/* 787 */ -/***/ (function(module) { +function restEndpointMethods(octokit) { + return endpointsToMethods(octokit, Endpoints); +} +restEndpointMethods.VERSION = VERSION; -module.exports = { - warn: "warn", - error: "error", - ignore: "ignore" -}; +exports.restEndpointMethods = restEndpointMethods; +//# sourceMappingURL=index.js.map /***/ }), -/* 788 */, -/* 789 */, -/* 790 */, -/* 791 */, -/* 792 */, -/* 793 */, -/* 794 */ -/***/ (function(module) { +/* 251 */, +/* 252 */, +/* 253 */, +/* 254 */, +/* 255 */, +/* 256 */, +/* 257 */, +/* 258 */, +/* 259 */, +/* 260 */ +/***/ (function(module, __unusedexports, __webpack_require__) { -module.exports = require("stream"); +// Note: since nyc uses this module to output coverage, any lines +// that are in the direct sync flow of nyc's outputCoverage are +// ignored, since we can never get coverage for them. +var assert = __webpack_require__(357) +var signals = __webpack_require__(654) +var isWin = /^win/i.test(process.platform) -/***/ }), -/* 795 */, -/* 796 */ -/***/ (function(__unusedmodule, exports, __webpack_require__) { +var EE = __webpack_require__(614) +/* istanbul ignore if */ +if (typeof EE !== 'function') { + EE = EE.EventEmitter +} -"use strict"; +var emitter +if (process.__signal_exit_emitter__) { + emitter = process.__signal_exit_emitter__ +} else { + emitter = process.__signal_exit_emitter__ = new EE() + emitter.count = 0 + emitter.emitted = {} +} +// Because this emitter is a global, we have to check to see if a +// previous version of this library failed to enable infinite listeners. +// I know what you're about to say. But literally everything about +// signal-exit is a compromise with evil. Get used to it. +if (!emitter.infinite) { + emitter.setMaxListeners(Infinity) + emitter.infinite = true +} -Object.defineProperty(exports, '__esModule', { value: true }); +module.exports = function (cb, opts) { + assert.equal(typeof cb, 'function', 'a callback must be provided for exit handler') -function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
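To make the decorate() machinery above concrete, here is how the two decoration kinds behave at call time. This is a sketch, not part of the bundle; owner/repo/branch values are illustrative:

// renamed: the old method name still works but logs a warning first.
octokit.repos.listHooks({ owner: "o", repo: "r" });
// -> octokit.log.warn('octokit.repos.listHooks() has been renamed to
//    octokit.repos.listWebhooks()'), then GET /repos/o/r/hooks as usual.

// mapToData: the named parameter becomes the request body.
octokit.repos.setStatusCheckContexts({
  owner: "o", repo: "r", branch: "main",
  contexts: ["ci/test"]
});
// decorate() rewrites the merged options to
//   { method: "PUT", url: "...", data: ["ci/test"], contexts: undefined }
// so the contexts array is sent as the PUT body.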
ex['default'] : ex; } + if (loaded === false) { + load() + } -var osName = _interopDefault(__webpack_require__(2)); + var ev = 'exit' + if (opts && opts.alwaysLast) { + ev = 'afterexit' + } -function getUserAgent() { - try { - return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`; - } catch (error) { - if (/wmic os get Caption/.test(error.message)) { - return "Windows "; + var remove = function () { + emitter.removeListener(ev, cb) + if (emitter.listeners('exit').length === 0 && + emitter.listeners('afterexit').length === 0) { + unload() } - - return ""; } + emitter.on(ev, cb) + + return remove } -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map +module.exports.unload = unload +function unload () { + if (!loaded) { + return + } + loaded = false + signals.forEach(function (sig) { + try { + process.removeListener(sig, sigListeners[sig]) + } catch (er) {} + }) + process.emit = originalProcessEmit + process.reallyExit = originalProcessReallyExit + emitter.count -= 1 +} -/***/ }), -/* 797 */, -/* 798 */, -/* 799 */, -/* 800 */, -/* 801 */, -/* 802 */, -/* 803 */, -/* 804 */, -/* 805 */, -/* 806 */, -/* 807 */, -/* 808 */, -/* 809 */, -/* 810 */, -/* 811 */, -/* 812 */, -/* 813 */ -/***/ (function(__unusedmodule, exports) { +function emit (event, code, signal) { + if (emitter.emitted[event]) { + return + } + emitter.emitted[event] = true + emitter.emit(event, code, signal) +} -"use strict"; +// { : , ... } +var sigListeners = {} +signals.forEach(function (sig) { + sigListeners[sig] = function listener () { + // If there are no other listeners, an exit is coming! + // Simplest way: remove us and then re-send the signal. + // We know that this will kill the process, so we can + // safely emit now. + var listeners = process.listeners(sig) + if (listeners.length === emitter.count) { + unload() + emit('exit', null, sig) + /* istanbul ignore next */ + emit('afterexit', null, sig) + /* istanbul ignore next */ + if (isWin && sig === 'SIGHUP') { + // "SIGHUP" throws an `ENOSYS` error on Windows, + // so use a supported signal instead + sig = 'SIGINT' + } + process.kill(process.pid, sig) + } + } +}) +module.exports.signals = function () { + return signals +} -Object.defineProperty(exports, '__esModule', { value: true }); +module.exports.load = load -async function auth(token) { - const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? "installation" : "oauth"; - return { - type: "token", - token: token, - tokenType - }; -} +var loaded = false -/** - * Prefix token for usage in the Authorization header - * - * @param token OAuth token or JSON Web Token - */ -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; +function load () { + if (loaded) { + return } + loaded = true - return `token ${token}`; -} + // This is the number of onSignalExit's that are in play. + // It's important so that we can count the correct number of + // listeners on signals, and don't wait for the other one to + // handle it instead of us. 
+ emitter.count += 1 -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge(route, parameters); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); + signals = signals.filter(function (sig) { + try { + process.on(sig, sigListeners[sig]) + return true + } catch (er) { + return false + } + }) + + process.emit = processEmit + process.reallyExit = processReallyExit } -const createTokenAuth = function createTokenAuth(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } +var originalProcessReallyExit = process.reallyExit +function processReallyExit (code) { + process.exitCode = code || 0 + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + /* istanbul ignore next */ + originalProcessReallyExit.call(process, process.exitCode) +} - if (typeof token !== "string") { - throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); +var originalProcessEmit = process.emit +function processEmit (ev, arg) { + if (ev === 'exit') { + if (arg !== undefined) { + process.exitCode = arg + } + var ret = originalProcessEmit.apply(this, arguments) + emit('exit', process.exitCode, null) + /* istanbul ignore next */ + emit('afterexit', process.exitCode, null) + return ret + } else { + return originalProcessEmit.apply(this, arguments) } - - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; - -exports.createTokenAuth = createTokenAuth; -//# sourceMappingURL=index.js.map +} /***/ }), -/* 814 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +/* 261 */, +/* 262 */, +/* 263 */, +/* 264 */, +/* 265 */, +/* 266 */, +/* 267 */, +/* 268 */, +/* 269 */, +/* 270 */, +/* 271 */, +/* 272 */, +/* 273 */, +/* 274 */, +/* 275 */, +/* 276 */, +/* 277 */, +/* 278 */, +/* 279 */, +/* 280 */ +/***/ (function(module, exports) { -module.exports = which -which.sync = whichSync +exports = module.exports = SemVer -var isWindows = process.platform === 'win32' || - process.env.OSTYPE === 'cygwin' || - process.env.OSTYPE === 'msys' +var debug +/* istanbul ignore next */ +if (typeof process === 'object' && + process.env && + process.env.NODE_DEBUG && + /\bsemver\b/i.test(process.env.NODE_DEBUG)) { + debug = function () { + var args = Array.prototype.slice.call(arguments, 0) + args.unshift('SEMVER') + console.log.apply(console, args) + } +} else { + debug = function () {} +} -var path = __webpack_require__(622) -var COLON = isWindows ? ';' : ':' -var isexe = __webpack_require__(742) +// Note: this is the semver.org version of the spec that it implements +// Not necessarily the package version of this code. +exports.SEMVER_SPEC_VERSION = '2.0.0' -function getNotFoundError (cmd) { - var er = new Error('not found: ' + cmd) - er.code = 'ENOENT' +var MAX_LENGTH = 256 +var MAX_SAFE_INTEGER = Number.MAX_SAFE_INTEGER || + /* istanbul ignore next */ 9007199254740991 - return er -} +// Max safe segment length for coercion. 
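The signal-exit module above exposes a single registration function; a minimal usage sketch follows (the bare require stands in for the webpack module id the bundle actually uses):

const onExit = require("signal-exit");

// The handler runs once, on normal exit or on a fatal signal;
// it receives (code, signal), one of which is null.
const remove = onExit(function (code, signal) {
  console.log("exiting:", code, signal);
});

// { alwaysLast: true } registers on the 'afterexit' event instead,
// so this handler runs after all plain 'exit' handlers.
onExit(() => console.log("last words"), { alwaysLast: true });

remove(); // deregister; unload() runs once no handlers remain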
+var MAX_SAFE_COMPONENT_LENGTH = 16 -function getPathInfo (cmd, opt) { - var colon = opt.colon || COLON - var pathEnv = opt.path || process.env.PATH || '' - var pathExt = [''] +// The actual regexps go on exports.re +var re = exports.re = [] +var src = exports.src = [] +var R = 0 - pathEnv = pathEnv.split(colon) +// The following Regular Expressions can be used for tokenizing, +// validating, and parsing SemVer version strings. - var pathExtExe = '' - if (isWindows) { - pathEnv.unshift(process.cwd()) - pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') - pathExt = pathExtExe.split(colon) +// ## Numeric Identifier +// A single `0`, or a non-zero digit followed by zero or more digits. + +var NUMERICIDENTIFIER = R++ +src[NUMERICIDENTIFIER] = '0|[1-9]\\d*' +var NUMERICIDENTIFIERLOOSE = R++ +src[NUMERICIDENTIFIERLOOSE] = '[0-9]+' +// ## Non-numeric Identifier +// Zero or more digits, followed by a letter or hyphen, and then zero or +// more letters, digits, or hyphens. - // Always test the cmd itself first. isexe will check to make sure - // it's found in the pathExt set. - if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') - pathExt.unshift('') - } +var NONNUMERICIDENTIFIER = R++ +src[NONNUMERICIDENTIFIER] = '\\d*[a-zA-Z-][a-zA-Z0-9-]*' - // If it has a slash, then we don't bother searching the pathenv. - // just check the file itself, and that's it. - if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) - pathEnv = [''] +// ## Main Version +// Three dot-separated numeric identifiers. - return { - env: pathEnv, - ext: pathExt, - extExe: pathExtExe - } -} +var MAINVERSION = R++ +src[MAINVERSION] = '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')\\.' + + '(' + src[NUMERICIDENTIFIER] + ')' -function which (cmd, opt, cb) { - if (typeof opt === 'function') { - cb = opt - opt = {} - } +var MAINVERSIONLOOSE = R++ +src[MAINVERSIONLOOSE] = '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')\\.' + + '(' + src[NUMERICIDENTIFIERLOOSE] + ')' - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] +// ## Pre-release Version Identifier +// A numeric identifier, or a non-numeric identifier. - ;(function F (i, l) { - if (i === l) { - if (opt.all && found.length) - return cb(null, found) - else - return cb(getNotFoundError(cmd)) - } +var PRERELEASEIDENTIFIER = R++ +src[PRERELEASEIDENTIFIER] = '(?:' + src[NUMERICIDENTIFIER] + + '|' + src[NONNUMERICIDENTIFIER] + ')' - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) +var PRERELEASEIDENTIFIERLOOSE = R++ +src[PRERELEASEIDENTIFIERLOOSE] = '(?:' + src[NUMERICIDENTIFIERLOOSE] + + '|' + src[NONNUMERICIDENTIFIER] + ')' - var p = path.join(pathPart, cmd) - if (!pathPart && (/^\.[\\\/]/).test(cmd)) { - p = cmd.slice(0, 2) + p - } - ;(function E (ii, ll) { - if (ii === ll) return F(i + 1, l) - var ext = pathExt[ii] - isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { - if (!er && is) { - if (opt.all) - found.push(p + ext) - else - return cb(null, p + ext) - } - return E(ii + 1, ll) - }) - })(0, pathExt.length) - })(0, pathEnv.length) -} +// ## Pre-release Version +// Hyphen, followed by one or more dot-separated pre-release version +// identifiers. -function whichSync (cmd, opt) { - opt = opt || {} +var PRERELEASE = R++ +src[PRERELEASE] = '(?:-(' + src[PRERELEASEIDENTIFIER] + + '(?:\\.' 
+ src[PRERELEASEIDENTIFIER] + ')*))' - var info = getPathInfo(cmd, opt) - var pathEnv = info.env - var pathExt = info.ext - var pathExtExe = info.extExe - var found = [] +var PRERELEASELOOSE = R++ +src[PRERELEASELOOSE] = '(?:-?(' + src[PRERELEASEIDENTIFIERLOOSE] + + '(?:\\.' + src[PRERELEASEIDENTIFIERLOOSE] + ')*))' - for (var i = 0, l = pathEnv.length; i < l; i ++) { - var pathPart = pathEnv[i] - if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') - pathPart = pathPart.slice(1, -1) +// ## Build Metadata Identifier +// Any combination of digits, letters, or hyphens. - var p = path.join(pathPart, cmd) - if (!pathPart && /^\.[\\\/]/.test(cmd)) { - p = cmd.slice(0, 2) + p - } - for (var j = 0, ll = pathExt.length; j < ll; j ++) { - var cur = p + pathExt[j] - var is - try { - is = isexe.sync(cur, { pathExt: pathExtExe }) - if (is) { - if (opt.all) - found.push(cur) - else - return cur - } - } catch (ex) {} - } - } +var BUILDIDENTIFIER = R++ +src[BUILDIDENTIFIER] = '[0-9A-Za-z-]+' - if (opt.all && found.length) - return found +// ## Build Metadata +// Plus sign, followed by one or more period-separated build metadata +// identifiers. - if (opt.nothrow) - return null +var BUILD = R++ +src[BUILD] = '(?:\\+(' + src[BUILDIDENTIFIER] + + '(?:\\.' + src[BUILDIDENTIFIER] + ')*))' - throw getNotFoundError(cmd) -} +// ## Full Version String +// A main version, followed optionally by a pre-release version and +// build metadata. +// Note that the only major, minor, patch, and pre-release sections of +// the version string are capturing groups. The build metadata is not a +// capturing group, because it should not ever be used in version +// comparison. -/***/ }), -/* 815 */, -/* 816 */ -/***/ (function(module) { +var FULL = R++ +var FULLPLAIN = 'v?' + src[MAINVERSION] + + src[PRERELEASE] + '?' + + src[BUILD] + '?' -"use strict"; +src[FULL] = '^' + FULLPLAIN + '$' -module.exports = /^#!.*/; +// like full, but allows v1.2.3 and =1.2.3, which people do sometimes. +// also, 1.0.0alpha1 (prerelease without the hyphen) which is pretty +// common in the npm registry. +var LOOSEPLAIN = '[v=\\s]*' + src[MAINVERSIONLOOSE] + + src[PRERELEASELOOSE] + '?' + + src[BUILD] + '?' +var LOOSE = R++ +src[LOOSE] = '^' + LOOSEPLAIN + '$' -/***/ }), -/* 817 */, -/* 818 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +var GTLT = R++ +src[GTLT] = '((?:<|>)?=?)' -module.exports = isexe -isexe.sync = sync +// Something like "2.*" or "1.2.x". +// Note that "x.x" is a valid xRange identifer, meaning "any version" +// Only the first item is strictly required. +var XRANGEIDENTIFIERLOOSE = R++ +src[XRANGEIDENTIFIERLOOSE] = src[NUMERICIDENTIFIERLOOSE] + '|x|X|\\*' +var XRANGEIDENTIFIER = R++ +src[XRANGEIDENTIFIER] = src[NUMERICIDENTIFIER] + '|x|X|\\*' -var fs = __webpack_require__(747) +var XRANGEPLAIN = R++ +src[XRANGEPLAIN] = '[v=\\s]*(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIER] + ')' + + '(?:' + src[PRERELEASE] + ')?' + + src[BUILD] + '?' + + ')?)?' -function checkPathExt (path, options) { - var pathext = options.pathExt !== undefined ? - options.pathExt : process.env.PATHEXT +var XRANGEPLAINLOOSE = R++ +src[XRANGEPLAINLOOSE] = '[v=\\s]*(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:\\.(' + src[XRANGEIDENTIFIERLOOSE] + ')' + + '(?:' + src[PRERELEASELOOSE] + ')?' + + src[BUILD] + '?' + + ')?)?' 
- if (!pathext) { - return true - } +var XRANGE = R++ +src[XRANGE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAIN] + '$' +var XRANGELOOSE = R++ +src[XRANGELOOSE] = '^' + src[GTLT] + '\\s*' + src[XRANGEPLAINLOOSE] + '$' - pathext = pathext.split(';') - if (pathext.indexOf('') !== -1) { - return true - } - for (var i = 0; i < pathext.length; i++) { - var p = pathext[i].toLowerCase() - if (p && path.substr(-p.length).toLowerCase() === p) { - return true - } - } - return false -} +// Coercion. +// Extract anything that could conceivably be a part of a valid semver +var COERCE = R++ +src[COERCE] = '(?:^|[^\\d])' + + '(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '})' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:\\.(\\d{1,' + MAX_SAFE_COMPONENT_LENGTH + '}))?' + + '(?:$|[^\\d])' -function checkStat (stat, path, options) { - if (!stat.isSymbolicLink() && !stat.isFile()) { - return false - } - return checkPathExt(path, options) -} +// Tilde ranges. +// Meaning is "reasonably at or greater than" +var LONETILDE = R++ +src[LONETILDE] = '(?:~>?)' -function isexe (path, options, cb) { - fs.stat(path, function (er, stat) { - cb(er, er ? false : checkStat(stat, path, options)) - }) -} +var TILDETRIM = R++ +src[TILDETRIM] = '(\\s*)' + src[LONETILDE] + '\\s+' +re[TILDETRIM] = new RegExp(src[TILDETRIM], 'g') +var tildeTrimReplace = '$1~' -function sync (path, options) { - return checkStat(fs.statSync(path), path, options) -} +var TILDE = R++ +src[TILDE] = '^' + src[LONETILDE] + src[XRANGEPLAIN] + '$' +var TILDELOOSE = R++ +src[TILDELOOSE] = '^' + src[LONETILDE] + src[XRANGEPLAINLOOSE] + '$' +// Caret ranges. +// Meaning is "at least and backwards compatible with" +var LONECARET = R++ +src[LONECARET] = '(?:\\^)' -/***/ }), -/* 819 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +var CARETTRIM = R++ +src[CARETTRIM] = '(\\s*)' + src[LONECARET] + '\\s+' +re[CARETTRIM] = new RegExp(src[CARETTRIM], 'g') +var caretTrimReplace = '$1^' -const glob = __webpack_require__(281); -const path = __webpack_require__(622); -const { stat } = __webpack_require__(747); -const { dirname } = __webpack_require__(622); -const { promisify } = __webpack_require__(669); -const stats = promisify(stat); -const { logger } = __webpack_require__(79); +var CARET = R++ +src[CARET] = '^' + src[LONECARET] + src[XRANGEPLAIN] + '$' +var CARETLOOSE = R++ +src[CARETLOOSE] = '^' + src[LONECARET] + src[XRANGEPLAINLOOSE] + '$' + +// A simple gt/lt/eq thing, or just "" to indicate "any version" +var COMPARATORLOOSE = R++ +src[COMPARATORLOOSE] = '^' + src[GTLT] + '\\s*(' + LOOSEPLAIN + ')$|^$' +var COMPARATOR = R++ +src[COMPARATOR] = '^' + src[GTLT] + '\\s*(' + FULLPLAIN + ')$|^$' -async function findFilesToUpload(searchPath, globOptions) { - const globber = await glob.create( - searchPath, - globOptions || getDefaultGlobOptions() - ); - const filesToUpload = await getSearchResults(globber); +// An expression to strip any whitespace between the gtlt and the thing +// it modifies, so that `> 1.2.3` ==> `>1.2.3` +var COMPARATORTRIM = R++ +src[COMPARATORTRIM] = '(\\s*)' + src[GTLT] + + '\\s*(' + LOOSEPLAIN + '|' + src[XRANGEPLAIN] + ')' - return { - filesToUpload, - rootDirectory: getRootDirectory(globber, filesToUpload) - }; -} +// this one has to use the /g flag +re[COMPARATORTRIM] = new RegExp(src[COMPARATORTRIM], 'g') +var comparatorTrimReplace = '$1$2$3' -/** - * If multiple paths are specific, the least common ancestor (LCA) of the search paths is used as - * the delimiter to control the directory structure for the 
artifact. This function returns the LCA - * when given an array of search paths - * - * Example 1: The patterns `/foo/` and `/bar/` returns `/` - * - * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo` - */ -function getMultiPathLCA(searchPaths) { - if (searchPaths.length < 2) { - throw new Error("At least two search paths must be provided"); - } +// Something like `1.2.3 - 1.2.4` +// Note that these all use the loose form, because they'll be +// checked against either the strict or loose comparator form +// later. +var HYPHENRANGE = R++ +src[HYPHENRANGE] = '^\\s*(' + src[XRANGEPLAIN] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAIN] + ')' + + '\\s*$' - const commonPaths = []; - let smallestPathLength = Number.MAX_SAFE_INTEGER; +var HYPHENRANGELOOSE = R++ +src[HYPHENRANGELOOSE] = '^\\s*(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s+-\\s+' + + '(' + src[XRANGEPLAINLOOSE] + ')' + + '\\s*$' - const splitPaths = searchPaths - .map(searchPath => path.normalize(searchPath).split(path.sep)) - .reduce((acc, splitSearchPath) => { - smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length); - acc.push(splitSearchPath); - return acc; - }, []); +// Star ranges basically just allow anything at all. +var STAR = R++ +src[STAR] = '(<|>)?=?\\s*\\*' - if (searchPaths[0].startsWith(path.sep)) { - commonPaths.push(path.sep); +// Compile to actual regexp objects. +// All are flag-free, unless they were created above with a flag. +for (var i = 0; i < R; i++) { + debug(i, src[i]) + if (!re[i]) { + re[i] = new RegExp(src[i]) } +} - let splitIndex = 0; - function isPathTheSame() { - const compare = splitPaths[0][splitIndex]; - for (let i = 1; i < splitPaths.length; i++) { - if (compare !== splitPaths[i][splitIndex]) { - return false; - } +exports.parse = parse +function parse (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false } - return true; } - while (splitIndex < smallestPathLength) { - if (!isPathTheSame()) { - break; - } - commonPaths.push(splitPaths[0][splitIndex]); - splitIndex++; + if (version instanceof SemVer) { + return version } - return path.join(...commonPaths); -} -async function getSearchResults(globber) { - const rawSearchResults = await globber.glob(); + if (typeof version !== 'string') { + return null + } - const searchResults = []; - for (const searchResult of rawSearchResults) { - const fileStats = await stats(searchResult); - if (!fileStats.isDirectory()) { - searchResults.push(searchResult); - if ( - searchResults - .map(item => item.toLowerCase()) - .find(item => item === searchResult.toLowerCase()) - ) { - logger.info( - `Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path` - ); - } - } + if (version.length > MAX_LENGTH) { + return null } - return searchResults; -} -function getRootDirectory(globber, searchResults) { - const searchPaths = globber.getSearchPaths(); - if (searchPaths.length > 1) { - logger.info( - `Multiple search paths detected. Calculating the least common ancestor of all paths` - ); - const lcaSearchPath = getMultiPathLCA(searchPaths); - logger.info( - `The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact` - ); - return lcaSearchPath; + var r = options.loose ? re[LOOSE] : re[FULL] + if (!r.test(version)) { + return null } - return searchResults.length === 1 && searchPaths[0] === searchResults[0] - ? 
dirname(searchResults[0]) - : searchPaths[0]; -} -function getDefaultGlobOptions() { - return { - followSymbolicLinks: true, - implicitDescendants: true, - omitBrokenSymbolicLinks: true - }; + try { + return new SemVer(version, options) + } catch (er) { + return null + } } -module.exports = { - findFilesToUpload -}; - - -/***/ }), -/* 820 */, -/* 821 */, -/* 822 */, -/* 823 */, -/* 824 */, -/* 825 */, -/* 826 */, -/* 827 */, -/* 828 */, -/* 829 */ -/***/ (function(module, __unusedexports, __webpack_require__) { +exports.valid = valid +function valid (version, options) { + var v = parse(version, options) + return v ? v.version : null +} -"use strict"; -// JS-YAML's default schema for `safeLoad` function. -// It is not described in the YAML specification. -// -// This schema is based on standard YAML's Core schema and includes most of -// extra types described at YAML tag repository. (http://yaml.org/type/) +exports.clean = clean +function clean (version, options) { + var s = parse(version.trim().replace(/^[=v]+/, ''), options) + return s ? s.version : null +} +exports.SemVer = SemVer +function SemVer (version, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + if (version instanceof SemVer) { + if (version.loose === options.loose) { + return version + } else { + version = version.version + } + } else if (typeof version !== 'string') { + throw new TypeError('Invalid Version: ' + version) + } + if (version.length > MAX_LENGTH) { + throw new TypeError('version is longer than ' + MAX_LENGTH + ' characters') + } + if (!(this instanceof SemVer)) { + return new SemVer(version, options) + } -var Schema = __webpack_require__(917); + debug('SemVer', version, options) + this.options = options + this.loose = !!options.loose + var m = version.trim().match(options.loose ? 
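A worked example for the treatUrl helper shown above (the URL and repository names are illustrative):

// treatUrl replaces each ${KEY} placeholder with its value:
treatUrl(
  "https://raw.githubusercontent.com/${GROUP}/${PROJECT_NAME}/${BRANCH}/file.txt",
  { GROUP: "kiegroup", PROJECT_NAME: "drools", BRANCH: "main" }
);
// -> "https://raw.githubusercontent.com/kiegroup/drools/main/file.txt"
// Note: String.prototype.replace with a string pattern substitutes only
// the first occurrence, so each placeholder is replaced once per key.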
re[LOOSE] : re[FULL]) -module.exports = new Schema({ - include: [ - __webpack_require__(997) - ], - implicit: [ - __webpack_require__(482), - __webpack_require__(676) - ], - explicit: [ - __webpack_require__(516), - __webpack_require__(865), - __webpack_require__(329), - __webpack_require__(414) - ] -}); + if (!m) { + throw new TypeError('Invalid Version: ' + version) + } + this.raw = version -/***/ }), -/* 830 */ -/***/ (function(module) { + // these are actually numbers + this.major = +m[1] + this.minor = +m[2] + this.patch = +m[3] -/** - * it treats the url in case it contains - * @param {String} url a http(s)://whatever.domain/${GROUP}/${PROJECT_NAME}/${BRANCH}/whateverfile.txt format, where place olders are optional and can be placed anywhere on the string - * @param {Object} placeHolders the key/values to replace url's place holders - */ -function treatUrl(url, placeHolders) { - let result = url; - Object.entries(placeHolders).forEach( - ([key, value]) => (result = result.replace(`$\{${key}}`, value)) - ); - return result; -} + if (this.major > MAX_SAFE_INTEGER || this.major < 0) { + throw new TypeError('Invalid major version') + } -module.exports = { treatUrl }; + if (this.minor > MAX_SAFE_INTEGER || this.minor < 0) { + throw new TypeError('Invalid minor version') + } + if (this.patch > MAX_SAFE_INTEGER || this.patch < 0) { + throw new TypeError('Invalid patch version') + } -/***/ }), -/* 831 */, -/* 832 */, -/* 833 */, -/* 834 */, -/* 835 */ -/***/ (function(module) { + // numberify any prerelease numeric ids + if (!m[4]) { + this.prerelease = [] + } else { + this.prerelease = m[4].split('.').map(function (id) { + if (/^[0-9]+$/.test(id)) { + var num = +id + if (num >= 0 && num < MAX_SAFE_INTEGER) { + return num + } + } + return id + }) + } -module.exports = require("url"); + this.build = m[5] ? m[5].split('.') : [] + this.format() +} -/***/ }), -/* 836 */, -/* 837 */, -/* 838 */, -/* 839 */, -/* 840 */, -/* 841 */, -/* 842 */ -/***/ (function(__unusedmodule, exports) { +SemVer.prototype.format = function () { + this.version = this.major + '.' + this.minor + '.' 
+ this.patch + if (this.prerelease.length) { + this.version += '-' + this.prerelease.join('.') + } + return this.version +} -"use strict"; +SemVer.prototype.toString = function () { + return this.version +} +SemVer.prototype.compare = function (other) { + debug('SemVer.compare', this.version, this.options, other) + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } -Object.defineProperty(exports, '__esModule', { value: true }); + return this.compareMain(other) || this.comparePre(other) +} -const Endpoints = { - actions: { - addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { - renamedParameters: { - name: "secret_name" - } - }], - createOrUpdateSecretForRepo: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { - renamed: ["actions", "createOrUpdateRepoSecret"], - renamedParameters: { - name: "secret_name" - } - }], - createRegistrationToken: ["POST /repos/{owner}/{repo}/actions/runners/registration-token", {}, { - renamed: ["actions", "createRegistrationTokenForRepo"] - }], - createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"], - createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"], - createRemoveToken: ["POST /repos/{owner}/{repo}/actions/runners/remove-token", {}, { - renamed: ["actions", "createRemoveTokenForRepo"] - }], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], - deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { - renamedParameters: { - name: "secret_name" - } - }], - deleteSecretFromRepo: ["DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { - renamed: ["actions", "deleteRepoSecret"], - renamedParameters: { - name: "secret_name" - } - }], - deleteSelfHostedRunnerFromOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}"], - deleteSelfHostedRunnerFromRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}"], - deleteWorkflowRunLogs: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - downloadArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}"], - downloadJobLogsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs"], - downloadWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { - renamed: ["actions", "downloadJobLogsForWorkflowRun"] - }], - downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key", {}, { - renamed: ["actions", "getRepoPublicKey"] - }], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET 
/repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { - renamedParameters: { - name: "secret_name" - } - }], - getSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}", {}, { - renamed: ["actions", "getRepoSecret"], - renamedParameters: { - name: "secret_name" - } - }], - getSelfHostedRunner: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { - renamed: ["actions", "getSelfHostedRunnerForRepo"] - }], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowJob: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}", {}, { - renamed: ["actions", "getJobForWorkflowRun"] - }], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], - getWorkflowUsage: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing"], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listDownloadsForSelfHostedRunnerApplication: ["GET /repos/{owner}/{repo}/actions/runners/downloads", {}, { - renamed: ["actions", "listRunnerApplicationsForRepo"] - }], - listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/runs", {}, { - renamed: ["actions", "listWorkflowRunsForRepo"] - }], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: ["GET /repos/{owner}/{repo}/actions/runners/downloads"], - listSecretsForRepo: ["GET /repos/{owner}/{repo}/actions/secrets", {}, { - renamed: ["actions", "listRepoSecrets"] - }], - listSelectedReposForOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}/repositories"], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowJobLogs: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs", {}, { - renamed: ["actions", "downloadWorkflowJobLogs"] - }], - listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], - listWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs", {}, { - renamed: ["actions", "downloadWorkflowRunLogs"] - }], - listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], - removeSelfHostedRunner: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}", {}, { - renamed: ["actions", "deleteSelfHostedRunnerFromRepo"] - }], - setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], - checkStarringRepo: ["GET /user/starred/{owner}/{repo}", {}, { - renamed: ["activity", "checkRepoIsStarredByAuthenticatedUser"] - }], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: 
["DELETE /notifications/threads/{thread_id}/subscription"], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscription: ["PUT /notifications", {}, { - renamed: ["activity", "getThreadSubscriptionForAuthenticatedUser"] - }], - getThreadSubscriptionForAuthenticatedUser: ["GET /notifications/threads/{thread_id}/subscription"], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listEventsForOrg: ["GET /users/{username}/events/orgs/{org}", {}, { - renamed: ["activity", "listOrgEventsForAuthenticatedUser"] - }], - listEventsForUser: ["GET /users/{username}/events", {}, { - renamed: ["activity", "listEventsForAuthenticatedUser"] - }], - listFeeds: ["GET /feeds", {}, { - renamed: ["activity", "getFeeds"] - }], - listNotifications: ["GET /notifications", {}, { - renamed: ["activity", "listNotificationsForAuthenticatedUser"] - }], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listNotificationsForRepo: ["GET /repos/{owner}/{repo}/notifications", {}, { - renamed: ["activity", "listRepoNotificationsForAuthenticatedUser"] - }], - listOrgEventsForAuthenticatedUser: ["GET /users/{username}/events/orgs/{org}"], - listPublicEvents: ["GET /events"], - listPublicEventsForOrg: ["GET /orgs/{org}/events", {}, { - renamed: ["activity", "listPublicOrgEvents"] - }], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: ["GET /users/{username}/received_events/public"], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: ["GET /repos/{owner}/{repo}/notifications"], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markAsRead: ["PUT /notifications", {}, { - renamed: ["activity", "markNotificationsAsRead"] - }], - markNotificationsAsRead: ["PUT /notifications"], - markNotificationsAsReadForRepo: ["PUT /repos/{owner}/{repo}/notifications", {}, { - renamed: ["activity", "markRepoNotificationsAsRead"] - }], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: ["PUT /notifications/threads/{thread_id}/subscription"], - starRepo: ["PUT /user/starred/{owner}/{repo}", {}, { - renamed: ["activity", "starRepoForAuthenticatedUser"] - }], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepo: ["DELETE /user/starred/{owner}/{repo}", {}, { - renamed: ["activity", "unstarRepoForAuthenticatedUser"] - }], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: ["PUT /user/installations/{installation_id}/repositories/{repository_id}", { - mediaType: { - previews: ["machine-man"] - } - }], - checkAccountIsAssociatedWithAny: ["GET /marketplace_listing/accounts/{account_id}", {}, { - renamed: 
["apps", "getSubscriptionPlanForAccount"] - }], - checkAccountIsAssociatedWithAnyStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}", {}, { - renamed: ["apps", "getSubscriptionPlanForAccountStubbed"] - }], - checkToken: ["POST /applications/{client_id}/token"], - createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", { - mediaType: { - previews: ["corsair"] - } - }], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens", { - mediaType: { - previews: ["machine-man"] - } - }], - createInstallationToken: ["POST /app/installations/{installation_id}/access_tokens", { - mediaType: { - previews: ["machine-man"] - } - }, { - renamed: ["apps", "createInstallationAccessToken"] - }], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}", { - mediaType: { - previews: ["machine-man"] +SemVer.prototype.compareMain = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + return compareIdentifiers(this.major, other.major) || + compareIdentifiers(this.minor, other.minor) || + compareIdentifiers(this.patch, other.patch) +} + +SemVer.prototype.comparePre = function (other) { + if (!(other instanceof SemVer)) { + other = new SemVer(other, this.options) + } + + // NOT having a prerelease is > having one + if (this.prerelease.length && !other.prerelease.length) { + return -1 + } else if (!this.prerelease.length && other.prerelease.length) { + return 1 + } else if (!this.prerelease.length && !other.prerelease.length) { + return 0 + } + + var i = 0 + do { + var a = this.prerelease[i] + var b = other.prerelease[i] + debug('prerelease compare', i, a, b) + if (a === undefined && b === undefined) { + return 0 + } else if (b === undefined) { + return 1 + } else if (a === undefined) { + return -1 + } else if (a === b) { + continue + } else { + return compareIdentifiers(a, b) + } + } while (++i) +} + +// preminor will bump the version up to the next minor release, and immediately +// down to pre-release. premajor and prepatch work the same way. +SemVer.prototype.inc = function (release, identifier) { + switch (release) { + case 'premajor': + this.prerelease.length = 0 + this.patch = 0 + this.minor = 0 + this.major++ + this.inc('pre', identifier) + break + case 'preminor': + this.prerelease.length = 0 + this.patch = 0 + this.minor++ + this.inc('pre', identifier) + break + case 'prepatch': + // If this is already a prerelease, it will bump to the next version + // drop any prereleases that might already exist, since they are not + // relevant at this point. + this.prerelease.length = 0 + this.inc('patch', identifier) + this.inc('pre', identifier) + break + // If the input is a non-prerelease version, this acts the same as + // prepatch. + case 'prerelease': + if (this.prerelease.length === 0) { + this.inc('patch', identifier) } - }], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app", { - mediaType: { - previews: ["machine-man"] + this.inc('pre', identifier) + break + + case 'major': + // If this is a pre-major version, bump up to the same major version. + // Otherwise increment major. 
+ // 1.0.0-5 bumps to 1.0.0 + // 1.1.0 bumps to 2.0.0 + if (this.minor !== 0 || + this.patch !== 0 || + this.prerelease.length === 0) { + this.major++ } - }], - getBySlug: ["GET /apps/{app_slug}", { - mediaType: { - previews: ["machine-man"] + this.minor = 0 + this.patch = 0 + this.prerelease = [] + break + case 'minor': + // If this is a pre-minor version, bump up to the same minor version. + // Otherwise increment minor. + // 1.2.0-5 bumps to 1.2.0 + // 1.2.1 bumps to 1.3.0 + if (this.patch !== 0 || this.prerelease.length === 0) { + this.minor++ } - }], - getInstallation: ["GET /app/installations/{installation_id}", { - mediaType: { - previews: ["machine-man"] + this.patch = 0 + this.prerelease = [] + break + case 'patch': + // If this is not a pre-release version, it will increment the patch. + // If it is a pre-release it will bump up to the same patch version. + // 1.2.0-5 patches to 1.2.0 + // 1.2.0 patches to 1.2.1 + if (this.prerelease.length === 0) { + this.patch++ } - }], - getOrgInstallation: ["GET /orgs/{org}/installation", { - mediaType: { - previews: ["machine-man"] + this.prerelease = [] + break + // This probably shouldn't be used publicly. + // 1.0.0 "pre" would become 1.0.0-0 which is the wrong direction. + case 'pre': + if (this.prerelease.length === 0) { + this.prerelease = [0] + } else { + var i = this.prerelease.length + while (--i >= 0) { + if (typeof this.prerelease[i] === 'number') { + this.prerelease[i]++ + i = -2 + } + } + if (i === -1) { + // didn't increment anything + this.prerelease.push(0) + } } - }], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation", { - mediaType: { - previews: ["machine-man"] + if (identifier) { + // 1.2.0-beta.1 bumps to 1.2.0-beta.2, + // 1.2.0-beta.fooblz or 1.2.0-beta bumps to 1.2.0-beta.0 + if (this.prerelease[0] === identifier) { + if (isNaN(this.prerelease[1])) { + this.prerelease = [identifier, 0] + } + } else { + this.prerelease = [identifier, 0] + } } - }], - getSubscriptionPlanForAccount: ["GET /marketplace_listing/accounts/{account_id}"], - getSubscriptionPlanForAccountStubbed: ["GET /marketplace_listing/stubbed/accounts/{account_id}"], - getUserInstallation: ["GET /users/{username}/installation", { - mediaType: { - previews: ["machine-man"] + break + + default: + throw new Error('invalid increment argument: ' + release) + } + this.format() + this.raw = this.version + return this +} + +exports.inc = inc +function inc (version, release, loose, identifier) { + if (typeof (loose) === 'string') { + identifier = loose + loose = undefined + } + + try { + return new SemVer(version, loose).inc(release, identifier).version + } catch (er) { + return null + } +} + +exports.diff = diff +function diff (version1, version2) { + if (eq(version1, version2)) { + return null + } else { + var v1 = parse(version1) + var v2 = parse(version2) + var prefix = '' + if (v1.prerelease.length || v2.prerelease.length) { + prefix = 'pre' + var defaultResult = 'prerelease' + } + for (var key in v1) { + if (key === 'major' || key === 'minor' || key === 'patch') { + if (v1[key] !== v2[key]) { + return prefix + key + } } - }], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts"], - listAccountsUserOrOrgOnPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts", {}, { - renamed: ["apps", "listAccountsForPlan"] - }], - listAccountsUserOrOrgOnPlanStubbed: ["GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", {}, { - renamed: 
["apps", "listAccountsForPlanStubbed"] - }], - listInstallationReposForAuthenticatedUser: ["GET /user/installations/{installation_id}/repositories", { - mediaType: { - previews: ["machine-man"] + } + return defaultResult // may be undefined + } +} + +exports.compareIdentifiers = compareIdentifiers + +var numeric = /^[0-9]+$/ +function compareIdentifiers (a, b) { + var anum = numeric.test(a) + var bnum = numeric.test(b) + + if (anum && bnum) { + a = +a + b = +b + } + + return a === b ? 0 + : (anum && !bnum) ? -1 + : (bnum && !anum) ? 1 + : a < b ? -1 + : 1 +} + +exports.rcompareIdentifiers = rcompareIdentifiers +function rcompareIdentifiers (a, b) { + return compareIdentifiers(b, a) +} + +exports.major = major +function major (a, loose) { + return new SemVer(a, loose).major +} + +exports.minor = minor +function minor (a, loose) { + return new SemVer(a, loose).minor +} + +exports.patch = patch +function patch (a, loose) { + return new SemVer(a, loose).patch +} + +exports.compare = compare +function compare (a, b, loose) { + return new SemVer(a, loose).compare(new SemVer(b, loose)) +} + +exports.compareLoose = compareLoose +function compareLoose (a, b) { + return compare(a, b, true) +} + +exports.rcompare = rcompare +function rcompare (a, b, loose) { + return compare(b, a, loose) +} + +exports.sort = sort +function sort (list, loose) { + return list.sort(function (a, b) { + return exports.compare(a, b, loose) + }) +} + +exports.rsort = rsort +function rsort (list, loose) { + return list.sort(function (a, b) { + return exports.rcompare(a, b, loose) + }) +} + +exports.gt = gt +function gt (a, b, loose) { + return compare(a, b, loose) > 0 +} + +exports.lt = lt +function lt (a, b, loose) { + return compare(a, b, loose) < 0 +} + +exports.eq = eq +function eq (a, b, loose) { + return compare(a, b, loose) === 0 +} + +exports.neq = neq +function neq (a, b, loose) { + return compare(a, b, loose) !== 0 +} + +exports.gte = gte +function gte (a, b, loose) { + return compare(a, b, loose) >= 0 +} + +exports.lte = lte +function lte (a, b, loose) { + return compare(a, b, loose) <= 0 +} + +exports.cmp = cmp +function cmp (a, op, b, loose) { + switch (op) { + case '===': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a === b + + case '!==': + if (typeof a === 'object') + a = a.version + if (typeof b === 'object') + b = b.version + return a !== b + + case '': + case '=': + case '==': + return eq(a, b, loose) + + case '!=': + return neq(a, b, loose) + + case '>': + return gt(a, b, loose) + + case '>=': + return gte(a, b, loose) + + case '<': + return lt(a, b, loose) + + case '<=': + return lte(a, b, loose) + + default: + throw new TypeError('Invalid operator: ' + op) + } +} + +exports.Comparator = Comparator +function Comparator (comp, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (comp instanceof Comparator) { + if (comp.loose === !!options.loose) { + return comp + } else { + comp = comp.value + } + } + + if (!(this instanceof Comparator)) { + return new Comparator(comp, options) + } + + debug('comparator', comp, options) + this.options = options + this.loose = !!options.loose + this.parse(comp) + + if (this.semver === ANY) { + this.value = '' + } else { + this.value = this.operator + this.semver.version + } + + debug('comp', this) +} + +var ANY = {} +Comparator.prototype.parse = function (comp) { + var r = this.options.loose ? 
re[COMPARATORLOOSE] : re[COMPARATOR] + var m = comp.match(r) + + if (!m) { + throw new TypeError('Invalid comparator: ' + comp) + } + + this.operator = m[1] + if (this.operator === '=') { + this.operator = '' + } + + // if it literally is just '>' or '' then allow anything. + if (!m[2]) { + this.semver = ANY + } else { + this.semver = new SemVer(m[2], this.options.loose) + } +} + +Comparator.prototype.toString = function () { + return this.value +} + +Comparator.prototype.test = function (version) { + debug('Comparator.test', version, this.options.loose) + + if (this.semver === ANY) { + return true + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + return cmp(version, this.operator, this.semver, this.options) +} + +Comparator.prototype.intersects = function (comp, options) { + if (!(comp instanceof Comparator)) { + throw new TypeError('a Comparator is required') + } + + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + var rangeTmp + + if (this.operator === '') { + rangeTmp = new Range(comp.value, options) + return satisfies(this.value, rangeTmp, options) + } else if (comp.operator === '') { + rangeTmp = new Range(this.value, options) + return satisfies(comp.semver, rangeTmp, options) + } + + var sameDirectionIncreasing = + (this.operator === '>=' || this.operator === '>') && + (comp.operator === '>=' || comp.operator === '>') + var sameDirectionDecreasing = + (this.operator === '<=' || this.operator === '<') && + (comp.operator === '<=' || comp.operator === '<') + var sameSemVer = this.semver.version === comp.semver.version + var differentDirectionsInclusive = + (this.operator === '>=' || this.operator === '<=') && + (comp.operator === '>=' || comp.operator === '<=') + var oppositeDirectionsLessThan = + cmp(this.semver, '<', comp.semver, options) && + ((this.operator === '>=' || this.operator === '>') && + (comp.operator === '<=' || comp.operator === '<')) + var oppositeDirectionsGreaterThan = + cmp(this.semver, '>', comp.semver, options) && + ((this.operator === '<=' || this.operator === '<') && + (comp.operator === '>=' || comp.operator === '>')) + + return sameDirectionIncreasing || sameDirectionDecreasing || + (sameSemVer && differentDirectionsInclusive) || + oppositeDirectionsLessThan || oppositeDirectionsGreaterThan +} + +exports.Range = Range +function Range (range, options) { + if (!options || typeof options !== 'object') { + options = { + loose: !!options, + includePrerelease: false + } + } + + if (range instanceof Range) { + if (range.loose === !!options.loose && + range.includePrerelease === !!options.includePrerelease) { + return range + } else { + return new Range(range.raw, options) + } + } + + if (range instanceof Comparator) { + return new Range(range.value, options) + } + + if (!(this instanceof Range)) { + return new Range(range, options) + } + + this.options = options + this.loose = !!options.loose + this.includePrerelease = !!options.includePrerelease + + // First, split based on boolean or || + this.raw = range + this.set = range.split(/\s*\|\|\s*/).map(function (range) { + return this.parseRange(range.trim()) + }, this).filter(function (c) { + // throw out any that are not relevant for whatever reason + return c.length + }) + + if (!this.set.length) { + throw new TypeError('Invalid SemVer Range: ' + range) + } + + this.format() +} + +Range.prototype.format = function () { + this.range = this.set.map(function (comps) { + return comps.join(' ').trim() + 
}).join('||').trim() + return this.range +} + +Range.prototype.toString = function () { + return this.range +} + +Range.prototype.parseRange = function (range) { + var loose = this.options.loose + range = range.trim() + // `1.2.3 - 1.2.4` => `>=1.2.3 <=1.2.4` + var hr = loose ? re[HYPHENRANGELOOSE] : re[HYPHENRANGE] + range = range.replace(hr, hyphenReplace) + debug('hyphen replace', range) + // `> 1.2.3 < 1.2.5` => `>1.2.3 <1.2.5` + range = range.replace(re[COMPARATORTRIM], comparatorTrimReplace) + debug('comparator trim', range, re[COMPARATORTRIM]) + + // `~ 1.2.3` => `~1.2.3` + range = range.replace(re[TILDETRIM], tildeTrimReplace) + + // `^ 1.2.3` => `^1.2.3` + range = range.replace(re[CARETTRIM], caretTrimReplace) + + // normalize spaces + range = range.split(/\s+/).join(' ') + + // At this point, the range is completely trimmed and + // ready to be split into comparators. + + var compRe = loose ? re[COMPARATORLOOSE] : re[COMPARATOR] + var set = range.split(' ').map(function (comp) { + return parseComparator(comp, this.options) + }, this).join(' ').split(/\s+/) + if (this.options.loose) { + // in loose mode, throw out any that are not valid comparators + set = set.filter(function (comp) { + return !!comp.match(compRe) + }) + } + set = set.map(function (comp) { + return new Comparator(comp, this.options) + }, this) + + return set +} + +Range.prototype.intersects = function (range, options) { + if (!(range instanceof Range)) { + throw new TypeError('a Range is required') + } + + return this.set.some(function (thisComparators) { + return thisComparators.every(function (thisComparator) { + return range.set.some(function (rangeComparators) { + return rangeComparators.every(function (rangeComparator) { + return thisComparator.intersects(rangeComparator, options) + }) + }) + }) + }) +} + +// Mostly just for testing and legacy API reasons +exports.toComparators = toComparators +function toComparators (range, options) { + return new Range(range, options).set.map(function (comp) { + return comp.map(function (c) { + return c.value + }).join(' ').trim().split(' ') + }) +} + +// comprised of xranges, tildes, stars, and gtlt's at this point. +// already replaced the hyphen ranges +// turn into a set of JUST comparators. +function parseComparator (comp, options) { + debug('comp', comp, options) + comp = replaceCarets(comp, options) + debug('caret', comp) + comp = replaceTildes(comp, options) + debug('tildes', comp) + comp = replaceXRanges(comp, options) + debug('xrange', comp) + comp = replaceStars(comp, options) + debug('stars', comp) + return comp +} + +function isX (id) { + return !id || id.toLowerCase() === 'x' || id === '*' +} + +// ~, ~> --> * (any, kinda silly) +// ~2, ~2.x, ~2.x.x, ~>2, ~>2.x ~>2.x.x --> >=2.0.0 <3.0.0 +// ~2.0, ~2.0.x, ~>2.0, ~>2.0.x --> >=2.0.0 <2.1.0 +// ~1.2, ~1.2.x, ~>1.2, ~>1.2.x --> >=1.2.0 <1.3.0 +// ~1.2.3, ~>1.2.3 --> >=1.2.3 <1.3.0 +// ~1.2.0, ~>1.2.0 --> >=1.2.0 <1.3.0 +function replaceTildes (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceTilde(comp, options) + }).join(' ') +} + +function replaceTilde (comp, options) { + var r = options.loose ? re[TILDELOOSE] : re[TILDE] + return comp.replace(r, function (_, M, m, p, pr) { + debug('tilde', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + // ~1.2 == >=1.2.0 <1.3.0 + ret = '>=' + M + '.' + m + '.0 <' + M + '.' 
+ (+m + 1) + '.0' + } else if (pr) { + debug('replaceTilde pr', pr) + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } else { + // ~1.2.3 == >=1.2.3 <1.3.0 + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + + debug('tilde return', ret) + return ret + }) +} + +// ^ --> * (any, kinda silly) +// ^2, ^2.x, ^2.x.x --> >=2.0.0 <3.0.0 +// ^2.0, ^2.0.x --> >=2.0.0 <3.0.0 +// ^1.2, ^1.2.x --> >=1.2.0 <2.0.0 +// ^1.2.3 --> >=1.2.3 <2.0.0 +// ^1.2.0 --> >=1.2.0 <2.0.0 +function replaceCarets (comp, options) { + return comp.trim().split(/\s+/).map(function (comp) { + return replaceCaret(comp, options) + }).join(' ') +} + +function replaceCaret (comp, options) { + debug('caret', comp, options) + var r = options.loose ? re[CARETLOOSE] : re[CARET] + return comp.replace(r, function (_, M, m, p, pr) { + debug('caret', comp, _, M, m, p, pr) + var ret + + if (isX(M)) { + ret = '' + } else if (isX(m)) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (isX(p)) { + if (M === '0') { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } else { + ret = '>=' + M + '.' + m + '.0 <' + (+M + 1) + '.0.0' } - }], - listInstallations: ["GET /app/installations", { - mediaType: { - previews: ["machine-man"] + } else if (pr) { + debug('replaceCaret pr', pr) + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + '-' + pr + + ' <' + (+M + 1) + '.0.0' } - }], - listInstallationsForAuthenticatedUser: ["GET /user/installations", { - mediaType: { - previews: ["machine-man"] + } else { + debug('no pr') + if (M === '0') { + if (m === '0') { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + m + '.' + (+p + 1) + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + M + '.' + (+m + 1) + '.0' + } + } else { + ret = '>=' + M + '.' + m + '.' + p + + ' <' + (+M + 1) + '.0.0' } - }], - listMarketplacePurchasesForAuthenticatedUser: ["GET /user/marketplace_purchases", {}, { - renamed: ["apps", "listSubscriptionsForAuthenticatedUser"] - }], - listMarketplacePurchasesForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed", {}, { - renamed: ["apps", "listSubscriptionsForAuthenticatedUserStubbed"] - }], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listRepos: ["GET /installation/repositories", { - mediaType: { - previews: ["machine-man"] + } + + debug('caret return', ret) + return ret + }) +} + +function replaceXRanges (comp, options) { + debug('replaceXRanges', comp, options) + return comp.split(/\s+/).map(function (comp) { + return replaceXRange(comp, options) + }).join(' ') +} + +function replaceXRange (comp, options) { + comp = comp.trim() + var r = options.loose ? 
re[XRANGELOOSE] : re[XRANGE] + return comp.replace(r, function (ret, gtlt, M, m, p, pr) { + debug('xRange', comp, ret, gtlt, M, m, p, pr) + var xM = isX(M) + var xm = xM || isX(m) + var xp = xm || isX(p) + var anyX = xp + + if (gtlt === '=' && anyX) { + gtlt = '' + } + + if (xM) { + if (gtlt === '>' || gtlt === '<') { + // nothing is allowed + ret = '<0.0.0' + } else { + // nothing is forbidden + ret = '*' } - }, { - renamed: ["apps", "listReposAccessibleToInstallation"] - }], - listReposAccessibleToInstallation: ["GET /installation/repositories", { - mediaType: { - previews: ["machine-man"] + } else if (gtlt && anyX) { + // we know patch is an x, because we have any x at all. + // replace X with 0 + if (xm) { + m = 0 } - }], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: ["GET /user/marketplace_purchases/stubbed"], - removeRepoFromInstallation: ["DELETE /user/installations/{installation_id}/repositories/{repository_id}", { - mediaType: { - previews: ["machine-man"] + p = 0 + + if (gtlt === '>') { + // >1 => >=2.0.0 + // >1.2 => >=1.3.0 + // >1.2.3 => >= 1.2.4 + gtlt = '>=' + if (xm) { + M = +M + 1 + m = 0 + p = 0 + } else { + m = +m + 1 + p = 0 + } + } else if (gtlt === '<=') { + // <=0.7.x is actually <0.8.0, since any 0.7.x should + // pass. Similarly, <=7.x is actually <8.0.0, etc. + gtlt = '<' + if (xm) { + M = +M + 1 + } else { + m = +m + 1 + } } - }], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - revokeInstallationToken: ["DELETE /installation/token", {}, { - renamed: ["apps", "revokeInstallationAccessToken"] - }], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: ["DELETE /app/installations/{installation_id}/suspended"] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs", { - mediaType: { - previews: ["antiope"] + + ret = gtlt + M + '.' + m + '.' + p + } else if (xm) { + ret = '>=' + M + '.0.0 <' + (+M + 1) + '.0.0' + } else if (xp) { + ret = '>=' + M + '.' + m + '.0 <' + M + '.' + (+m + 1) + '.0' + } + + debug('xRange return', ret) + + return ret + }) +} + +// Because * is AND-ed with everything else in the comparator, +// and '' means "any version", just remove the *s entirely. +function replaceStars (comp, options) { + debug('replaceStars', comp, options) + // Looseness is ignored here. star is always as loose as it gets! + return comp.trim().replace(re[STAR], '') +} + +// This function is passed to string.replace(re[HYPHENRANGE]) +// M, m, patch, prerelease, build +// 1.2 - 3.4.5 => >=1.2.0 <=3.4.5 +// 1.2.3 - 3.4 => >=1.2.0 <3.5.0 Any 3.4.x will do +// 1.2 - 3.4 => >=1.2.0 <3.5.0 +function hyphenReplace ($0, + from, fM, fm, fp, fpr, fb, + to, tM, tm, tp, tpr, tb) { + if (isX(fM)) { + from = '' + } else if (isX(fm)) { + from = '>=' + fM + '.0.0' + } else if (isX(fp)) { + from = '>=' + fM + '.' + fm + '.0' + } else { + from = '>=' + from + } + + if (isX(tM)) { + to = '' + } else if (isX(tm)) { + to = '<' + (+tM + 1) + '.0.0' + } else if (isX(tp)) { + to = '<' + tM + '.' + (+tm + 1) + '.0' + } else if (tpr) { + to = '<=' + tM + '.' + tm + '.' 
+ tp + '-' + tpr + } else { + to = '<=' + to + } + + return (from + ' ' + to).trim() +} + +// if ANY of the sets match ALL of its comparators, then pass +Range.prototype.test = function (version) { + if (!version) { + return false + } + + if (typeof version === 'string') { + version = new SemVer(version, this.options) + } + + for (var i = 0; i < this.set.length; i++) { + if (testSet(this.set[i], version, this.options)) { + return true + } + } + return false +} + +function testSet (set, version, options) { + for (var i = 0; i < set.length; i++) { + if (!set[i].test(version)) { + return false + } + } + + if (version.prerelease.length && !options.includePrerelease) { + // Find the set of versions that are allowed to have prereleases + // For example, ^1.2.3-pr.1 desugars to >=1.2.3-pr.1 <2.0.0 + // That should allow `1.2.3-pr.2` to pass. + // However, `1.2.4-alpha.notready` should NOT be allowed, + // even though it's within the range set by the comparators. + for (i = 0; i < set.length; i++) { + debug(set[i].semver) + if (set[i].semver === ANY) { + continue } - }], - createSuite: ["POST /repos/{owner}/{repo}/check-suites", { - mediaType: { - previews: ["antiope"] + + if (set[i].semver.prerelease.length > 0) { + var allowed = set[i].semver + if (allowed.major === version.major && + allowed.minor === version.minor && + allowed.patch === version.patch) { + return true + } } - }], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] + } + + // Version has a -pre, but it's not one of the ones we like. + return false + } + + return true +} + +exports.satisfies = satisfies +function satisfies (version, range, options) { + try { + range = new Range(range, options) + } catch (er) { + return false + } + return range.test(version) +} + +exports.maxSatisfying = maxSatisfying +function maxSatisfying (versions, range, options) { + var max = null + var maxSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!max || maxSV.compare(v) === -1) { + // compare(max, v, true) + max = v + maxSV = new SemVer(max, options) } - }], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}", { - mediaType: { - previews: ["antiope"] + } + }) + return max +} + +exports.minSatisfying = minSatisfying +function minSatisfying (versions, range, options) { + var min = null + var minSV = null + try { + var rangeObj = new Range(range, options) + } catch (er) { + return null + } + versions.forEach(function (v) { + if (rangeObj.test(v)) { + // satisfies(v, range, options) + if (!min || minSV.compare(v) === 1) { + // compare(min, v, true) + min = v + minSV = new SemVer(min, options) } - }], - listAnnotations: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", { - mediaType: { - previews: ["antiope"] + } + }) + return min +} + +exports.minVersion = minVersion +function minVersion (range, loose) { + range = new Range(range, loose) + + var minver = new SemVer('0.0.0') + if (range.test(minver)) { + return minver + } + + minver = new SemVer('0.0.0-0') + if (range.test(minver)) { + return minver + } + + minver = null + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + comparators.forEach(function (comparator) { + // Clone to avoid manipulating the comparator's semver object. 
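The prerelease gate implemented in `testSet` above, and the `minVersion` computation being built here, are both easier to follow by example. A minimal sketch of this module's behavior, using the names it exports (illustrative, not authoritative):

    // minVersion: lowest version the range can admit (returns a SemVer)
    minVersion('>=1.2.3').version  // '1.2.3'
    minVersion('>1.2.3').version   // '1.2.4' (the '>' case bumps patch when there is no prerelease)
    minVersion('<2.0.0').version   // '0.0.0' (no lower bound in the range)

    // testSet: a prerelease version only satisfies a range that itself names a
    // prerelease of the same [major, minor, patch], unless includePrerelease is set
    satisfies('1.2.3-pr.2', '^1.2.3-pr.1')     // true
    satisfies('1.2.4-alpha.0', '^1.2.3-pr.1')  // false
    satisfies('1.2.4-alpha.0', '^1.2.3-pr.1', { includePrerelease: true }) // true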
+ var compver = new SemVer(comparator.semver.version) + switch (comparator.operator) { + case '>': + if (compver.prerelease.length === 0) { + compver.patch++ + } else { + compver.prerelease.push(0) + } + compver.raw = compver.format() + /* fallthrough */ + case '': + case '>=': + if (!minver || gt(minver, compver)) { + minver = compver + } + break + case '<': + case '<=': + /* Ignore maximum versions */ + break + /* istanbul ignore next */ + default: + throw new Error('Unexpected operation: ' + comparator.operator) } - }], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs", { - mediaType: { - previews: ["antiope"] + }) + } + + if (minver && range.test(minver)) { + return minver + } + + return null +} + +exports.validRange = validRange +function validRange (range, options) { + try { + // Return '*' instead of '' so that truthiness works. + // This will throw if it's invalid anyway + return new Range(range, options).range || '*' + } catch (er) { + return null + } +} + +// Determine if version is less than all the versions possible in the range +exports.ltr = ltr +function ltr (version, range, options) { + return outside(version, range, '<', options) +} + +// Determine if version is greater than all the versions possible in the range. +exports.gtr = gtr +function gtr (version, range, options) { + return outside(version, range, '>', options) +} + +exports.outside = outside +function outside (version, range, hilo, options) { + version = new SemVer(version, options) + range = new Range(range, options) + + var gtfn, ltefn, ltfn, comp, ecomp + switch (hilo) { + case '>': + gtfn = gt + ltefn = lte + ltfn = lt + comp = '>' + ecomp = '>=' + break + case '<': + gtfn = lt + ltefn = gte + ltfn = gt + comp = '<' + ecomp = '<=' + break + default: + throw new TypeError('Must provide a hilo val of "<" or ">"') + } + + // If it satisifes the range it is not outside + if (satisfies(version, range, options)) { + return false + } + + // From now on, variable terms are as if we're in "gtr" mode. + // but note that everything is flipped for the "ltr" function. + + for (var i = 0; i < range.set.length; ++i) { + var comparators = range.set[i] + + var high = null + var low = null + + comparators.forEach(function (comparator) { + if (comparator.semver === ANY) { + comparator = new Comparator('>=0.0.0') } - }], - listForSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", { - mediaType: { - previews: ["antiope"] + high = high || comparator + low = low || comparator + if (gtfn(comparator.semver, high.semver, options)) { + high = comparator + } else if (ltfn(comparator.semver, low.semver, options)) { + low = comparator } - }], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites", { - mediaType: { - previews: ["antiope"] + }) + + // If the edge version comparator has a operator then our version + // isn't outside it + if (high.operator === comp || high.operator === ecomp) { + return false + } + + // If the lowest version comparator has an operator and our version + // is less than it then it isn't higher than the range + if ((!low.operator || low.operator === comp) && + ltefn(version, low.semver)) { + return false + } else if (low.operator === ecomp && ltfn(version, low.semver)) { + return false + } + } + return true +} + +exports.prerelease = prerelease +function prerelease (version, options) { + var parsed = parse(version, options) + return (parsed && parsed.prerelease.length) ? 
parsed.prerelease : null +} + +exports.intersects = intersects +function intersects (r1, r2, options) { + r1 = new Range(r1, options) + r2 = new Range(r2, options) + return r1.intersects(r2) +} + +exports.coerce = coerce +function coerce (version) { + if (version instanceof SemVer) { + return version + } + + if (typeof version !== 'string') { + return null + } + + var match = version.match(re[COERCE]) + + if (match == null) { + return null + } + + return parse(match[1] + + '.' + (match[2] || '0') + + '.' + (match[3] || '0')) +} + + +/***/ }), +/* 281 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const internal_globber_1 = __webpack_require__(297); +/** + * Constructs a globber + * + * @param patterns Patterns separated by newlines + * @param options Glob options + */ +function create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + return yield internal_globber_1.DefaultGlobber.create(patterns, options); + }); +} +exports.create = create; +//# sourceMappingURL=glob.js.map + +/***/ }), +/* 282 */, +/* 283 */, +/* 284 */, +/* 285 */, +/* 286 */, +/* 287 */, +/* 288 */, +/* 289 */, +/* 290 */, +/* 291 */, +/* 292 */, +/* 293 */ +/***/ (function(module) { + +// Returns a wrapper function that returns a wrapped callback +// The wrapper function should do some stuff, and return a +// presumably different callback function. +// This makes sure that own properties are retained, so that +// decorations and such are not lost along the way. 
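The contract described in the comment above is subtle, so a minimal sketch may help; the `once`-style wrapper and its `isOnce` property are invented purely for illustration:

    function once (fn) {
      let called = false
      return function (...args) {
        if (called) return
        called = true
        return fn.apply(this, args)
      }
    }
    once.isOnce = true               // an "own property" that should survive wrapping

    const wrappedOnce = wrappy(once) // behaves like once...
    wrappedOnce.isOnce               // ...and is true: decorations were retained

    const cb = wrappedOnce(() => console.log('ran'))
    cb(); cb()                       // logs 'ran' a single time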
+module.exports = wrappy +function wrappy (fn, cb) { + if (fn && cb) return wrappy(fn)(cb) + + if (typeof fn !== 'function') + throw new TypeError('need wrapper function') + + Object.keys(fn).forEach(function (k) { + wrapper[k] = fn[k] + }) + + return wrapper + + function wrapper() { + var args = new Array(arguments.length) + for (var i = 0; i < args.length; i++) { + args[i] = arguments[i] + } + var ret = fn.apply(this, args) + var cb = args[args.length-1] + if (typeof ret === 'function' && ret !== cb) { + Object.keys(cb).forEach(function (k) { + ret[k] = cb[k] + }) + } + return ret + } +} + + +/***/ }), +/* 294 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const u = __webpack_require__(676).fromCallback +const fs = __webpack_require__(598) +const path = __webpack_require__(622) +const mkdir = __webpack_require__(727) +const pathExists = __webpack_require__(322).pathExists + +function outputFile (file, data, encoding, callback) { + if (typeof encoding === 'function') { + callback = encoding + encoding = 'utf8' + } + + const dir = path.dirname(file) + pathExists(dir, (err, itDoes) => { + if (err) return callback(err) + if (itDoes) return fs.writeFile(file, data, encoding, callback) + + mkdir.mkdirs(dir, err => { + if (err) return callback(err) + + fs.writeFile(file, data, encoding, callback) + }) + }) +} + +function outputFileSync (file, ...args) { + const dir = path.dirname(file) + if (fs.existsSync(dir)) { + return fs.writeFileSync(file, ...args) + } + mkdir.mkdirsSync(dir) + fs.writeFileSync(file, ...args) +} + +module.exports = { + outputFile: u(outputFile), + outputFileSync +} + + +/***/ }), +/* 295 */, +/* 296 */, +/* 297 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i); + function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; } + function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); } +}; +var __await = (this && this.__await) || function (v) { return this instanceof __await ? 
(this.v = v, this) : new __await(v); } +var __asyncGenerator = (this && this.__asyncGenerator) || function (thisArg, _arguments, generator) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var g = generator.apply(thisArg, _arguments || []), i, q = []; + return i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i; + function verb(n) { if (g[n]) i[n] = function (v) { return new Promise(function (a, b) { q.push([n, v, a, b]) > 1 || resume(n, v); }); }; } + function resume(n, v) { try { step(g[n](v)); } catch (e) { settle(q[0][3], e); } } + function step(r) { r.value instanceof __await ? Promise.resolve(r.value.v).then(fulfill, reject) : settle(q[0][2], r); } + function fulfill(value) { resume("next", value); } + function reject(value) { resume("throw", value); } + function settle(f, v) { if (f(v), q.shift(), q.length) resume(q[0][0], q[0][1]); } +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __webpack_require__(470); +const fs = __webpack_require__(747); +const globOptionsHelper = __webpack_require__(601); +const path = __webpack_require__(622); +const patternHelper = __webpack_require__(597); +const internal_match_kind_1 = __webpack_require__(327); +const internal_pattern_1 = __webpack_require__(923); +const internal_search_state_1 = __webpack_require__(728); +const IS_WINDOWS = process.platform === 'win32'; +class DefaultGlobber { + constructor(options) { + this.patterns = []; + this.searchPaths = []; + this.options = globOptionsHelper.getOptions(options); + } + getSearchPaths() { + // Return a copy + return this.searchPaths.slice(); + } + glob() { + var e_1, _a; + return __awaiter(this, void 0, void 0, function* () { + const result = []; + try { + for (var _b = __asyncValues(this.globGenerator()), _c; _c = yield _b.next(), !_c.done;) { + const itemPath = _c.value; + result.push(itemPath); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (_c && !_c.done && (_a = _b.return)) yield _a.call(_b); + } + finally { if (e_1) throw e_1.error; } + } + return result; + }); + } + globGenerator() { + return __asyncGenerator(this, arguments, function* globGenerator_1() { + // Fill in defaults options + const options = globOptionsHelper.getOptions(this.options); + // Implicit descendants? + const patterns = []; + for (const pattern of this.patterns) { + patterns.push(pattern); + if (options.implicitDescendants && + (pattern.trailingSeparator || + pattern.segments[pattern.segments.length - 1] !== '**')) { + patterns.push(new internal_pattern_1.Pattern(pattern.negate, pattern.segments.concat('**'))); + } + } + // Push the search paths + const stack = []; + for (const searchPath of patternHelper.getSearchPaths(patterns)) { + core.debug(`Search path '${searchPath}'`); + // Exists? + try { + // Intentionally using lstat. Detection for broken symlink + // will be performed later (if following symlinks). + yield __await(fs.promises.lstat(searchPath)); + } + catch (err) { + if (err.code === 'ENOENT') { + continue; + } + throw err; + } + stack.unshift(new internal_search_state_1.SearchState(searchPath, 1)); + } + // Search + const traversalChain = []; // used to detect cycles + while (stack.length) { + // Pop + const item = stack.pop(); + // Match? 
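The stack-driven traversal above (seed the search paths, pop, match, descend) is the engine behind the two-call API exported from module 281. A minimal consumer-side sketch, assuming the bundle is consumed the way the published @actions/glob package is:

    const glob = require('@actions/glob') // resolves to the create() exported in module 281

    async function listSources () {
      // newline-separated patterns; '!' negates, directories match descendants implicitly
      const globber = await glob.create('src/**/*.js\n!**/node_modules/**', {
        followSymbolicLinks: false
      })
      return globber.glob() // drains globGenerator() into an array of matched paths
    }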
+ const match = patternHelper.match(patterns, item.path); + const partialMatch = !!match || patternHelper.partialMatch(patterns, item.path); + if (!match && !partialMatch) { + continue; + } + // Stat + const stats = yield __await(DefaultGlobber.stat(item, options, traversalChain) + // Broken symlink, or symlink cycle detected, or no longer exists + ); + // Broken symlink, or symlink cycle detected, or no longer exists + if (!stats) { + continue; + } + // Directory + if (stats.isDirectory()) { + // Matched + if (match & internal_match_kind_1.MatchKind.Directory) { + yield yield __await(item.path); + } + // Descend? + else if (!partialMatch) { + continue; + } + // Push the child items in reverse + const childLevel = item.level + 1; + const childItems = (yield __await(fs.promises.readdir(item.path))).map(x => new internal_search_state_1.SearchState(path.join(item.path, x), childLevel)); + stack.push(...childItems.reverse()); + } + // File + else if (match & internal_match_kind_1.MatchKind.File) { + yield yield __await(item.path); + } + } + }); + } + /** + * Constructs a DefaultGlobber + */ + static create(patterns, options) { + return __awaiter(this, void 0, void 0, function* () { + const result = new DefaultGlobber(options); + if (IS_WINDOWS) { + patterns = patterns.replace(/\r\n/g, '\n'); + patterns = patterns.replace(/\r/g, '\n'); + } + const lines = patterns.split('\n').map(x => x.trim()); + for (const line of lines) { + // Empty or comment + if (!line || line.startsWith('#')) { + continue; + } + // Pattern + else { + result.patterns.push(new internal_pattern_1.Pattern(line)); + } + } + result.searchPaths.push(...patternHelper.getSearchPaths(result.patterns)); + return result; + }); + } + static stat(item, options, traversalChain) { + return __awaiter(this, void 0, void 0, function* () { + // Note: + // `stat` returns info about the target of a symlink (or symlink chain) + // `lstat` returns info about a symlink itself + let stats; + if (options.followSymbolicLinks) { + try { + // Use `stat` (following symlinks) + stats = yield fs.promises.stat(item.path); + } + catch (err) { + if (err.code === 'ENOENT') { + if (options.omitBrokenSymbolicLinks) { + core.debug(`Broken symlink '${item.path}'`); + return undefined; + } + throw new Error(`No information found for the path '${item.path}'. 
This may indicate a broken symbolic link.`); + } + throw err; + } + } + else { + // Use `lstat` (not following symlinks) + stats = yield fs.promises.lstat(item.path); + } + // Note, isDirectory() returns false for the lstat of a symlink + if (stats.isDirectory() && options.followSymbolicLinks) { + // Get the realpath + const realPath = yield fs.promises.realpath(item.path); + // Fixup the traversal chain to match the item level + while (traversalChain.length >= item.level) { + traversalChain.pop(); + } + // Test for a cycle + if (traversalChain.some((x) => x === realPath)) { + core.debug(`Symlink cycle detected for path '${item.path}' and realpath '${realPath}'`); + return undefined; + } + // Update the traversal chain + traversalChain.push(realPath); + } + return stats; + }); + } +} +exports.DefaultGlobber = DefaultGlobber; +//# sourceMappingURL=internal-globber.js.map + +/***/ }), +/* 298 */, +/* 299 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +const VERSION = "2.4.0"; + +/** + * Some “list” response that can be paginated have a different response structure + * + * They have a `total_count` key in the response (search also has `incomplete_results`, + * /installation/repositories also has `repository_selection`), as well as a key with + * the list of the items which name varies from endpoint to endpoint. + * + * Octokit normalizes these responses so that paginated results are always returned following + * the same structure. One challenge is that if the list response has only one page, no Link + * header is provided, so this header alone is not sufficient to check wether a response is + * paginated or not. + * + * We check if a "total_count" key is present in the response data, but also make sure that + * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would + * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref + */ +function normalizePaginatedListResponse(response) { + const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); + if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way + // to retrieve the same information. + + const incompleteResults = response.data.incomplete_results; + const repositorySelection = response.data.repository_selection; + const totalCount = response.data.total_count; + delete response.data.incomplete_results; + delete response.data.repository_selection; + delete response.data.total_count; + const namespaceKey = Object.keys(response.data)[0]; + const data = response.data[namespaceKey]; + response.data = data; + + if (typeof incompleteResults !== "undefined") { + response.data.incomplete_results = incompleteResults; + } + + if (typeof repositorySelection !== "undefined") { + response.data.repository_selection = repositorySelection; + } + + response.data.total_count = totalCount; + return response; +} + +function iterator(octokit, route, parameters) { + const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); + const requestMethod = typeof route === "function" ? 
route : octokit.request; + const method = options.method; + const headers = options.headers; + let url = options.url; + return { + [Symbol.asyncIterator]: () => ({ + next() { + if (!url) { + return Promise.resolve({ + done: true + }); + } + + return requestMethod({ + method, + url, + headers + }).then(normalizePaginatedListResponse).then(response => { + // `response.headers.link` format: + // '; rel="next", ; rel="last"' + // sets `url` to undefined if "next" URL is not present or `link` header is not set + url = ((response.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1]; + return { + value: response + }; + }); } - }], - rerequestSuite: ["POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest", { - mediaType: { - previews: ["antiope"] + + }) + }; +} + +function paginate(octokit, route, parameters, mapFn) { + if (typeof parameters === "function") { + mapFn = parameters; + parameters = undefined; + } + + return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn); +} + +function gather(octokit, results, iterator, mapFn) { + return iterator.next().then(result => { + if (result.done) { + return results; + } + + let earlyExit = false; + + function done() { + earlyExit = true; + } + + results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data); + + if (earlyExit) { + return results; + } + + return gather(octokit, results, iterator, mapFn); + }); +} + +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function paginateRest(octokit) { + return { + paginate: Object.assign(paginate.bind(null, octokit), { + iterator: iterator.bind(null, octokit) + }) + }; +} +paginateRest.VERSION = VERSION; + +exports.paginateRest = paginateRest; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 300 */, +/* 301 */, +/* 302 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = realpath +realpath.realpath = realpath +realpath.sync = realpathSync +realpath.realpathSync = realpathSync +realpath.monkeypatch = monkeypatch +realpath.unmonkeypatch = unmonkeypatch + +var fs = __webpack_require__(747) +var origRealpath = fs.realpath +var origRealpathSync = fs.realpathSync + +var version = process.version +var ok = /^v[0-5]\./.test(version) +var old = __webpack_require__(117) + +function newError (er) { + return er && er.syscall === 'realpath' && ( + er.code === 'ELOOP' || + er.code === 'ENOMEM' || + er.code === 'ENAMETOOLONG' + ) +} + +function realpath (p, cache, cb) { + if (ok) { + return origRealpath(p, cache, cb) + } + + if (typeof cache === 'function') { + cb = cache + cache = null + } + origRealpath(p, cache, function (er, result) { + if (newError(er)) { + old.realpath(p, cache, cb) + } else { + cb(er, result) + } + }) +} + +function realpathSync (p, cache) { + if (ok) { + return origRealpathSync(p, cache) + } + + try { + return origRealpathSync(p, cache) + } catch (er) { + if (newError(er)) { + return old.realpathSync(p, cache) + } else { + throw er + } + } +} + +function monkeypatch () { + fs.realpath = realpath + fs.realpathSync = realpathSync +} + +function unmonkeypatch () { + fs.realpath = origRealpath + fs.realpathSync = origRealpathSync +} + + +/***/ }), +/* 303 */, +/* 304 */, +/* 305 */, +/* 306 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var concatMap = __webpack_require__(896); +var balanced = __webpack_require__(621); + +module.exports = expandTop; + +var escSlash = '\0SLASH'+Math.random()+'\0'; +var escOpen 
= '\0OPEN'+Math.random()+'\0'; +var escClose = '\0CLOSE'+Math.random()+'\0'; +var escComma = '\0COMMA'+Math.random()+'\0'; +var escPeriod = '\0PERIOD'+Math.random()+'\0'; + +function numeric(str) { + return parseInt(str, 10) == str + ? parseInt(str, 10) + : str.charCodeAt(0); +} + +function escapeBraces(str) { + return str.split('\\\\').join(escSlash) + .split('\\{').join(escOpen) + .split('\\}').join(escClose) + .split('\\,').join(escComma) + .split('\\.').join(escPeriod); +} + +function unescapeBraces(str) { + return str.split(escSlash).join('\\') + .split(escOpen).join('{') + .split(escClose).join('}') + .split(escComma).join(',') + .split(escPeriod).join('.'); +} + + +// Basically just str.split(","), but handling cases +// where we have nested braced sections, which should be +// treated as individual members, like {a,{b,c},d} +function parseCommaParts(str) { + if (!str) + return ['']; + + var parts = []; + var m = balanced('{', '}', str); + + if (!m) + return str.split(','); + + var pre = m.pre; + var body = m.body; + var post = m.post; + var p = pre.split(','); + + p[p.length-1] += '{' + body + '}'; + var postParts = parseCommaParts(post); + if (post.length) { + p[p.length-1] += postParts.shift(); + p.push.apply(p, postParts); + } + + parts.push.apply(parts, p); + + return parts; +} + +function expandTop(str) { + if (!str) + return []; + + // I don't know why Bash 4.3 does this, but it does. + // Anything starting with {} will have the first two bytes preserved + // but *only* at the top level, so {},a}b will not expand to anything, + // but a{},b}c will be expanded to [a}c,abc]. + // One could argue that this is a bug in Bash, but since the goal of + // this module is to match Bash's rules, we escape a leading {} + if (str.substr(0, 2) === '{}') { + str = '\\{\\}' + str.substr(2); + } + + return expand(escapeBraces(str), true).map(unescapeBraces); +} + +function identity(e) { + return e; +} + +function embrace(str) { + return '{' + str + '}'; +} +function isPadded(el) { + return /^-?0\d/.test(el); +} + +function lte(i, y) { + return i <= y; +} +function gte(i, y) { + return i >= y; +} + +function expand(str, isTop) { + var expansions = []; + + var m = balanced('{', '}', str); + if (!m || /\$$/.test(m.pre)) return [str]; + + var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); + var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); + var isSequence = isNumericSequence || isAlphaSequence; + var isOptions = m.body.indexOf(',') >= 0; + if (!isSequence && !isOptions) { + // {a},b} + if (m.post.match(/,.*\}/)) { + str = m.pre + '{' + m.body + escClose + m.post; + return expand(str); + } + return [str]; + } + + var n; + if (isSequence) { + n = m.body.split(/\.\./); + } else { + n = parseCommaParts(m.body); + if (n.length === 1) { + // x{{a,b}}y ==> x{a}y x{b}y + n = expand(n[0], false).map(embrace); + if (n.length === 1) { + var post = m.post.length + ? expand(m.post, false) + : ['']; + return post.map(function(p) { + return m.pre + n[0] + p; + }); } - }], - setSuitesPreferences: ["PATCH /repos/{owner}/{repo}/check-suites/preferences", { - mediaType: { - previews: ["antiope"] + } + } + + // at this point, n is the parts, and we know it's not a comma set + // with a single entry. + + // no need to expand pre, since it is guaranteed to be free of brace-sets + var pre = m.pre; + var post = m.post.length + ? 
expand(m.post, false) + : ['']; + + var N; + + if (isSequence) { + var x = numeric(n[0]); + var y = numeric(n[1]); + var width = Math.max(n[0].length, n[1].length) + var incr = n.length == 3 + ? Math.abs(numeric(n[2])) + : 1; + var test = lte; + var reverse = y < x; + if (reverse) { + incr *= -1; + test = gte; + } + var pad = n.some(isPadded); + + N = []; + + for (var i = x; test(i, y); i += incr) { + var c; + if (isAlphaSequence) { + c = String.fromCharCode(i); + if (c === '\\') + c = ''; + } else { + c = String(i); + if (pad) { + var need = width - c.length; + if (need > 0) { + var z = new Array(need + 1).join('0'); + if (i < 0) + c = '-' + z + c.slice(1); + else + c = z + c; + } + } } - }], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}", { - mediaType: { - previews: ["antiope"] + N.push(c); + } + } else { + N = concatMap(n, function(el) { return expand(el, false) }); + } + + for (var j = 0; j < N.length; j++) { + for (var k = 0; k < post.length; k++) { + var expansion = pre + N[j] + post[k]; + if (!isTop || isSequence || expansion) + expansions.push(expansion); + } + } + + return expansions; +} + + + +/***/ }), +/* 307 */, +/* 308 */, +/* 309 */, +/* 310 */, +/* 311 */, +/* 312 */, +/* 313 */, +/* 314 */, +/* 315 */ +/***/ (function(module) { + +if (typeof Object.create === 'function') { + // implementation from standard node.js 'util' module + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + ctor.prototype = Object.create(superCtor.prototype, { + constructor: { + value: ctor, + enumerable: false, + writable: true, + configurable: true + } + }) + } + }; +} else { + // old school shim for old browsers + module.exports = function inherits(ctor, superCtor) { + if (superCtor) { + ctor.super_ = superCtor + var TempCtor = function () {} + TempCtor.prototype = superCtor.prototype + ctor.prototype = new TempCtor() + ctor.prototype.constructor = ctor + } + } +} + + +/***/ }), +/* 316 */, +/* 317 */, +/* 318 */, +/* 319 */, +/* 320 */, +/* 321 */, +/* 322 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +const u = __webpack_require__(676).fromPromise +const fs = __webpack_require__(869) + +function pathExists (path) { + return fs.access(path).then(() => true).catch(() => false) +} + +module.exports = { + pathExists: u(pathExists), + pathExistsSync: fs.existsSync +} + + +/***/ }), +/* 323 */ +/***/ (function(module) { + +"use strict"; + + +var isStream = module.exports = function (stream) { + return stream !== null && typeof stream === 'object' && typeof stream.pipe === 'function'; +}; + +isStream.writable = function (stream) { + return isStream(stream) && stream.writable !== false && typeof stream._write === 'function' && typeof stream._writableState === 'object'; +}; + +isStream.readable = function (stream) { + return isStream(stream) && stream.readable !== false && typeof stream._read === 'function' && typeof stream._readableState === 'object'; +}; + +isStream.duplex = function (stream) { + return isStream.writable(stream) && isStream.readable(stream); +}; + +isStream.transform = function (stream) { + return isStream.duplex(stream) && typeof stream._transform === 'function' && typeof stream._transformState === 'object'; +}; + + +/***/ }), +/* 324 */, +/* 325 */, +/* 326 */, +/* 327 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Indicates whether a pattern matches a path + */ +var MatchKind; 
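The enum defined just below is compiled TypeScript and is deliberately a bit mask, which is why `globGenerator` tests match results with `&` rather than `===`. A minimal sketch (the require path is hypothetical; inside this bundle the module is reached through webpack ids):

    const { MatchKind } = require('./internal-match-kind') // hypothetical path
    const match = MatchKind.All                // 3 === Directory | File
    if (match & MatchKind.Directory) { /* a directory at this path matches */ }
    if (match & MatchKind.File) { /* a regular file at this path matches */ }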
+(function (MatchKind) { + /** Not matched */ + MatchKind[MatchKind["None"] = 0] = "None"; + /** Matched if the path is a directory */ + MatchKind[MatchKind["Directory"] = 1] = "Directory"; + /** Matched if the path is a regular file */ + MatchKind[MatchKind["File"] = 2] = "File"; + /** Matched */ + MatchKind[MatchKind["All"] = 3] = "All"; +})(MatchKind = exports.MatchKind || (exports.MatchKind = {})); +//# sourceMappingURL=internal-match-kind.js.map + +/***/ }), +/* 328 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { getTree, getTreeForProject } = __webpack_require__(101); +const { parentChainFromNode } = __webpack_require__(636); + +/** + * Gets a plain node list of projects ordered by precedence + * @param {string} file - The definition file. It can be a URL or a in the filesystem. + * @param {string} project - The project name to look for. + */ +async function getOrderedListForTree(file, urlPlaceHolders = {}) { + const tree = await getTree(file, urlPlaceHolders); + return getOrderedList(tree); +} + +/** + * Gets a plain node list of projects ordered by precedence for a specific project + * @param {string} file - The definition file. It can be a URL or a in the filesystem. + * @param {string} project - The project name to look for. + */ +async function getOrderedListForProject(file, project, urlPlaceHolders = {}) { + const tree = await getTreeForProject(file, project, urlPlaceHolders); + return getOrderedList([tree]); +} + +/** + * Gets a plain node list of projects ordered by precedence + * @param {string} tree - The tree to iterate + */ +async function getOrderedList(tree) { + const finalLeaves = getFinalLeavesFromTree(tree); + return finalLeaves + .map(leaf => parentChainFromNode(leaf)) + .sort((a, b) => b.length - a.length) + .reduce((acc, chain) => { + acc.push( + ...chain.filter(c => !acc.map(e => e.project).includes(c.project)) + ); + return acc; + }, []); +} + +/** + * It returns back the final leaves from a tree + * @param {Array} nodes the array of nodes + * @param {Array} latestLeavesFromTree + * @param {Array} allreadyVisitedNodes + */ +function getFinalLeavesFromTree( + nodes, + latestLeavesFromTree = [], + allreadyVisitedNodes = [] +) { + if (nodes && nodes.length > 0) { + const allreadyVisitedPreviousNodes = [...allreadyVisitedNodes]; + const notIncludedChildren = nodes + .filter(node => !allreadyVisitedPreviousNodes.includes(node.project)) + .reduce((acc, e) => { + acc.push( + ...e.children.filter( + child => !acc.map(e => e.project).includes(child.project) + ) + ); + return acc; + }, []); + allreadyVisitedNodes.push(...nodes.map(e => e.project)); + getFinalLeavesFromTree( + notIncludedChildren, + latestLeavesFromTree, + allreadyVisitedNodes + ); + + latestLeavesFromTree.push( + ...nodes.filter( + node => + (!node.children || node.children.length === 0) && + !latestLeavesFromTree.map(e => e.project).includes(node.project) + ) + ); + + return latestLeavesFromTree; + } else { + return []; + } +} + +module.exports = { + getOrderedListForProject, + getOrderedListForTree, + getOrderedList +}; + + +/***/ }), +/* 329 */, +/* 330 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { + clone, + doesBranchExist, + merge: gitMerge, + hasPullRequest, + getForkedProject +} = __webpack_require__(484); +const { logger } = __webpack_require__(79); +const { treatUrl } = __webpack_require__(352); +const { checkUrlExist } = __webpack_require__(22); +const { getNodeTriggeringJob } = __webpack_require__(645); +const fs = 
__webpack_require__(747); + +async function checkoutDefinitionTree(context, nodeChain, flow = "pr") { + const nodeTriggeringTheJob = getNodeTriggeringJob(context, nodeChain); + return Promise.all( + nodeChain.map(async node => { + try { + const result = Promise.resolve({ + project: node.project, + checkoutInfo: + flow === "pr" + ? await checkoutProjectPullRequestFlow( + context, + node, + nodeTriggeringTheJob + ) + : await checkoutProjectBranchFlow( + context, + node, + nodeTriggeringTheJob + ) + }); + logger.info(`[${node.project}] Checked out.`); + return result; + } catch (err) { + throw { project: node.project, message: err }; } - }] - }, - codeScanning: { - getAlert: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_id}"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct", { - mediaType: { - previews: ["scarlet-witch"] + }) + ) + .then(result => { + return result.reduce((acc, curr) => { + acc[curr.project] = curr.checkoutInfo; + return acc; + }, {}); + }) + .catch(err => { + logger.error(`[${err.project}] Error checking it out. ${err.message}`); + throw err.message; + }); +} + +async function checkoutProjectPullRequestFlow( + context, + node, + nodeTriggeringTheJob +) { + logger.info(`[${node.project}] Checking out project`); + const dir = getDir(context.config.rootFolder, node.project); + if (!fs.existsSync(dir)) { + const checkoutInfo = await getCheckoutInfo( + context, + node, + nodeTriggeringTheJob + ); + + if (checkoutInfo == undefined) { + const msg = `[${node.project}] Trying to checking out ${node.project} into '${dir}'. It does not exist.`; + logger.error(msg); + throw new Error(msg); + } + if (checkoutInfo.merge) { + logger.info( + `[${node.project}] Merging ${context.config.github.serverUrl}/${node.project}:${checkoutInfo.targetBranch} into ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.branch}` + ); + try { + await clone( + `${context.config.github.serverUrl}/${node.project}`, + dir, + checkoutInfo.targetBranch + ); + } catch (err) { + logger.error( + `[${node.project}] Error checking out (before merging) ${context.config.github.serverUrl}/${node.repo.group}/${node.project}:${context.config.github.targetBranch}` + ); + throw err; } - }], - getConductCode: ["GET /codes_of_conduct/{key}", { - mediaType: { - previews: ["scarlet-witch"] + try { + await gitMerge( + dir, + checkoutInfo.group, + checkoutInfo.project, + checkoutInfo.branch + ); + } catch (err) { + logger.error( + `[${node.project}] Error merging ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.branch}. 
+        );
+        throw err;
      }
    }],
    getForRepo: ["GET /repos/{owner}/{repo}/community/code_of_conduct", {
      mediaType: {
        previews: ["scarlet-witch"]
+    } else {
+      try {
+        logger.info(
+          `[${node.project}] Checking out '${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.branch}' into '${dir}'`
+        );
+        await clone(
+          `${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}`,
+          dir,
+          checkoutInfo.branch
+        );
+      } catch (err) {
+        logger.error(
+          `[${node.project}] Error checking out ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}.`
+        );
+        throw err;
      }
      }],
    listConductCodes: ["GET /codes_of_conduct", {
      mediaType: {
        previews: ["scarlet-witch"]
+      }
+    }
+    return checkoutInfo;
+  } else {
+    logger.info(
+      `[${node.project}] Folder ${dir} already exists, nothing to check out`
+    );
+    return undefined;
+  }
+}
+
+async function checkoutProjectBranchFlow(context, node, nodeTriggeringTheJob) {
+  logger.info(`[${node.project}] Checking out project`);
+  const dir = getDir(context.config.rootFolder, node.project);
+  if (!fs.existsSync(dir)) {
+    const checkoutInfo = await getCheckoutInfo(
+      context,
+      node,
+      nodeTriggeringTheJob
+    );
+
+    if (checkoutInfo == undefined) {
+      const msg = `Trying to check out ${node.project} into '${dir}', but no checkout information exists.`;
+      logger.error(msg);
+      throw new Error(msg);
+    }
+    try {
+      logger.info(
+        `Checking out '${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}:${checkoutInfo.targetBranch}' into '${dir}'`
+      );
+      await clone(
+        `${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}`,
+        dir,
+        checkoutInfo.targetBranch
+      );
+    } catch (err) {
+      logger.error(
+        `Error checking out ${context.config.github.serverUrl}/${checkoutInfo.group}/${checkoutInfo.project}.`
+      );
+      throw err;
+    }
+    return checkoutInfo;
+  } else {
+    logger.info(`Folder ${dir} already exists, nothing to check out`);
+    return undefined;
+  }
+}
+
+async function getCheckoutInfo(context, node, nodeTriggeringTheJob) {
+  const mapping = getMapping(
+    nodeTriggeringTheJob.project,
+    nodeTriggeringTheJob.mapping,
+    node.project,
+    node.mapping,
+    context.config.github.targetBranch
+  );
+  const sourceGroup = context.config.github.sourceGroup;
+  const sourceBranch = context.config.github.sourceBranch;
+  const targetGroup = node.repo.group;
+  const targetProject = node.repo.name;
+  const targetBranch = mapping.target;
+  const forkedProjectName = await getForkedProjectName(
+    context.octokit,
+    targetGroup,
+    targetProject,
+    sourceGroup
+  );
+  logger.info(
+    `[${targetGroup}/${targetProject}] Getting checkout info. sourceProject: ${forkedProjectName}. sourceGroup: ${sourceGroup}. sourceBranch: ${sourceBranch}. targetGroup: ${targetGroup}. targetBranch: ${targetBranch}. Mapping: ${
+      mapping.source
+        ? "source:" + mapping.source + " target:" + mapping.target
+        : "Not defined"
+    }`
+  );
+  return (await doesBranchExist(
+    context.octokit,
+    sourceGroup,
+    forkedProjectName,
+    sourceBranch
+  ))
+    ? 
{ + project: forkedProjectName, + group: sourceGroup, + branch: sourceBranch, + targetGroup, + targetBranch, + merge: await hasPullRequest( + context.octokit, + targetGroup, + targetProject, + sourceBranch, + context.config.github.author + ) } - }, { - renamed: ["codesOfConduct", "getAllCodesOfConduct"] - }] - }, - emojis: { - get: ["GET /emojis"] - }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listPublicForUser: ["GET /users/{username}/gists", {}, { - renamed: ["gists", "listForUser"] - }], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"], - listTemplates: ["GET /gitignore/templates", {}, { - renamed: ["gitignore", "getAllTemplates"] - }] - }, - interactions: { - addOrUpdateRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] + : (await doesBranchExist( + context.octokit, + targetGroup, + targetProject, + sourceBranch + )) + ? { + project: targetProject, + group: targetGroup, + branch: sourceBranch, + targetGroup, + targetBranch, + merge: await hasPullRequest( + context.octokit, + targetGroup, + targetProject, + sourceBranch, + context.config.github.author + ) } - }, { - renamed: ["interactions", "setRestrictionsForOrg"] - }], - addOrUpdateRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] + : (await doesBranchExist( + context.octokit, + targetGroup, + targetProject, + targetBranch + )) + ? 
{ + project: targetProject, + group: targetGroup, + branch: targetBranch, + targetGroup, + targetBranch, + merge: false } - }, { - renamed: ["interactions", "setRestrictionsForRepo"] - }], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] + : undefined; +} + +/** + * it returns back a {source, target} object + * @param {String} projectTriggeringTheJob the project name of the project triggering the job + * @param {Object} projectTriggeringTheJobMapping the mapping object of the project triggering the job + * @param {String} currentProject the project name of the current project + * @param {Object} currentProjectMapping the mappinf object of the current project + * @param {String} targetBranch the target branch + */ +function getMapping( + projectTriggeringTheJob, + projectTriggeringTheJobMapping, + currentProject, + currentProjectMapping, + targetBranch +) { + // If the current project it the project triggering the job there's no mapping + if (currentProject !== projectTriggeringTheJob) { + // If the current project has been excluded from the mapping, there's no mapping + if ( + projectTriggeringTheJobMapping && + (projectTriggeringTheJobMapping.exclude + ? !projectTriggeringTheJobMapping.exclude.includes(currentProject) + : true) + ) { + // The mapping is either taken from the project mapping or from the default one + const mapping = projectTriggeringTheJobMapping.dependencies[ + currentProject + ] + ? projectTriggeringTheJobMapping.dependencies[currentProject] + : projectTriggeringTheJobMapping.dependencies.default; + return { source: mapping.source, target: mapping.target }; + // If the current project has a mapping and the source matched with the targetBranch then this mapping is taken + } else if ( + currentProjectMapping && + currentProjectMapping.source === targetBranch + ) { + return { + source: currentProjectMapping.source, + target: currentProjectMapping.target + }; + } else { + return { target: targetBranch }; + } + } else { + return { target: targetBranch }; + } +} + +function getDir(rootFolder, project) { + const folder = + rootFolder !== undefined && rootFolder !== "" ? rootFolder : "."; + return `${folder}/${project.replace(/ |-/g, "_")}`; +} + +async function getForkedProjectName(octokit, owner, project, wantedOwner) { + if (owner !== wantedOwner) { + const forkedProject = await getForkedProject( + octokit, + owner, + project, + wantedOwner + ); + return !forkedProject || !forkedProject.name ? project : forkedProject.name; + } else { + return project; + } +} + +/** + * it checks what's the proper URL to retrieve definition from in case a URL with ${} expression is defined. It will try sourceGroup/sourceBranch then targetGroup/sourceBranch and then targetGroup/targetBranch in this order. 
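+ * A hypothetical example: for a definitionFile such as
+ * "https://server/${GROUP}/${PROJECT_NAME}/${BRANCH}/definition-file.yaml" the
+ * placeholders are resolved first with sourceGroup/sourceBranch, then with the
+ * target group and sourceBranch, and finally with the target group and
+ * targetBranch; the first combination whose URL exists wins.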
+ * @param {Object} context the context information + * @param {Object} definitionFile the definition file path or URL + */ +async function getPlaceHolders(context, definitionFile) { + if (definitionFile.startsWith("http") && definitionFile.includes("${")) { + const placeHolderSource = { + GROUP: context.config.github.sourceGroup, + PROJECT_NAME: context.config.github.project, + BRANCH: context.config.github.sourceBranch + }; + const sourceUrl = treatUrl(definitionFile, placeHolderSource); + if (!(await checkUrlExist(sourceUrl))) { + const placeHoldersTargetSource = { + GROUP: context.config.github.group, + PROJECT_NAME: context.config.github.project, + BRANCH: context.config.github.sourceBranch + }; + const targetSourceUrl = treatUrl( + definitionFile, + placeHoldersTargetSource + ); + if (!(await checkUrlExist(targetSourceUrl))) { + const placeHoldersTarget = { + GROUP: context.config.github.group, + PROJECT_NAME: context.config.github.project, + BRANCH: context.config.github.targetBranch + }; + const targetUrl = treatUrl(definitionFile, placeHoldersTarget); + if (!(await checkUrlExist(targetUrl))) { + throw new Error( + `Definition file ${definitionFile} does not exist for any of these cases: ${sourceUrl}, ${targetSourceUrl} or ${targetUrl}` + ); + } else { + return placeHoldersTarget; + } + } else { + return placeHoldersTargetSource; } - }], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] + } else { + return placeHolderSource; + } + } + return {}; +} + +module.exports = { + checkoutDefinitionTree, + getCheckoutInfo, + getDir, + getPlaceHolders, + getMapping +}; + + +/***/ }), +/* 331 */, +/* 332 */, +/* 333 */, +/* 334 */, +/* 335 */, +/* 336 */, +/* 337 */, +/* 338 */, +/* 339 */, +/* 340 */, +/* 341 */, +/* 342 */, +/* 343 */, +/* 344 */, +/* 345 */, +/* 346 */, +/* 347 */, +/* 348 */, +/* 349 */, +/* 350 */, +/* 351 */, +/* 352 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { getTree, getTreeForProject } = __webpack_require__(101); +const { + getOrderedListForTree, + getOrderedListForProject +} = __webpack_require__(328); +const { readDefinitionFile } = __webpack_require__(799); +const { parentChainFromNode } = __webpack_require__(636); +const { treatUrl } = __webpack_require__(824); + +module.exports = { + getTree, + getTreeForProject, + getOrderedListForTree, + getOrderedListForProject, + readDefinitionFile, + parentChainFromNode, + treatUrl +}; + + +/***/ }), +/* 353 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const u = __webpack_require__(676).fromCallback +module.exports = { + move: u(__webpack_require__(92)) +} + + +/***/ }), +/* 354 */, +/* 355 */, +/* 356 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +/*! + * is-plain-object + * + * Copyright (c) 2014-2017, Jon Schlinkert. + * Released under the MIT License. 
+ */ + +function isObject(o) { + return Object.prototype.toString.call(o) === '[object Object]'; +} + +function isPlainObject(o) { + var ctor,prot; + + if (isObject(o) === false) return false; + + // If has modified constructor + ctor = o.constructor; + if (ctor === undefined) return true; + + // If has modified prototype + prot = ctor.prototype; + if (isObject(prot) === false) return false; + + // If constructor does not have an Object-specific method + if (prot.hasOwnProperty('isPrototypeOf') === false) { + return false; + } + + // Most likely a plain Object + return true; +} + +exports.isPlainObject = isPlainObject; + + +/***/ }), +/* 357 */ +/***/ (function(module) { + +module.exports = require("assert"); + +/***/ }), +/* 358 */, +/* 359 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __importStar(__webpack_require__(470)); +const upload_specification_1 = __webpack_require__(590); +const upload_http_client_1 = __webpack_require__(608); +const utils_1 = __webpack_require__(870); +const download_http_client_1 = __webpack_require__(855); +const download_specification_1 = __webpack_require__(532); +const config_variables_1 = __webpack_require__(401); +const path_1 = __webpack_require__(622); +class DefaultArtifactClient { + /** + * Constructs a DefaultArtifactClient + */ + static create() { + return new DefaultArtifactClient(); + } + /** + * Uploads an artifact + */ + uploadArtifact(name, files, rootDirectory, options) { + return __awaiter(this, void 0, void 0, function* () { + utils_1.checkArtifactName(name); + // Get specification for the files being uploaded + const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files); + const uploadResponse = { + artifactName: name, + artifactItems: [], + size: 0, + failedItems: [] + }; + const uploadHttpClient = new upload_http_client_1.UploadHttpClient(); + if (uploadSpecification.length === 0) { + core.warning(`No files found that can be uploaded`); + } + else { + // Create an entry for the artifact in the file container + const response = yield uploadHttpClient.createArtifactInFileContainer(name); + if (!response.fileContainerResourceUrl) { + core.debug(response.toString()); + throw new Error('No URL provided by the Artifact Service to upload an artifact to'); + } + core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`); + // Upload each of the files that were found concurrently + const uploadResult = yield 
uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options); + // Update the size of the artifact to indicate we are done uploading + // The uncompressed size is used for display when downloading a zip of the artifact from the UI + yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name); + core.info(`Finished uploading artifact ${name}. Reported size is ${uploadResult.uploadSize} bytes. There were ${uploadResult.failedItems.length} items that failed to upload`); + uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath); + uploadResponse.size = uploadResult.uploadSize; + uploadResponse.failedItems = uploadResult.failedItems; + } + return uploadResponse; + }); + } + downloadArtifact(name, path, options) { + return __awaiter(this, void 0, void 0, function* () { + const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); + const artifacts = yield downloadHttpClient.listArtifacts(); + if (artifacts.count === 0) { + throw new Error(`Unable to find any artifacts for the associated workflow`); + } + const artifactToDownload = artifacts.value.find(artifact => { + return artifact.name === name; + }); + if (!artifactToDownload) { + throw new Error(`Unable to find an artifact with the name: ${name}`); + } + const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); + if (!path) { + path = config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories + const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? 
void 0 : options.createArtifactFolder) || false); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); + } + else { + // Create all necessary directories recursively before starting any download + yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + core.info('Directory structure has been setup for the artifact'); + yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + return { + artifactName: name, + downloadPath: downloadSpecification.rootDownloadLocation + }; + }); + } + downloadAllArtifacts(path) { + return __awaiter(this, void 0, void 0, function* () { + const downloadHttpClient = new download_http_client_1.DownloadHttpClient(); + const response = []; + const artifacts = yield downloadHttpClient.listArtifacts(); + if (artifacts.count === 0) { + core.info('Unable to find any artifacts for the associated workflow'); + return response; + } + if (!path) { + path = config_variables_1.getWorkSpaceDirectory(); + } + path = path_1.normalize(path); + path = path_1.resolve(path); + let downloadedArtifacts = 0; + while (downloadedArtifacts < artifacts.count) { + const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; + downloadedArtifacts += 1; + // Get container entries for the specific artifact + const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); + const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true); + if (downloadSpecification.filesToDownload.length === 0) { + core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); + } + else { + yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); + yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); + } + response.push({ + artifactName: currentArtifactToDownload.name, + downloadPath: downloadSpecification.rootDownloadLocation + }); + } + return response; + }); + } +} +exports.DefaultArtifactClient = DefaultArtifactClient; +//# sourceMappingURL=artifact-client.js.map + +/***/ }), +/* 360 */, +/* 361 */, +/* 362 */, +/* 363 */ +/***/ (function(module) { + +module.exports = register + +function register (state, name, method, options) { + if (typeof method !== 'function') { + throw new Error('method for before hook must be a function') + } + + if (!options) { + options = {} + } + + if (Array.isArray(name)) { + return name.reverse().reduce(function (callback, name) { + return register.bind(null, state, name, callback, options) + }, method)() + } + + return Promise.resolve() + .then(function () { + if (!state.registry[name]) { + return method(options) } - }], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] + + return (state.registry[name]).reduce(function (method, registered) { + return registered.hook.bind(null, method, options) + }, method)() + }) +} + + +/***/ }), +/* 364 */, +/* 365 */, +/* 366 */, +/* 367 */, +/* 368 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const u = __webpack_require__(676).fromCallback 
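+// `u` (universalify.fromCallback) lets a callback-style function be used with
+// either a callback or a Promise. A hypothetical usage sketch of the wrapped
+// `remove` export defined below:
+//   remove("/tmp/some-dir", err => { if (err) throw err; }); // callback style
+//   remove("/tmp/some-dir").then(() => {});                  // promise style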
+const rimraf = __webpack_require__(474) + +module.exports = { + remove: u(rimraf), + removeSync: rimraf.sync +} + + +/***/ }), +/* 369 */, +/* 370 */, +/* 371 */, +/* 372 */, +/* 373 */ +/***/ (function(module) { + +module.exports = require("crypto"); + +/***/ }), +/* 374 */, +/* 375 */ +/***/ (function(module) { + +function formatDate(date) { + return `${date.getFullYear()}${date.getMonth()}${date.getDate()}${date.getHours()}${date.getMinutes()}${date.getSeconds()}`; +} + +module.exports = { formatDate }; + + +/***/ }), +/* 376 */, +/* 377 */, +/* 378 */, +/* 379 */, +/* 380 */, +/* 381 */, +/* 382 */, +/* 383 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const assert = __webpack_require__(357); +const path = __webpack_require__(622); +const pathHelper = __webpack_require__(972); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Helper class for parsing paths into segments + */ +class Path { + /** + * Constructs a Path + * @param itemPath Path or array of segments + */ + constructor(itemPath) { + this.segments = []; + // String + if (typeof itemPath === 'string') { + assert(itemPath, `Parameter 'itemPath' must not be empty`); + // Normalize slashes and trim unnecessary trailing slash + itemPath = pathHelper.safeTrimTrailingSeparator(itemPath); + // Not rooted + if (!pathHelper.hasRoot(itemPath)) { + this.segments = itemPath.split(path.sep); + } + // Rooted + else { + // Add all segments, while not at the root + let remaining = itemPath; + let dir = pathHelper.dirname(remaining); + while (dir !== remaining) { + // Add the segment + const basename = path.basename(remaining); + this.segments.unshift(basename); + // Truncate the last segment + remaining = dir; + dir = pathHelper.dirname(remaining); + } + // Remainder is the root + this.segments.unshift(remaining); + } + } + // Array + else { + // Must not be empty + assert(itemPath.length > 0, `Parameter 'itemPath' must not be an empty array`); + // Each segment + for (let i = 0; i < itemPath.length; i++) { + let segment = itemPath[i]; + // Must not be empty + assert(segment, `Parameter 'itemPath' must not contain any empty segments`); + // Normalize slashes + segment = pathHelper.normalizeSeparators(itemPath[i]); + // Root segment + if (i === 0 && pathHelper.hasRoot(segment)) { + segment = pathHelper.safeTrimTrailingSeparator(segment); + assert(segment === pathHelper.dirname(segment), `Parameter 'itemPath' root segment contains information for multiple segments`); + this.segments.push(segment); + } + // All other segments + else { + // Must not contain slash + assert(!segment.includes(path.sep), `Parameter 'itemPath' contains unexpected path separators`); + this.segments.push(segment); + } + } + } + } + /** + * Converts the path to it's string representation + */ + toString() { + // First segment + let result = this.segments[0]; + // All others + let skipSlash = result.endsWith(path.sep) || (IS_WINDOWS && /^[A-Z]:$/i.test(result)); + for (let i = 1; i < this.segments.length; i++) { + if (skipSlash) { + skipSlash = false; + } + else { + result += path.sep; + } + result += this.segments[i]; + } + return result; + } +} +exports.Path = Path; +//# sourceMappingURL=internal-path.js.map + +/***/ }), +/* 384 */, +/* 385 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +var isPlainObject = __webpack_require__(356); +var 
universalUserAgent = __webpack_require__(40); + +function lowercaseKeys(object) { + if (!object) { + return {}; + } + + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} + +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach(key => { + if (isPlainObject.isPlainObject(options[key])) { + if (!(key in defaults)) Object.assign(result, { + [key]: options[key] + });else result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { + [key]: options[key] + }); + } + }); + return result; +} + +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === undefined) { + delete obj[key]; + } + } + + return obj; +} + +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { + method, + url + } : { + url: method + }, options); + } else { + options = Object.assign({}, route); + } // lowercase header names before merging with defaults to avoid duplicates + + + options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging + + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten + + if (defaults && defaults.mediaType.previews.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews); + } + + mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, "")); + return mergedOptions; +} + +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? "&" : "?"; + const names = Object.keys(parameters); + + if (names.length === 0) { + return url; + } + + return url + separator + names.map(name => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); + } + + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} + +const urlVariableRegex = /\{[^}]+\}/g; + +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} + +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + + if (!matches) { + return []; + } + + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} + +function omit(object, keysToOmit) { + return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => { + obj[key] = object[key]; + return obj; + }, {}); +} + +// Based on https://github.com/bramstein/url-template, licensed under BSD +// TODO: create separate package. +// +// Copyright (c) 2012-2014, Bram Stein +// All rights reserved. +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions +// are met: +// 1. Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// 2. Redistributions in binary form must reproduce the above copyright +// notice, this list of conditions and the following disclaimer in the +// documentation and/or other materials provided with the distribution. +// 3. 
The name of the author may not be used to endorse or promote products +// derived from this software without specific prior written permission. +// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED +// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF +// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO +// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, +// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, +// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY +// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING +// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, +// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +/* istanbul ignore file */ +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + + return part; + }).join(""); +} + +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function (c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} + +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); + + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} + +function isDefined(value) { + return value !== undefined && value !== null; +} + +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} + +function getValues(context, operator, key, modifier) { + var value = context[key], + result = []; + + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); } - }], - removeRestrictionsForRepo: ["DELETE /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] + + result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : "")); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + result.push(encodeValue(operator, value, isKeyOperator(operator) ? 
key : "")); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); + } + } else { + const tmp = []; + + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function (value) { + tmp.push(encodeValue(operator, value)); + }); + } else { + Object.keys(value).forEach(function (k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); + } + + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); + } } - }], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits", { - mediaType: { - previews: ["sombra"] + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); } - }], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits", { - mediaType: { - previews: ["sombra"] + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + + return result; +} + +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} + +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); } - }] - }, - issues: { - addAssignees: ["POST /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkAssignee: ["GET /repos/{owner}/{repo}/assignees/{assignee}", {}, { - renamed: ["issues", "checkUserCanBeAssigned"] - }], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: ["POST /repos/{owner}/{repo}/issues/{issue_number}/comments"], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}"], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: ["DELETE /repos/{owner}/{repo}/milestones/{milestone_number}"], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: ["GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", { - mediaType: { - previews: ["mockingbird"] + + expression.split(/,/g).forEach(function (variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || 
tmp[3])); + }); + + if (operator && operator !== "+") { + var separator = ","; + + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; + } + + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); } - }], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels"], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/labels"], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - listMilestonesForRepo: ["GET /repos/{owner}/{repo}/milestones", {}, { - renamed: ["issues", "listMilestones"] - }], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels"], - removeAssignees: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees"], - removeLabel: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}"], - removeLabels: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { - renamed: ["issues", "removeAllLabels"] - }], - replaceAllLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { - renamed: ["issues", "setLabels"] - }], - replaceLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels", {}, { - renamed: ["issues", "replaceAllLabels"] - }], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: ["PATCH /repos/{owner}/{repo}/milestones/{milestone_number}"] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"], - listCommonlyUsed: ["GET /licenses", {}, { - renamed: ["licenses", "getAllCommonlyUsed"] - }] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: ["POST /markdown/raw", { - headers: { - "content-type": "text/plain; charset=utf-8" + } else { + return encodeReserved(literal); + } + }); +} + +function parse(options) { + // https://fetch.spec.whatwg.org/#methods + let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible + + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later + + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + + const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + + if (!isBinaryRequest) { + if (options.mediaType.format) { + // e.g. 
application/vnd.github.v3+json => application/vnd.github.v3.raw + headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(","); + } + + if (options.mediaType.previews.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => { + const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } // for GET/HEAD requests, set URL query parameters from remaining parameters + // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters + + + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } else { + headers["content-length"] = 0; } - }] - }, - meta: { - get: ["GET /meta"] + } + } // default content-type for JSON if body is set + + + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body. + // fetch does not allow to set `content-length` header, but we can set body to an empty string + + + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; + } // Only return body/request keys if present + + + return Object.assign({ + method, + url, + headers + }, typeof body !== "undefined" ? { + body + } : null, options.request ? { + request: options.request + } : null); +} + +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); +} + +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS = merge(oldDefaults, newDefaults); + const endpoint = endpointWithDefaults.bind(null, DEFAULTS); + return Object.assign(endpoint, { + DEFAULTS, + defaults: withDefaults.bind(null, DEFAULTS), + merge: merge.bind(null, DEFAULTS), + parse + }); +} + +const VERSION = "6.0.8"; + +const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url. +// So we use RequestParameters and add method as additional required property. 
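+// A hedged usage sketch (route and values are illustrative, not from this repo):
+//   endpoint("GET /repos/{owner}/{repo}", { owner: "octocat", repo: "hello-world" })
+// merges these DEFAULTS with the route and expands the URL template into
+//   { method: "GET", url: "https://api.github.com/repos/octocat/hello-world", headers: { ... } }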
+ +const DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent }, - migrations: { - cancelImport: ["DELETE /repos/{owner}/{repo}/import"], - deleteArchiveForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] - } - }], - deleteArchiveForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] + mediaType: { + format: "", + previews: [] + } +}; + +const endpoint = withDefaults(null, DEFAULTS); + +exports.endpoint = endpoint; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 386 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var Type = __webpack_require__(945); + +function resolveJavascriptUndefined() { + return true; +} + +function constructJavascriptUndefined() { + /*eslint-disable no-undefined*/ + return undefined; +} + +function representJavascriptUndefined() { + return ''; +} + +function isUndefined(object) { + return typeof object === 'undefined'; +} + +module.exports = new Type('tag:yaml.org,2002:js/undefined', { + kind: 'scalar', + resolve: resolveJavascriptUndefined, + construct: constructJavascriptUndefined, + predicate: isUndefined, + represent: representJavascriptUndefined +}); + + +/***/ }), +/* 387 */, +/* 388 */, +/* 389 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const fs = __webpack_require__(747); +const shebangCommand = __webpack_require__(866); + +function readShebang(command) { + // Read the first 150 bytes from the file + const size = 150; + let buffer; + + if (Buffer.alloc) { + // Node.js v4.5+ / v5.10+ + buffer = Buffer.alloc(size); + } else { + // Old Node.js API + buffer = new Buffer(size); + buffer.fill(0); // zero-fill + } + + let fd; + + try { + fd = fs.openSync(command, 'r'); + fs.readSync(fd, buffer, 0, size, 0); + fs.closeSync(fd); + } catch (e) { /* Empty */ } + + // Attempt to extract shebang (null is returned if not a shebang) + return shebangCommand(buffer.toString()); +} + +module.exports = readShebang; + + +/***/ }), +/* 390 */, +/* 391 */, +/* 392 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; + } + + if (typeof process === "object" && "version" in process) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; + } + + return ""; +} + +exports.getUserAgent = getUserAgent; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 393 */, +/* 394 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +// We use any as a valid input type +/* eslint-disable @typescript-eslint/no-explicit-any */ +Object.defineProperty(exports, "__esModule", { value: true }); +/** + * Sanitizes an input into a string so it can be passed into issueCommand safely + * @param input input to sanitize into a string + */ +function toCommandValue(input) { + if (input === null || input === undefined) { + return ''; + } + else if (typeof input === 'string' || input instanceof String) { + return input; + } + return JSON.stringify(input); +} +exports.toCommandValue = toCommandValue; +//# sourceMappingURL=utils.js.map + +/***/ }), +/* 395 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { create } = 
__webpack_require__(214); +const core = __webpack_require__(470); +const noFileOptions = __webpack_require__(787); +const { findFilesToUpload } = __webpack_require__(819); +const { logger } = __webpack_require__(79); +var assert = __webpack_require__(357); + +async function run(archiveArtifacts, on = ["success", "failure"]) { + assert(archiveArtifacts, "archiveArtifacts is not defined"); + assert(archiveArtifacts.paths, "archiveArtifacts.paths is not defined"); + assert(archiveArtifacts.name, "archiveArtifacts.name is not defined"); + const path = archiveArtifacts.paths + .filter(pathItem => pathItem && pathItem.path && on.includes(pathItem.on)) + .reduce((acc, pathItem) => acc.concat(pathItem.path, "\n"), "") + .trim(); + try { + logger.info(`Uploading artifacts for path [${path}]`); + const searchResult = await findFilesToUpload(path); + if (searchResult.filesToUpload.length === 0) { + switch (archiveArtifacts.ifNoFilesFound) { + case noFileOptions.error: { + core.setFailed( + `[ERROR] No files were found with the provided path: ${path}. No artifacts will be uploaded.` + ); + break; + } + case noFileOptions.ignore: { + core.info( + `[INFO] No files were found with the provided path: ${path}. No artifacts will be uploaded.` + ); + break; + } + case noFileOptions.warn: + default: { + core.warning( + `[WARNING] No files were found with the provided path: ${path}. No artifacts will be uploaded.` + ); + } } - }], - downloadArchiveForOrg: ["GET /orgs/{org}/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] + } else { + core.info( + `[INFO] With the provided path (${path}), there will be ${searchResult.filesToUpload.length} file(s) uploaded` + ); + core.debug( + `[DEBUG] Root artifact directory is ${searchResult.rootDirectory}` + ); + + const artifactClient = create(); + const options = { + continueOnError: false + }; + const uploadResponse = await artifactClient.uploadArtifact( + archiveArtifacts.name, + searchResult.filesToUpload, + searchResult.rootDirectory, + options + ); + if (uploadResponse.failedItems.length > 0) { + core.setFailed( + `[ERROR] An error was encountered when uploading ${uploadResponse.artifactName}. 
There were ${uploadResponse.failedItems.length} items that failed to upload.` + ); + } else { + core.info( + `[INFO] Artifact ${uploadResponse.artifactName} has been successfully uploaded!` + ); } - }], - getArchiveForAuthenticatedUser: ["GET /user/migrations/{migration_id}/archive", { - mediaType: { - previews: ["wyandotte"] + return uploadResponse; + } + } catch (err) { + core.setFailed(err.message); + } + return undefined; +} +module.exports = { + run +}; + + +/***/ }), +/* 396 */, +/* 397 */, +/* 398 */, +/* 399 */, +/* 400 */, +/* 401 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +// The number of concurrent uploads that happens at the same time +function getUploadFileConcurrency() { + return 2; +} +exports.getUploadFileConcurrency = getUploadFileConcurrency; +// When uploading large files that can't be uploaded with a single http call, this controls +// the chunk size that is used during upload +function getUploadChunkSize() { + return 8 * 1024 * 1024; // 8 MB Chunks +} +exports.getUploadChunkSize = getUploadChunkSize; +// The maximum number of retries that can be attempted before an upload or download fails +function getRetryLimit() { + return 5; +} +exports.getRetryLimit = getRetryLimit; +// With exponential backoff, the larger the retry count, the larger the wait time before another attempt +// The retry multiplier controls by how much the backOff time increases depending on the number of retries +function getRetryMultiplier() { + return 1.5; +} +exports.getRetryMultiplier = getRetryMultiplier; +// The initial wait time if an upload or download fails and a retry is being attempted for the first time +function getInitialRetryIntervalInMilliseconds() { + return 3000; +} +exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds; +// The number of concurrent downloads that happens at the same time +function getDownloadFileConcurrency() { + return 2; +} +exports.getDownloadFileConcurrency = getDownloadFileConcurrency; +function getRuntimeToken() { + const token = process.env['ACTIONS_RUNTIME_TOKEN']; + if (!token) { + throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable'); + } + return token; +} +exports.getRuntimeToken = getRuntimeToken; +function getRuntimeUrl() { + const runtimeUrl = process.env['ACTIONS_RUNTIME_URL']; + if (!runtimeUrl) { + throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable'); + } + return runtimeUrl; +} +exports.getRuntimeUrl = getRuntimeUrl; +function getWorkFlowRunId() { + const workFlowRunId = process.env['GITHUB_RUN_ID']; + if (!workFlowRunId) { + throw new Error('Unable to get GITHUB_RUN_ID env variable'); + } + return workFlowRunId; +} +exports.getWorkFlowRunId = getWorkFlowRunId; +function getWorkSpaceDirectory() { + const workspaceDirectory = process.env['GITHUB_WORKSPACE']; + if (!workspaceDirectory) { + throw new Error('Unable to get GITHUB_WORKSPACE env variable'); + } + return workspaceDirectory; +} +exports.getWorkSpaceDirectory = getWorkSpaceDirectory; +//# sourceMappingURL=config-variables.js.map + +/***/ }), +/* 402 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +/*! + * Tmp + * + * Copyright (c) 2011-2017 KARASZI Istvan + * + * MIT Licensed + */ + +/* + * Module dependencies. + */ +const fs = __webpack_require__(747); +const os = __webpack_require__(87); +const path = __webpack_require__(622); +const crypto = __webpack_require__(373); +const _c = fs.constants && os.constants ? 
+ { fs: fs.constants, os: os.constants } : + process.binding('constants'); +const rimraf = __webpack_require__(569); + +/* + * The working inner variables. + */ +const + // the random characters to choose from + RANDOM_CHARS = '0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz', + + TEMPLATE_PATTERN = /XXXXXX/, + + DEFAULT_TRIES = 3, + + CREATE_FLAGS = (_c.O_CREAT || _c.fs.O_CREAT) | (_c.O_EXCL || _c.fs.O_EXCL) | (_c.O_RDWR || _c.fs.O_RDWR), + + EBADF = _c.EBADF || _c.os.errno.EBADF, + ENOENT = _c.ENOENT || _c.os.errno.ENOENT, + + DIR_MODE = 448 /* 0o700 */, + FILE_MODE = 384 /* 0o600 */, + + EXIT = 'exit', + + SIGINT = 'SIGINT', + + // this will hold the objects need to be removed on exit + _removeObjects = []; + +var + _gracefulCleanup = false; + +/** + * Random name generator based on crypto. + * Adapted from http://blog.tompawlak.org/how-to-generate-random-values-nodejs-javascript + * + * @param {number} howMany + * @returns {string} the generated random name + * @private + */ +function _randomChars(howMany) { + var + value = [], + rnd = null; + + // make sure that we do not fail because we ran out of entropy + try { + rnd = crypto.randomBytes(howMany); + } catch (e) { + rnd = crypto.pseudoRandomBytes(howMany); + } + + for (var i = 0; i < howMany; i++) { + value.push(RANDOM_CHARS[rnd[i] % RANDOM_CHARS.length]); + } + + return value.join(''); +} + +/** + * Checks whether the `obj` parameter is defined or not. + * + * @param {Object} obj + * @returns {boolean} true if the object is undefined + * @private + */ +function _isUndefined(obj) { + return typeof obj === 'undefined'; +} + +/** + * Parses the function arguments. + * + * This function helps to have optional arguments. + * + * @param {(Options|Function)} options + * @param {Function} callback + * @returns {Array} parsed arguments + * @private + */ +function _parseArguments(options, callback) { + /* istanbul ignore else */ + if (typeof options === 'function') { + return [{}, options]; + } + + /* istanbul ignore else */ + if (_isUndefined(options)) { + return [{}, callback]; + } + + return [options, callback]; +} + +/** + * Generates a new temporary name. + * + * @param {Object} opts + * @returns {string} the new random name according to opts + * @private + */ +function _generateTmpName(opts) { + + const tmpDir = _getTmpDir(); + + // fail early on missing tmp dir + if (isBlank(opts.dir) && isBlank(tmpDir)) { + throw new Error('No tmp dir specified'); + } + + /* istanbul ignore else */ + if (!isBlank(opts.name)) { + return path.join(opts.dir || tmpDir, opts.name); + } + + // mkstemps like template + // opts.template has already been guarded in tmpName() below + /* istanbul ignore else */ + if (opts.template) { + var template = opts.template; + // make sure that we prepend the tmp path if none was given + /* istanbul ignore else */ + if (path.basename(template) === template) + template = path.join(opts.dir || tmpDir, template); + return template.replace(TEMPLATE_PATTERN, _randomChars(6)); + } + + // prefix and postfix + const name = [ + (isBlank(opts.prefix) ? 'tmp-' : opts.prefix), + process.pid, + _randomChars(12), + (opts.postfix ? opts.postfix : '') + ].join(''); + + return path.join(opts.dir || tmpDir, name); +} + +/** + * Gets a temporary file name. 
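+ * A hypothetical example: tmpName({ prefix: 'build-' }, (err, name) => { ... })
+ * yields a path such as '<os.tmpdir()>/build-<pid><12 random chars>' (see
+ * _generateTmpName above).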
+ * + * @param {(Options|tmpNameCallback)} options options or callback + * @param {?tmpNameCallback} callback the callback function + */ +function tmpName(options, callback) { + var + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1], + tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES; + + /* istanbul ignore else */ + if (isNaN(tries) || tries < 0) + return cb(new Error('Invalid tries')); + + /* istanbul ignore else */ + if (opts.template && !opts.template.match(TEMPLATE_PATTERN)) + return cb(new Error('Invalid template provided')); + + (function _getUniqueName() { + try { + const name = _generateTmpName(opts); + + // check whether the path exists then retry if needed + fs.stat(name, function (err) { + /* istanbul ignore else */ + if (!err) { + /* istanbul ignore else */ + if (tries-- > 0) return _getUniqueName(); + + return cb(new Error('Could not get a unique tmp filename, max tries reached ' + name)); + } + + cb(null, name); + }); + } catch (err) { + cb(err); + } + }()); +} + +/** + * Synchronous version of tmpName. + * + * @param {Object} options + * @returns {string} the generated random name + * @throws {Error} if the options are invalid or could not generate a filename + */ +function tmpNameSync(options) { + var + args = _parseArguments(options), + opts = args[0], + tries = !isBlank(opts.name) ? 1 : opts.tries || DEFAULT_TRIES; + + /* istanbul ignore else */ + if (isNaN(tries) || tries < 0) + throw new Error('Invalid tries'); + + /* istanbul ignore else */ + if (opts.template && !opts.template.match(TEMPLATE_PATTERN)) + throw new Error('Invalid template provided'); + + do { + const name = _generateTmpName(opts); + try { + fs.statSync(name); + } catch (e) { + return name; + } + } while (tries-- > 0); + + throw new Error('Could not get a unique tmp filename, max tries reached'); +} + +/** + * Creates and opens a temporary file. + * + * @param {(Options|fileCallback)} options the config options or the callback function + * @param {?fileCallback} callback + */ +function file(options, callback) { + var + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create and open the file + fs.open(name, CREATE_FLAGS, opts.mode || FILE_MODE, function _fileCreated(err, fd) { + /* istanbul ignore else */ + if (err) return cb(err); + + if (opts.discardDescriptor) { + return fs.close(fd, function _discardCallback(err) { + /* istanbul ignore else */ + if (err) { + // Low probability, and the file exists, so this could be + // ignored. If it isn't we certainly need to unlink the + // file, and if that fails too its error is more + // important. 
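+              // In short: after a failed close we still try to unlink the
+              // temporary file, and only the error that survives is reported.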
+ try { + fs.unlinkSync(name); + } catch (e) { + if (!isENOENT(e)) { + err = e; + } + } + return cb(err); + } + cb(null, name, undefined, _prepareTmpFileRemoveCallback(name, -1, opts)); + }); } - }], - getCommitAuthors: ["GET /repos/{owner}/{repo}/import/authors"], - getImportProgress: ["GET /repos/{owner}/{repo}/import", {}, { - renamed: ["migrations", "getImportStatus"] - }], - getImportStatus: ["GET /repos/{owner}/{repo}/import"], - getLargeFiles: ["GET /repos/{owner}/{repo}/import/large_files"], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] + /* istanbul ignore else */ + if (opts.detachDescriptor) { + return cb(null, name, fd, _prepareTmpFileRemoveCallback(name, -1, opts)); } - }], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}", { - mediaType: { - previews: ["wyandotte"] + cb(null, name, fd, _prepareTmpFileRemoveCallback(name, fd, opts)); + }); + }); +} + +/** + * Synchronous version of file. + * + * @param {Options} options + * @returns {FileSyncObject} object consists of name, fd and removeCallback + * @throws {Error} if cannot create a file + */ +function fileSync(options) { + var + args = _parseArguments(options), + opts = args[0]; + + const discardOrDetachDescriptor = opts.discardDescriptor || opts.detachDescriptor; + const name = tmpNameSync(opts); + var fd = fs.openSync(name, CREATE_FLAGS, opts.mode || FILE_MODE); + /* istanbul ignore else */ + if (opts.discardDescriptor) { + fs.closeSync(fd); + fd = undefined; + } + + return { + name: name, + fd: fd, + removeCallback: _prepareTmpFileRemoveCallback(name, discardOrDetachDescriptor ? -1 : fd, opts) + }; +} + +/** + * Creates a temporary directory. + * + * @param {(Options|dirCallback)} options the options or the callback function + * @param {?dirCallback} callback + */ +function dir(options, callback) { + var + args = _parseArguments(options, callback), + opts = args[0], + cb = args[1]; + + // gets a temporary filename + tmpName(opts, function _tmpNameCreated(err, name) { + /* istanbul ignore else */ + if (err) return cb(err); + + // create the directory + fs.mkdir(name, opts.mode || DIR_MODE, function _dirCreated(err) { + /* istanbul ignore else */ + if (err) return cb(err); + + cb(null, name, _prepareTmpDirRemoveCallback(name, opts)); + }); + }); +} + +/** + * Synchronous version of dir. + * + * @param {Options} options + * @returns {DirSyncObject} object consists of name and removeCallback + * @throws {Error} if it cannot create a directory + */ +function dirSync(options) { + var + args = _parseArguments(options), + opts = args[0]; + + const name = tmpNameSync(opts); + fs.mkdirSync(name, opts.mode || DIR_MODE); + + return { + name: name, + removeCallback: _prepareTmpDirRemoveCallback(name, opts) + }; +} + +/** + * Removes files asynchronously. + * + * @param {Object} fdPath + * @param {Function} next + * @private + */ +function _removeFileAsync(fdPath, next) { + const _handler = function (err) { + if (err && !isENOENT(err)) { + // reraise any unanticipated error + return next(err); + } + next(); + } + + if (0 <= fdPath[0]) + fs.close(fdPath[0], function (err) { + fs.unlink(fdPath[1], _handler); + }); + else fs.unlink(fdPath[1], _handler); +} + +/** + * Removes files synchronously. 
+ * + * @param {Object} fdPath + * @private + */ +function _removeFileSync(fdPath) { + try { + if (0 <= fdPath[0]) fs.closeSync(fdPath[0]); + } catch (e) { + // reraise any unanticipated error + if (!isEBADF(e) && !isENOENT(e)) throw e; + } finally { + try { + fs.unlinkSync(fdPath[1]); + } + catch (e) { + // reraise any unanticipated error + if (!isENOENT(e)) throw e; + } + } +} + +/** + * Prepares the callback for removal of the temporary file. + * + * @param {string} name the path of the file + * @param {number} fd file descriptor + * @param {Object} opts + * @returns {fileCallback} + * @private + */ +function _prepareTmpFileRemoveCallback(name, fd, opts) { + const removeCallbackSync = _prepareRemoveCallback(_removeFileSync, [fd, name]); + const removeCallback = _prepareRemoveCallback(_removeFileAsync, [fd, name], removeCallbackSync); + + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return removeCallback; +} + +/** + * Simple wrapper for rimraf. + * + * @param {string} dirPath + * @param {Function} next + * @private + */ +function _rimrafRemoveDirWrapper(dirPath, next) { + rimraf(dirPath, next); +} + +/** + * Simple wrapper for rimraf.sync. + * + * @param {string} dirPath + * @private + */ +function _rimrafRemoveDirSyncWrapper(dirPath, next) { + try { + return next(null, rimraf.sync(dirPath)); + } catch (err) { + return next(err); + } +} + +/** + * Prepares the callback for removal of the temporary directory. + * + * @param {string} name + * @param {Object} opts + * @returns {Function} the callback + * @private + */ +function _prepareTmpDirRemoveCallback(name, opts) { + const removeFunction = opts.unsafeCleanup ? _rimrafRemoveDirWrapper : fs.rmdir.bind(fs); + const removeFunctionSync = opts.unsafeCleanup ? _rimrafRemoveDirSyncWrapper : fs.rmdirSync.bind(fs); + const removeCallbackSync = _prepareRemoveCallback(removeFunctionSync, name); + const removeCallback = _prepareRemoveCallback(removeFunction, name, removeCallbackSync); + if (!opts.keep) _removeObjects.unshift(removeCallbackSync); + + return removeCallback; +} + +/** + * Creates a guarded function wrapping the removeFunction call. + * + * @param {Function} removeFunction + * @param {Object} arg + * @returns {Function} + * @private + */ +function _prepareRemoveCallback(removeFunction, arg, cleanupCallbackSync) { + var called = false; + + return function _cleanupCallback(next) { + next = next || function () {}; + if (!called) { + const toRemove = cleanupCallbackSync || _cleanupCallback; + const index = _removeObjects.indexOf(toRemove); + /* istanbul ignore else */ + if (index >= 0) _removeObjects.splice(index, 1); + + called = true; + // sync? + if (removeFunction.length === 1) { + try { + removeFunction(arg); + return next(null); + } + catch (err) { + // if no next is provided and since we are + // in silent cleanup mode on process exit, + // we will ignore the error + return next(err); + } + } else return removeFunction(arg, next); + } else return next(new Error('cleanup callback has already been called')); + }; +} + +/** + * The garbage collector. + * + * @private + */ +function _garbageCollector() { + /* istanbul ignore else */ + if (!_gracefulCleanup) return; + + // the function being called removes itself from _removeObjects, + // loop until _removeObjects is empty + while (_removeObjects.length) { + try { + _removeObjects[0](); + } catch (e) { + // already removed? + } + } +} + +/** + * Helper for testing against EBADF to compensate changes made to Node 7.x under Windows. 
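+ *
+ * @example
+ * // hypothetical guard, mirroring _removeFileSync above
+ * try { fs.closeSync(fd); } catch (e) { if (!isEBADF(e)) throw e; }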
+ */ +function isEBADF(error) { + return isExpectedError(error, -EBADF, 'EBADF'); +} + +/** + * Helper for testing against ENOENT to compensate changes made to Node 7.x under Windows. + */ +function isENOENT(error) { + return isExpectedError(error, -ENOENT, 'ENOENT'); +} + +/** + * Helper to determine whether the expected error code matches the actual code and errno, + * which will differ between the supported node versions. + * + * - Node >= 7.0: + * error.code {string} + * error.errno {string|number} any numerical value will be negated + * + * - Node >= 6.0 < 7.0: + * error.code {string} + * error.errno {number} negated + * + * - Node >= 4.0 < 6.0: introduces SystemError + * error.code {string} + * error.errno {number} negated + * + * - Node >= 0.10 < 4.0: + * error.code {number} negated + * error.errno n/a + */ +function isExpectedError(error, code, errno) { + return error.code === code || error.code === errno; +} + +/** + * Helper which determines whether a string s is blank, that is undefined, or empty or null. + * + * @private + * @param {string} s + * @returns {Boolean} true whether the string s is blank, false otherwise + */ +function isBlank(s) { + return s === null || s === undefined || !s.trim(); +} + +/** + * Sets the graceful cleanup. + */ +function setGracefulCleanup() { + _gracefulCleanup = true; +} + +/** + * Returns the currently configured tmp dir from os.tmpdir(). + * + * @private + * @returns {string} the currently configured tmp dir + */ +function _getTmpDir() { + return os.tmpdir(); +} + +/** + * If there are multiple different versions of tmp in place, make sure that + * we recognize the old listeners. + * + * @param {Function} listener + * @private + * @returns {Boolean} true whether listener is a legacy listener + */ +function _is_legacy_listener(listener) { + return (listener.name === '_exit' || listener.name === '_uncaughtExceptionThrown') + && listener.toString().indexOf('_garbageCollector();') > -1; +} + +/** + * Safely install SIGINT listener. + * + * NOTE: this will only work on OSX and Linux. + * + * @private + */ +function _safely_install_sigint_listener() { + + const listeners = process.listeners(SIGINT); + const existingListeners = []; + for (let i = 0, length = listeners.length; i < length; i++) { + const lstnr = listeners[i]; + /* istanbul ignore else */ + if (lstnr.name === '_tmp$sigint_listener') { + existingListeners.push(lstnr); + process.removeListener(SIGINT, lstnr); + } + } + process.on(SIGINT, function _tmp$sigint_listener(doExit) { + for (let i = 0, length = existingListeners.length; i < length; i++) { + // let the existing listener do the garbage collection (e.g. jest sandbox) + try { + existingListeners[i](false); + } catch (err) { + // ignore } - }], - listForAuthenticatedUser: ["GET /user/migrations", { - mediaType: { - previews: ["wyandotte"] + } + try { + // force the garbage collector even it is called again in the exit listener + _garbageCollector(); + } finally { + if (!!doExit) { + process.exit(0); } - }], - listForOrg: ["GET /orgs/{org}/migrations", { - mediaType: { - previews: ["wyandotte"] + } + }); +} + +/** + * Safely install process exit listener. 
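+ *
+ * Listeners installed by other copies of this module (including legacy
+ * pre-1.0.0 ones) are collected and re-run from a single wrapper so each
+ * instance still performs its garbage collection on EXIT.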
+ * + * @private + */ +function _safely_install_exit_listener() { + const listeners = process.listeners(EXIT); + + // collect any existing listeners + const existingListeners = []; + for (let i = 0, length = listeners.length; i < length; i++) { + const lstnr = listeners[i]; + /* istanbul ignore else */ + // TODO: remove support for legacy listeners once release 1.0.0 is out + if (lstnr.name === '_tmp$safe_listener' || _is_legacy_listener(lstnr)) { + // we must forget about the uncaughtException listener, hopefully it is ours + if (lstnr.name !== '_uncaughtExceptionThrown') { + existingListeners.push(lstnr); } - }], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] + process.removeListener(EXIT, lstnr); + } + } + // TODO: what was the data parameter good for? + process.addListener(EXIT, function _tmp$safe_listener(data) { + for (let i = 0, length = existingListeners.length; i < length; i++) { + // let the existing listener do the garbage collection (e.g. jest sandbox) + try { + existingListeners[i](data); + } catch (err) { + // ignore } - }], - listReposForUser: ["GET /user/{migration_id}/repositories", { - mediaType: { - previews: ["wyandotte"] + } + _garbageCollector(); + }); +} + +_safely_install_exit_listener(); +_safely_install_sigint_listener(); + +/** + * Configuration options. + * + * @typedef {Object} Options + * @property {?number} tries the number of tries before give up the name generation + * @property {?string} template the "mkstemp" like filename template + * @property {?string} name fix name + * @property {?string} dir the tmp directory to use + * @property {?string} prefix prefix for the generated name + * @property {?string} postfix postfix for the generated name + * @property {?boolean} unsafeCleanup recursively removes the created temporary directory, even when it's not empty + */ + +/** + * @typedef {Object} FileSyncObject + * @property {string} name the name of the file + * @property {string} fd the file descriptor + * @property {fileCallback} removeCallback the callback function to remove the file + */ + +/** + * @typedef {Object} DirSyncObject + * @property {string} name the name of the directory + * @property {fileCallback} removeCallback the callback function to remove the directory + */ + +/** + * @callback tmpNameCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + */ + +/** + * @callback fileCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {number} fd the file descriptor + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * @callback dirCallback + * @param {?Error} err the error object if anything goes wrong + * @param {string} name the temporary file name + * @param {cleanupCallback} fn the cleanup callback function + */ + +/** + * Removes the temporary created file or directory. + * + * @callback cleanupCallback + * @param {simpleCallback} [next] function to call after entry was removed + */ + +/** + * Callback function for function composition. 
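+ * Called once the removal attempt for the temporary entry has finished.
+ *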
+ * @see {@link https://github.com/raszi/node-tmp/issues/57|raszi/node-tmp#57} + * + * @callback simpleCallback + */ + +// exporting all the needed methods + +// evaluate os.tmpdir() lazily, mainly for simplifying testing but it also will +// allow users to reconfigure the temporary directory +Object.defineProperty(module.exports, 'tmpdir', { + enumerable: true, + configurable: false, + get: function () { + return _getTmpDir(); + } +}); + +module.exports.dir = dir; +module.exports.dirSync = dirSync; + +module.exports.file = file; +module.exports.fileSync = fileSync; + +module.exports.tmpName = tmpName; +module.exports.tmpNameSync = tmpNameSync; + +module.exports.setGracefulCleanup = setGracefulCleanup; + + +/***/ }), +/* 403 */, +/* 404 */, +/* 405 */, +/* 406 */, +/* 407 */, +/* 408 */, +/* 409 */, +/* 410 */, +/* 411 */, +/* 412 */, +/* 413 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = __webpack_require__(141); + + +/***/ }), +/* 414 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + + +var yaml = __webpack_require__(9); + + +module.exports = yaml; + + +/***/ }), +/* 415 */, +/* 416 */, +/* 417 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var common = __webpack_require__(740); +var Type = __webpack_require__(945); + +var YAML_FLOAT_PATTERN = new RegExp( + // 2.5e4, 2.5 and integers + '^(?:[-+]?(?:0|[1-9][0-9_]*)(?:\\.[0-9_]*)?(?:[eE][-+]?[0-9]+)?' + + // .2e4, .2 + // special case, seems not from spec + '|\\.[0-9_]+(?:[eE][-+]?[0-9]+)?' + + // 20:59 + '|[-+]?[0-9][0-9_]*(?::[0-5]?[0-9])+\\.[0-9_]*' + + // .inf + '|[-+]?\\.(?:inf|Inf|INF)' + + // .nan + '|\\.(?:nan|NaN|NAN))$'); + +function resolveYamlFloat(data) { + if (data === null) return false; + + if (!YAML_FLOAT_PATTERN.test(data) || + // Quick hack to not allow integers end with `_` + // Probably should update regexp & check speed + data[data.length - 1] === '_') { + return false; + } + + return true; +} + +function constructYamlFloat(data) { + var value, sign, base, digits; + + value = data.replace(/_/g, '').toLowerCase(); + sign = value[0] === '-' ? -1 : 1; + digits = []; + + if ('+-'.indexOf(value[0]) >= 0) { + value = value.slice(1); + } + + if (value === '.inf') { + return (sign === 1) ? Number.POSITIVE_INFINITY : Number.NEGATIVE_INFINITY; + + } else if (value === '.nan') { + return NaN; + + } else if (value.indexOf(':') >= 0) { + value.split(':').forEach(function (v) { + digits.unshift(parseFloat(v, 10)); + }); + + value = 0.0; + base = 1; + + digits.forEach(function (d) { + value += d * base; + base *= 60; + }); + + return sign * value; + + } + return sign * parseFloat(value, 10); +} + + +var SCIENTIFIC_WITHOUT_DOT = /^[-+]?[0-9]+e/; + +function representYamlFloat(object, style) { + var res; + + if (isNaN(object)) { + switch (style) { + case 'lowercase': return '.nan'; + case 'uppercase': return '.NAN'; + case 'camelcase': return '.NaN'; + } + } else if (Number.POSITIVE_INFINITY === object) { + switch (style) { + case 'lowercase': return '.inf'; + case 'uppercase': return '.INF'; + case 'camelcase': return '.Inf'; + } + } else if (Number.NEGATIVE_INFINITY === object) { + switch (style) { + case 'lowercase': return '-.inf'; + case 'uppercase': return '-.INF'; + case 'camelcase': return '-.Inf'; + } + } else if (common.isNegativeZero(object)) { + return '-0.0'; + } + + res = object.toString(10); + + // JS stringifier can build scientific format without dots: 5e-100, + // while YAML requres dot: 5.e-100. 
Fix it with simple hack + + return SCIENTIFIC_WITHOUT_DOT.test(res) ? res.replace('e', '.e') : res; +} + +function isFloat(object) { + return (Object.prototype.toString.call(object) === '[object Number]') && + (object % 1 !== 0 || common.isNegativeZero(object)); +} + +module.exports = new Type('tag:yaml.org,2002:float', { + kind: 'scalar', + resolve: resolveYamlFloat, + construct: constructYamlFloat, + predicate: isFloat, + represent: representYamlFloat, + defaultStyle: 'lowercase' +}); + + +/***/ }), +/* 418 */, +/* 419 */, +/* 420 */, +/* 421 */, +/* 422 */, +/* 423 */, +/* 424 */, +/* 425 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const fs = __webpack_require__(869) +const path = __webpack_require__(622) +const util = __webpack_require__(669) +const atLeastNode = __webpack_require__(159) + +const nodeSupportsBigInt = atLeastNode('10.5.0') +const stat = (file) => nodeSupportsBigInt ? fs.stat(file, { bigint: true }) : fs.stat(file) +const statSync = (file) => nodeSupportsBigInt ? fs.statSync(file, { bigint: true }) : fs.statSync(file) + +function getStats (src, dest) { + return Promise.all([ + stat(src), + stat(dest).catch(err => { + if (err.code === 'ENOENT') return null + throw err + }) + ]).then(([srcStat, destStat]) => ({ srcStat, destStat })) +} + +function getStatsSync (src, dest) { + let destStat + const srcStat = statSync(src) + try { + destStat = statSync(dest) + } catch (err) { + if (err.code === 'ENOENT') return { srcStat, destStat: null } + throw err + } + return { srcStat, destStat } +} + +function checkPaths (src, dest, funcName, cb) { + util.callbackify(getStats)(src, dest, (err, stats) => { + if (err) return cb(err) + const { srcStat, destStat } = stats + if (destStat && areIdentical(srcStat, destStat)) { + return cb(new Error('Source and destination must not be the same.')) + } + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + return cb(new Error(errMsg(src, dest, funcName))) + } + return cb(null, { srcStat, destStat }) + }) +} + +function checkPathsSync (src, dest, funcName) { + const { srcStat, destStat } = getStatsSync(src, dest) + if (destStat && areIdentical(srcStat, destStat)) { + throw new Error('Source and destination must not be the same.') + } + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new Error(errMsg(src, dest, funcName)) + } + return { srcStat, destStat } +} + +// recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
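+// e.g. (illustrative paths): checkParentPaths('/a/b', srcStat, '/a/b/c/d', 'copy', cb)
+// stats '/a/b/c' and then '/a/b'; the matching inode at '/a/b' surfaces the
+// "cannot copy to a subdirectory of itself" error built by errMsg below.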
+function checkParentPaths (src, srcStat, dest, funcName, cb) { + const srcParent = path.resolve(path.dirname(src)) + const destParent = path.resolve(path.dirname(dest)) + if (destParent === srcParent || destParent === path.parse(destParent).root) return cb() + const callback = (err, destStat) => { + if (err) { + if (err.code === 'ENOENT') return cb() + return cb(err) + } + if (areIdentical(srcStat, destStat)) { + return cb(new Error(errMsg(src, dest, funcName))) + } + return checkParentPaths(src, srcStat, destParent, funcName, cb) + } + if (nodeSupportsBigInt) fs.stat(destParent, { bigint: true }, callback) + else fs.stat(destParent, callback) +} + +function checkParentPathsSync (src, srcStat, dest, funcName) { + const srcParent = path.resolve(path.dirname(src)) + const destParent = path.resolve(path.dirname(dest)) + if (destParent === srcParent || destParent === path.parse(destParent).root) return + let destStat + try { + destStat = statSync(destParent) + } catch (err) { + if (err.code === 'ENOENT') return + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new Error(errMsg(src, dest, funcName)) + } + return checkParentPathsSync(src, srcStat, destParent, funcName) +} + +function areIdentical (srcStat, destStat) { + if (destStat.ino && destStat.dev && destStat.ino === srcStat.ino && destStat.dev === srcStat.dev) { + if (nodeSupportsBigInt || destStat.ino < Number.MAX_SAFE_INTEGER) { + // definitive answer + return true + } + // Use additional heuristics if we can't use 'bigint'. + // Different 'ino' could be represented the same if they are >= Number.MAX_SAFE_INTEGER + // See issue 657 + if (destStat.size === srcStat.size && + destStat.mode === srcStat.mode && + destStat.nlink === srcStat.nlink && + destStat.atimeMs === srcStat.atimeMs && + destStat.mtimeMs === srcStat.mtimeMs && + destStat.ctimeMs === srcStat.ctimeMs && + destStat.birthtimeMs === srcStat.birthtimeMs) { + // heuristic answer + return true + } + } + return false +} + +// return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = path.resolve(src).split(path.sep).filter(i => i) + const destArr = path.resolve(dest).split(path.sep).filter(i => i) + return srcArr.reduce((acc, cur, i) => acc && destArr[i] === cur, true) +} + +function errMsg (src, dest, funcName) { + return `Cannot ${funcName} '${src}' to a subdirectory of itself, '${dest}'.` +} + +module.exports = { + checkPaths, + checkPathsSync, + checkParentPaths, + checkParentPathsSync, + isSrcSubdir +} + + +/***/ }), +/* 426 */, +/* 427 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +// Older verions of Node.js might not have `util.getSystemErrorName()`. +// In that case, fall back to a deprecated internal. 
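+// Sketch (Linux errno): util.getSystemErrorName(-2) === 'ENOENT' on Node >= 9.7;
+// older runtimes resolve the same name through process.binding('uv').errname(-2).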
+const util = __webpack_require__(669); + +let uv; + +if (typeof util.getSystemErrorName === 'function') { + module.exports = util.getSystemErrorName; +} else { + try { + uv = process.binding('uv'); + + if (typeof uv.errname !== 'function') { + throw new TypeError('uv.errname is not a function'); + } + } catch (err) { + console.error('execa/lib/errname: unable to establish process.binding(\'uv\')', err); + uv = null; + } + + module.exports = code => errname(uv, code); +} + +// Used for testing the fallback behavior +module.exports.__test__ = errname; + +function errname(uv, code) { + if (uv) { + return uv.errname(code); + } + + if (!(code < 0)) { + throw new Error('err >= 0'); + } + + return `Unknown system error ${code}`; +} + + + +/***/ }), +/* 428 */, +/* 429 */, +/* 430 */, +/* 431 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __importStar(__webpack_require__(87)); +const utils_1 = __webpack_require__(394); +/** + * Commands + * + * Command Format: + * ::name key=value,key=value::message + * + * Examples: + * ::warning::This is the message + * ::set-env name=MY_VAR::some value + */ +function issueCommand(command, properties, message) { + const cmd = new Command(command, properties, message); + process.stdout.write(cmd.toString() + os.EOL); +} +exports.issueCommand = issueCommand; +function issue(name, message = '') { + issueCommand(name, {}, message); +} +exports.issue = issue; +const CMD_STRING = '::'; +class Command { + constructor(command, properties, message) { + if (!command) { + command = 'missing.command'; + } + this.command = command; + this.properties = properties; + this.message = message; + } + toString() { + let cmdStr = CMD_STRING + this.command; + if (this.properties && Object.keys(this.properties).length > 0) { + cmdStr += ' '; + let first = true; + for (const key in this.properties) { + if (this.properties.hasOwnProperty(key)) { + const val = this.properties[key]; + if (val) { + if (first) { + first = false; + } + else { + cmdStr += ','; + } + cmdStr += `${key}=${escapeProperty(val)}`; + } + } + } + } + cmdStr += `${CMD_STRING}${escapeData(this.message)}`; + return cmdStr; + } +} +function escapeData(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A'); +} +function escapeProperty(s) { + return utils_1.toCommandValue(s) + .replace(/%/g, '%25') + .replace(/\r/g, '%0D') + .replace(/\n/g, '%0A') + .replace(/:/g, '%3A') + .replace(/,/g, '%2C'); +} +//# sourceMappingURL=command.js.map + +/***/ }), +/* 432 */, +/* 433 */, +/* 434 */, +/* 435 */, +/* 436 */, +/* 437 */, +/* 438 */, +/* 439 */, +/* 440 */, +/* 441 */, +/* 442 */, +/* 443 */ +/***/ (function(module) { + +function getBuild(project, buildConfiguration) { + const build = buildConfiguration.build + ? 
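+ /* a build section exists: merge the project's entry over the defaults */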
overrideProperties( + buildConfiguration.default, + buildConfiguration.build.find( + projectBuild => project === projectBuild.project + ) + ) + : buildConfiguration.default; + delete build.project; + return manipulateArchiveArtifacts(manipulateProperties(build), project); +} + +/** + * Override properties from one object to another + * + * @param {Object} target + * @param {Object} source + */ +function overrideProperties(target, source) { + const targetClone = { ...target }; + const sourceClone = { ...source }; + Object.entries(targetClone) + .filter(([key]) => sourceClone[key]) + .forEach(([key, value]) => { + if (typeof value === "object") { + targetClone[key] = overrideProperties( + targetClone[key], + sourceClone[key] + ); + } else { + targetClone[key] = sourceClone[key]; } - }], - mapCommitAuthor: ["PATCH /repos/{owner}/{repo}/import/authors/{author_id}"], - setLfsPreference: ["PATCH /repos/{owner}/{repo}/import/lfs"], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: ["PUT /repos/{owner}/{repo}/import"], - unlockRepoForAuthenticatedUser: ["DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock", { - mediaType: { - previews: ["wyandotte"] + }); + + Object.keys(sourceClone) + .filter(key => !targetClone[key]) + .forEach(key => (targetClone[key] = sourceClone[key])); + return targetClone; +} + +/** + * manipulates certains properties from an object. It basically converts String lists to Arrays + * @param {Object} properties the properties to manipulate + */ +function manipulateProperties(properties) { + const propertiesClone = { ...properties }; + Object.entries(propertiesClone).forEach(([key, value]) => { + if (typeof value === "object" && !Array.isArray(value)) { + propertiesClone[key] = manipulateProperties(propertiesClone[key]); + } else { + propertiesClone[key] = + typeof value === "string" && value.includes("\n") + ? value.split("\n").filter(e => e) + : value; + } + }); + return propertiesClone; +} + +/** + * manipulates `archive-artifacts` properties + * @param {Object} build the node's build section + * @param {String} project the project name + */ +function manipulateArchiveArtifacts(build, project) { + const archiveArtifacts = build["archive-artifacts"]; + if (archiveArtifacts) { + archiveArtifacts["if-no-files-found"] = build["archive-artifacts"][ + "if-no-files-found" + ] + ? archiveArtifacts["if-no-files-found"] + : "warn"; + archiveArtifacts.dependencies = archiveArtifacts.dependencies + ? archiveArtifacts.dependencies + : "none"; + archiveArtifacts.name = archiveArtifacts.name + ? archiveArtifacts.name + : project.includes("/") + ? project.split("/")[1] + : project; + archiveArtifacts.paths = treatArchiveArtifactsPath(archiveArtifacts.path); + } + return build; +} + +function treatArchiveArtifactsPath(archiveArtifactsPath) { + return (Array.isArray(archiveArtifactsPath) + ? archiveArtifactsPath + : archiveArtifactsPath.split("\n") + ) + .filter(line => line) + .reduce((acc, pathExpression) => { + acc.push(convertPathExpressionToPath(pathExpression)); + return acc; + }, []); +} + +function convertPathExpressionToPath(pathExpression) { + const match = pathExpression.match(/([^@]*)@?(always|success|failure)?/); + return match + ? { + path: match[1], + on: match[2] ? 
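+ /* keep the explicit @always|@success|@failure suffix */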
match[2] : "success" } - }], - unlockRepoForOrg: ["DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock", { + : pathExpression; +} + +function treatProject(project, buildConfiguration) { + return { build: getBuild(project, buildConfiguration) }; +} + +module.exports = { treatProject }; + + +/***/ }), +/* 444 */, +/* 445 */, +/* 446 */, +/* 447 */, +/* 448 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +var universalUserAgent = __webpack_require__(796); +var beforeAfterHook = __webpack_require__(523); +var request = __webpack_require__(753); +var graphql = __webpack_require__(898); +var authToken = __webpack_require__(813); + +function _defineProperty(obj, key, value) { + if (key in obj) { + Object.defineProperty(obj, key, { + value: value, + enumerable: true, + configurable: true, + writable: true + }); + } else { + obj[key] = value; + } + + return obj; +} + +function ownKeys(object, enumerableOnly) { + var keys = Object.keys(object); + + if (Object.getOwnPropertySymbols) { + var symbols = Object.getOwnPropertySymbols(object); + if (enumerableOnly) symbols = symbols.filter(function (sym) { + return Object.getOwnPropertyDescriptor(object, sym).enumerable; + }); + keys.push.apply(keys, symbols); + } + + return keys; +} + +function _objectSpread2(target) { + for (var i = 1; i < arguments.length; i++) { + var source = arguments[i] != null ? arguments[i] : {}; + + if (i % 2) { + ownKeys(Object(source), true).forEach(function (key) { + _defineProperty(target, key, source[key]); + }); + } else if (Object.getOwnPropertyDescriptors) { + Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)); + } else { + ownKeys(Object(source)).forEach(function (key) { + Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key)); + }); + } + } + + return target; +} + +const VERSION = "2.5.4"; + +class Octokit { + constructor(options = {}) { + const hook = new beforeAfterHook.Collection(); + const requestDefaults = { + baseUrl: request.request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + hook: hook.bind(null, "request") + }), mediaType: { - previews: ["wyandotte"] + previews: [], + format: "" } - }], - updateImport: ["PATCH /repos/{owner}/{repo}/import"] - }, - orgs: { - addOrUpdateMembership: ["PUT /orgs/{org}/memberships/{username}", {}, { - renamed: ["orgs", "setMembershipForUser"] - }], - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembership: ["GET /orgs/{org}/members/{username}", {}, { - renamed: ["orgs", "checkMembershipForUser"] - }], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembership: ["GET /orgs/{org}/public_members/{username}", {}, { - renamed: ["orgs", "checkPublicMembershipForUser"] - }], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - concealMembership: ["DELETE /orgs/{org}/public_members/{username}", {}, { - renamed: ["orgs", "removePublicMembershipForAuthenticatedUser"] - }], - convertMemberToOutsideCollaborator: ["PUT /orgs/{org}/outside_collaborators/{username}"], - createHook: ["POST /orgs/{org}/hooks", {}, { - renamed: ["orgs", "createWebhook"] - }], - createInvitation: ["POST /orgs/{org}/invitations"], - createWebhook: ["POST /orgs/{org}/hooks"], - deleteHook: ["DELETE /orgs/{org}/hooks/{hook_id}", {}, { - renamed: ["orgs", "deleteWebhook"] - }], 
- deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - get: ["GET /orgs/{org}"], - getHook: ["GET /orgs/{org}/hooks/{hook_id}", {}, { - renamed: ["orgs", "getWebhook"] - }], - getMembership: ["GET /orgs/{org}/memberships/{username}", {}, { - renamed: ["orgs", "getMembershipForUser"] - }], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations", { - mediaType: { - previews: ["machine-man"] + }; // prepend default user agent with `options.userAgent` if set + + requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" "); + + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + + this.request = request.request.defaults(requestDefaults); + this.graphql = graphql.withCustomRequest(this.request).defaults(_objectSpread2(_objectSpread2({}, requestDefaults), {}, { + baseUrl: requestDefaults.baseUrl.replace(/\/api\/v3$/, "/api") + })); + this.log = Object.assign({ + debug: () => {}, + info: () => {}, + warn: console.warn.bind(console), + error: console.error.bind(console) + }, options.log); + this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance + // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registred. + // (2) If only `options.auth` is set, use the default token authentication strategy. + // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance. + // TODO: type `options.auth` based on `options.authStrategy`. + + if (!options.authStrategy) { + if (!options.auth) { + // (1) + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + // (2) + const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; } - }], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listHooks: ["GET /orgs/{org}/hooks", {}, { - renamed: ["orgs", "listWebhooks"] - }], - listInstallations: ["GET /orgs/{org}/installations", { - mediaType: { - previews: ["machine-man"] + } else { + const auth = options.authStrategy(Object.assign({ + request: this.request + }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯ + + hook.wrap("request", auth.hook); + this.auth = auth; + } // apply plugins + // https://stackoverflow.com/a/16345172 + + + const classConstructor = this.constructor; + classConstructor.plugins.forEach(plugin => { + Object.assign(this, plugin(this, options)); + }); + } + + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? 
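+ /* both layers set a user agent: combine them, most specific first */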
{ + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null)); } - }, { - renamed: ["orgs", "listAppInstallations"] - }], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMemberships: ["GET /user/memberships/orgs", {}, { - renamed: ["orgs", "listMembershipsForAuthenticatedUser"] - }], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listWebhooks: ["GET /orgs/{org}/hooks"], - pingHook: ["POST /orgs/{org}/hooks/{hook_id}/pings", {}, { - renamed: ["orgs", "pingWebhook"] - }], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - publicizeMembership: ["PUT /orgs/{org}/public_members/{username}", {}, { - renamed: ["orgs", "setPublicMembershipForAuthenticatedUser"] - }], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembership: ["DELETE /orgs/{org}/memberships/{username}", {}, { - renamed: ["orgs", "removeMembershipForUser"] - }], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: ["DELETE /orgs/{org}/outside_collaborators/{username}"], - removePublicMembershipForAuthenticatedUser: ["DELETE /orgs/{org}/public_members/{username}"], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: ["PUT /orgs/{org}/public_members/{username}"], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateHook: ["PATCH /orgs/{org}/hooks/{hook_id}", {}, { - renamed: ["orgs", "updateWebhook"] - }], - updateMembership: ["PATCH /user/memberships/orgs/{org}", {}, { - renamed: ["orgs", "updateMembershipForAuthenticatedUser"] - }], - updateMembershipForAuthenticatedUser: ["PATCH /user/memberships/orgs/{org}"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"] + + }; + return OctokitWithDefaults; + } + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + + + static plugin(p1, ...p2) { + var _a; + + if (p1 instanceof Array) { + console.warn(["Passing an array of plugins to Octokit.plugin() has been deprecated.", "Instead of:", " Octokit.plugin([plugin1, plugin2, ...])", "Use:", " Octokit.plugin(plugin1, plugin2, ...)"].join("\n")); + } + + const currentPlugins = this.plugins; + let newPlugins = [...(p1 instanceof Array ? 
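+ /* deprecated array form, already warned about above */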
p1 : [p1]), ...p2]; + const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a); + return NewOctokit; + } + +} +Octokit.VERSION = VERSION; +Octokit.plugins = []; + +exports.Octokit = Octokit; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 449 */, +/* 450 */, +/* 451 */, +/* 452 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const utils_1 = __webpack_require__(870); +/** + * Used for managing http clients during either upload or download + */ +class HttpManager { + constructor(clientCount, userAgent) { + if (clientCount < 1) { + throw new Error('There must be at least one client'); + } + this.userAgent = userAgent; + this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); + } + getClient(index) { + return this.clients[index]; + } + // client disposal is necessary if a keep-alive connection is used to properly close the connection + // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 + disposeAndReplaceClient(index) { + this.clients[index].dispose(); + this.clients[index] = utils_1.createHttpClient(this.userAgent); + } + disposeAndReplaceAllClients() { + for (const [index] of this.clients.entries()) { + this.disposeAndReplaceClient(index); + } + } +} +exports.HttpManager = HttpManager; +//# sourceMappingURL=http-manager.js.map + +/***/ }), +/* 453 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var once = __webpack_require__(49) +var eos = __webpack_require__(562) +var fs = __webpack_require__(747) // we only need fs to get the ReadStream and WriteStream prototypes + +var noop = function () {} +var ancient = /^v?\.0/.test(process.version) + +var isFn = function (fn) { + return typeof fn === 'function' +} + +var isFS = function (stream) { + if (!ancient) return false // newer node version do not need to care about fs is a special way + if (!fs) return false // browser + return (stream instanceof (fs.ReadStream || noop) || stream instanceof (fs.WriteStream || noop)) && isFn(stream.close) +} + +var isRequest = function (stream) { + return stream.setHeader && isFn(stream.abort) +} + +var destroyer = function (stream, reading, writing, callback) { + callback = once(callback) + + var closed = false + stream.on('close', function () { + closed = true + }) + + eos(stream, {readable: reading, writable: writing}, function (err) { + if (err) return callback(err) + closed = true + callback() + }) + + var destroyed = false + return function (err) { + if (closed) return + if (destroyed) return + destroyed = true + + if (isFS(stream)) return stream.close(noop) // use close for fs streams to avoid fd leaks + if (isRequest(stream)) return stream.abort() // request.destroy just do .end - .abort is what we want + + if (isFn(stream.destroy)) return stream.destroy() + + callback(err || new Error('stream was destroyed')) + } +} + +var call = function (fn) { + fn() +} + +var pipe = function (from, to) { + return from.pipe(to) +} + +var pump = function () { + var streams = Array.prototype.slice.call(arguments) + var callback = isFn(streams[streams.length - 1] || noop) && streams.pop() || noop + + if (Array.isArray(streams[0])) streams = streams[0] + if (streams.length < 2) throw new Error('pump requires two streams per minimum') + + var error + var destroys = streams.map(function (stream, i) { 
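+ // every stream but the last may be read from; every one but the first is written to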
+ var reading = i < streams.length - 1 + var writing = i > 0 + return destroyer(stream, reading, writing, function (err) { + if (!error) error = err + if (err) destroys.forEach(call) + if (reading) return + destroys.forEach(call) + callback(error) + }) + }) + + return streams.reduce(pipe) +} + +module.exports = pump + + +/***/ }), +/* 454 */ +/***/ (function(module, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var Stream = _interopDefault(__webpack_require__(794)); +var http = _interopDefault(__webpack_require__(605)); +var Url = _interopDefault(__webpack_require__(835)); +var https = _interopDefault(__webpack_require__(211)); +var zlib = _interopDefault(__webpack_require__(761)); + +// Based on https://github.com/tmpvar/jsdom/blob/aa85b2abf07766ff7bf5c1f6daafb3726f2f2db5/lib/jsdom/living/blob.js + +// fix for "Readable" isn't a named export issue +const Readable = Stream.Readable; + +const BUFFER = Symbol('buffer'); +const TYPE = Symbol('type'); + +class Blob { + constructor() { + this[TYPE] = ''; + + const blobParts = arguments[0]; + const options = arguments[1]; + + const buffers = []; + let size = 0; + + if (blobParts) { + const a = blobParts; + const length = Number(a.length); + for (let i = 0; i < length; i++) { + const element = a[i]; + let buffer; + if (element instanceof Buffer) { + buffer = element; + } else if (ArrayBuffer.isView(element)) { + buffer = Buffer.from(element.buffer, element.byteOffset, element.byteLength); + } else if (element instanceof ArrayBuffer) { + buffer = Buffer.from(element); + } else if (element instanceof Blob) { + buffer = element[BUFFER]; + } else { + buffer = Buffer.from(typeof element === 'string' ? 
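+ /* strings pass through; anything else is coerced via String() */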
element : String(element)); + } + size += buffer.length; + buffers.push(buffer); + } + } + + this[BUFFER] = Buffer.concat(buffers); + + let type = options && options.type !== undefined && String(options.type).toLowerCase(); + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type; + } + } + get size() { + return this[BUFFER].length; + } + get type() { + return this[TYPE]; + } + text() { + return Promise.resolve(this[BUFFER].toString()); + } + arrayBuffer() { + const buf = this[BUFFER]; + const ab = buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + return Promise.resolve(ab); + } + stream() { + const readable = new Readable(); + readable._read = function () {}; + readable.push(this[BUFFER]); + readable.push(null); + return readable; + } + toString() { + return '[object Blob]'; + } + slice() { + const size = this.size; + + const start = arguments[0]; + const end = arguments[1]; + let relativeStart, relativeEnd; + if (start === undefined) { + relativeStart = 0; + } else if (start < 0) { + relativeStart = Math.max(size + start, 0); + } else { + relativeStart = Math.min(start, size); + } + if (end === undefined) { + relativeEnd = size; + } else if (end < 0) { + relativeEnd = Math.max(size + end, 0); + } else { + relativeEnd = Math.min(end, size); + } + const span = Math.max(relativeEnd - relativeStart, 0); + + const buffer = this[BUFFER]; + const slicedBuffer = buffer.slice(relativeStart, relativeStart + span); + const blob = new Blob([], { type: arguments[2] }); + blob[BUFFER] = slicedBuffer; + return blob; + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, + slice: { enumerable: true } +}); + +Object.defineProperty(Blob.prototype, Symbol.toStringTag, { + value: 'Blob', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * fetch-error.js + * + * FetchError interface for operational errors + */ + +/** + * Create FetchError instance + * + * @param String message Error message for human + * @param String type Error type for machine + * @param String systemError For Node.js system error + * @return FetchError + */ +function FetchError(message, type, systemError) { + Error.call(this, message); + + this.message = message; + this.type = type; + + // when err.type is `system`, err.code contains system error code + if (systemError) { + this.code = this.errno = systemError.code; + } + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +FetchError.prototype = Object.create(Error.prototype); +FetchError.prototype.constructor = FetchError; +FetchError.prototype.name = 'FetchError'; + +let convert; +try { + convert = __webpack_require__(18).convert; +} catch (e) {} + +const INTERNALS = Symbol('Body internals'); + +// fix an issue where "PassThrough" isn't a named export for node <10 +const PassThrough = Stream.PassThrough; + +/** + * Body mixin + * + * Ref: https://fetch.spec.whatwg.org/#body + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +function Body(body) { + var _this = this; + + var _ref = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}, + _ref$size = _ref.size; + + let size = _ref$size === undefined ? 0 : _ref$size; + var _ref$timeout = _ref.timeout; + let timeout = _ref$timeout === undefined ? 
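+ /* 0 disables the response body timeout */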
0 : _ref$timeout; + + if (body == null) { + // body is undefined or null + body = null; + } else if (isURLSearchParams(body)) { + // body is a URLSearchParams + body = Buffer.from(body.toString()); + } else if (isBlob(body)) ; else if (Buffer.isBuffer(body)) ; else if (Object.prototype.toString.call(body) === '[object ArrayBuffer]') { + // body is ArrayBuffer + body = Buffer.from(body); + } else if (ArrayBuffer.isView(body)) { + // body is ArrayBufferView + body = Buffer.from(body.buffer, body.byteOffset, body.byteLength); + } else if (body instanceof Stream) ; else { + // none of the above + // coerce to string then buffer + body = Buffer.from(String(body)); + } + this[INTERNALS] = { + body, + disturbed: false, + error: null + }; + this.size = size; + this.timeout = timeout; + + if (body instanceof Stream) { + body.on('error', function (err) { + const error = err.name === 'AbortError' ? err : new FetchError(`Invalid response body while trying to fetch ${_this.url}: ${err.message}`, 'system', err); + _this[INTERNALS].error = error; + }); + } +} + +Body.prototype = { + get body() { + return this[INTERNALS].body; + }, + + get bodyUsed() { + return this[INTERNALS].disturbed; + }, + + /** + * Decode response as ArrayBuffer + * + * @return Promise + */ + arrayBuffer() { + return consumeBody.call(this).then(function (buf) { + return buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength); + }); + }, + + /** + * Return raw response as Blob + * + * @return Promise + */ + blob() { + let ct = this.headers && this.headers.get('content-type') || ''; + return consumeBody.call(this).then(function (buf) { + return Object.assign( + // Prevent copying + new Blob([], { + type: ct.toLowerCase() + }), { + [BUFFER]: buf + }); + }); + }, + + /** + * Decode response as json + * + * @return Promise + */ + json() { + var _this2 = this; + + return consumeBody.call(this).then(function (buffer) { + try { + return JSON.parse(buffer.toString()); + } catch (err) { + return Body.Promise.reject(new FetchError(`invalid json response body at ${_this2.url} reason: ${err.message}`, 'invalid-json')); + } + }); + }, + + /** + * Decode response as text + * + * @return Promise + */ + text() { + return consumeBody.call(this).then(function (buffer) { + return buffer.toString(); + }); + }, + + /** + * Decode response as buffer (non-spec api) + * + * @return Promise + */ + buffer() { + return consumeBody.call(this); + }, + + /** + * Decode response as text, while automatically detecting the encoding and + * trying to decode to UTF-8 (non-spec api) + * + * @return Promise + */ + textConverted() { + var _this3 = this; + + return consumeBody.call(this).then(function (buffer) { + return convertBody(buffer, _this3.headers); + }); + } +}; + +// In browsers, all properties are enumerable. +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true } +}); + +Body.mixIn = function (proto) { + for (const name of Object.getOwnPropertyNames(Body.prototype)) { + // istanbul ignore else: future proof + if (!(name in proto)) { + const desc = Object.getOwnPropertyDescriptor(Body.prototype, name); + Object.defineProperty(proto, name, desc); + } + } +}; + +/** + * Consume and convert an entire Body to a Buffer. 
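+ *
+ * Marks the body as disturbed, then drains the underlying stream while
+ * enforcing the optional size limit and response timeout.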
+ * + * Ref: https://fetch.spec.whatwg.org/#concept-body-consume-body + * + * @return Promise + */ +function consumeBody() { + var _this4 = this; + + if (this[INTERNALS].disturbed) { + return Body.Promise.reject(new TypeError(`body used already for: ${this.url}`)); + } + + this[INTERNALS].disturbed = true; + + if (this[INTERNALS].error) { + return Body.Promise.reject(this[INTERNALS].error); + } + + let body = this.body; + + // body is null + if (body === null) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is blob + if (isBlob(body)) { + body = body.stream(); + } + + // body is buffer + if (Buffer.isBuffer(body)) { + return Body.Promise.resolve(body); + } + + // istanbul ignore if: should never happen + if (!(body instanceof Stream)) { + return Body.Promise.resolve(Buffer.alloc(0)); + } + + // body is stream + // get ready to actually consume the body + let accum = []; + let accumBytes = 0; + let abort = false; + + return new Body.Promise(function (resolve, reject) { + let resTimeout; + + // allow timeout on slow response body + if (_this4.timeout) { + resTimeout = setTimeout(function () { + abort = true; + reject(new FetchError(`Response timeout while trying to fetch ${_this4.url} (over ${_this4.timeout}ms)`, 'body-timeout')); + }, _this4.timeout); + } + + // handle stream errors + body.on('error', function (err) { + if (err.name === 'AbortError') { + // if the request was aborted, reject with this Error + abort = true; + reject(err); + } else { + // other errors, such as incorrect content-encoding + reject(new FetchError(`Invalid response body while trying to fetch ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + + body.on('data', function (chunk) { + if (abort || chunk === null) { + return; + } + + if (_this4.size && accumBytes + chunk.length > _this4.size) { + abort = true; + reject(new FetchError(`content size at ${_this4.url} over limit: ${_this4.size}`, 'max-size')); + return; + } + + accumBytes += chunk.length; + accum.push(chunk); + }); + + body.on('end', function () { + if (abort) { + return; + } + + clearTimeout(resTimeout); + + try { + resolve(Buffer.concat(accum, accumBytes)); + } catch (err) { + // handle streams that have accumulated too much data (issue #414) + reject(new FetchError(`Could not create Buffer from response body for ${_this4.url}: ${err.message}`, 'system', err)); + } + }); + }); +} + +/** + * Detect buffer encoding and convert to target encoding + * ref: http://www.w3.org/TR/2011/WD-html5-20110113/parsing.html#determining-the-character-encoding + * + * @param Buffer buffer Incoming buffer + * @param String encoding Target encoding + * @return String + */ +function convertBody(buffer, headers) { + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function'); + } + + const ct = headers.get('content-type'); + let charset = 'utf-8'; + let res, str; + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct); + } + + // no charset in content type, peek at response body for at most 1024 bytes + str = buffer.slice(0, 1024).toString(); + + // html5 + if (!res && str) { + res = / 0 && arguments[0] !== undefined ? 
arguments[0] : undefined; + + this[MAP] = Object.create(null); + + if (init instanceof Headers) { + const rawHeaders = init.raw(); + const headerNames = Object.keys(rawHeaders); + + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value); + } + } + + return; + } + + // We don't worry about converting prop to ByteString here as append() + // will handle it. + if (init == null) ; else if (typeof init === 'object') { + const method = init[Symbol.iterator]; + if (method != null) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable'); + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = []; + for (const pair of init) { + if (typeof pair !== 'object' || typeof pair[Symbol.iterator] !== 'function') { + throw new TypeError('Each header pair must be iterable'); + } + pairs.push(Array.from(pair)); + } + + for (const pair of pairs) { + if (pair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple'); + } + this.append(pair[0], pair[1]); + } + } else { + // record + for (const key of Object.keys(init)) { + const value = init[key]; + this.append(key, value); + } + } + } else { + throw new TypeError('Provided initializer must be an object'); + } + } + + /** + * Return combined header value given name + * + * @param String name Header name + * @return Mixed + */ + get(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key === undefined) { + return null; + } + + return this[MAP][key].join(', '); + } + + /** + * Iterate over all headers + * + * @param Function callback Executed for each item with parameters (value, name, thisArg) + * @param Boolean thisArg `this` context for callback function + * @return Void + */ + forEach(callback) { + let thisArg = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : undefined; + + let pairs = getHeaders(this); + let i = 0; + while (i < pairs.length) { + var _pairs$i = pairs[i]; + const name = _pairs$i[0], + value = _pairs$i[1]; + + callback.call(thisArg, value, name, this); + pairs = getHeaders(this); + i++; + } + } + + /** + * Overwrite header values given name + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + set(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + this[MAP][key !== undefined ? 
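+ /* reuse the stored key so the header keeps its original casing */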
key : name] = [value]; + } + + /** + * Append a value onto existing header + * + * @param String name Header name + * @param String value Header value + * @return Void + */ + append(name, value) { + name = `${name}`; + value = `${value}`; + validateName(name); + validateValue(value); + const key = find(this[MAP], name); + if (key !== undefined) { + this[MAP][key].push(value); + } else { + this[MAP][name] = [value]; + } + } + + /** + * Check for header name existence + * + * @param String name Header name + * @return Boolean + */ + has(name) { + name = `${name}`; + validateName(name); + return find(this[MAP], name) !== undefined; + } + + /** + * Delete all header values given name + * + * @param String name Header name + * @return Void + */ + delete(name) { + name = `${name}`; + validateName(name); + const key = find(this[MAP], name); + if (key !== undefined) { + delete this[MAP][key]; + } + } + + /** + * Return raw headers (non-spec api) + * + * @return Object + */ + raw() { + return this[MAP]; + } + + /** + * Get an iterator on keys. + * + * @return Iterator + */ + keys() { + return createHeadersIterator(this, 'key'); + } + + /** + * Get an iterator on values. + * + * @return Iterator + */ + values() { + return createHeadersIterator(this, 'value'); + } + + /** + * Get an iterator on entries. + * + * This is the default iterator of the Headers object. + * + * @return Iterator + */ + [Symbol.iterator]() { + return createHeadersIterator(this, 'key+value'); + } +} +Headers.prototype.entries = Headers.prototype[Symbol.iterator]; + +Object.defineProperty(Headers.prototype, Symbol.toStringTag, { + value: 'Headers', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true } +}); + +function getHeaders(headers) { + let kind = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : 'key+value'; + + const keys = Object.keys(headers[MAP]).sort(); + return keys.map(kind === 'key' ? function (k) { + return k.toLowerCase(); + } : kind === 'value' ? 
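+ /* 'value' yields the joined values; anything else yields [name, value] pairs */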
function (k) { + return headers[MAP][k].join(', '); + } : function (k) { + return [k.toLowerCase(), headers[MAP][k].join(', ')]; + }); +} + +const INTERNAL = Symbol('internal'); + +function createHeadersIterator(target, kind) { + const iterator = Object.create(HeadersIteratorPrototype); + iterator[INTERNAL] = { + target, + kind, + index: 0 + }; + return iterator; +} + +const HeadersIteratorPrototype = Object.setPrototypeOf({ + next() { + // istanbul ignore if + if (!this || Object.getPrototypeOf(this) !== HeadersIteratorPrototype) { + throw new TypeError('Value of `this` is not a HeadersIterator'); + } + + var _INTERNAL = this[INTERNAL]; + const target = _INTERNAL.target, + kind = _INTERNAL.kind, + index = _INTERNAL.index; + + const values = getHeaders(target, kind); + const len = values.length; + if (index >= len) { + return { + value: undefined, + done: true + }; + } + + this[INTERNAL].index = index + 1; + + return { + value: values[index], + done: false + }; + } +}, Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))); + +Object.defineProperty(HeadersIteratorPrototype, Symbol.toStringTag, { + value: 'HeadersIterator', + writable: false, + enumerable: false, + configurable: true +}); + +/** + * Export the Headers object in a form that Node.js can consume. + * + * @param Headers headers + * @return Object + */ +function exportNodeCompatibleHeaders(headers) { + const obj = Object.assign({ __proto__: null }, headers[MAP]); + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host'); + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0]; + } + + return obj; +} + +/** + * Create a Headers object from an object of headers, ignoring those that do + * not conform to HTTP grammar productions. + * + * @param Object obj Object of headers + * @return Headers + */ +function createHeadersLenient(obj) { + const headers = new Headers(); + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue; + } + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue; + } + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val]; + } else { + headers[MAP][name].push(val); + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]]; + } + } + return headers; +} + +const INTERNALS$1 = Symbol('Response internals'); + +// fix an issue where "STATUS_CODES" aren't a named export for node <10 +const STATUS_CODES = http.STATUS_CODES; + +/** + * Response class + * + * @param Stream body Readable stream + * @param Object opts Response options + * @return Void + */ +class Response { + constructor() { + let body = arguments.length > 0 && arguments[0] !== undefined ? arguments[0] : null; + let opts = arguments.length > 1 && arguments[1] !== undefined ? 
arguments[1] : {}; + + Body.call(this, body, opts); + + const status = opts.status || 200; + const headers = new Headers(opts.headers); + + if (body != null && !headers.has('Content-Type')) { + const contentType = extractContentType(body); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + this[INTERNALS$1] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter + }; + } + + get url() { + return this[INTERNALS$1].url || ''; + } + + get status() { + return this[INTERNALS$1].status; + } + + /** + * Convenience property representing if the request ended normally + */ + get ok() { + return this[INTERNALS$1].status >= 200 && this[INTERNALS$1].status < 300; + } + + get redirected() { + return this[INTERNALS$1].counter > 0; + } + + get statusText() { + return this[INTERNALS$1].statusText; + } + + get headers() { + return this[INTERNALS$1].headers; + } + + /** + * Clone this response + * + * @return Response + */ + clone() { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected + }); + } +} + +Body.mixIn(Response.prototype); + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true } +}); + +Object.defineProperty(Response.prototype, Symbol.toStringTag, { + value: 'Response', + writable: false, + enumerable: false, + configurable: true +}); + +const INTERNALS$2 = Symbol('Request internals'); + +// fix an issue where "format", "parse" aren't a named export for node <10 +const parse_url = Url.parse; +const format_url = Url.format; + +const streamDestructionSupported = 'destroy' in Stream.Readable.prototype; + +/** + * Check if a value is an instance of Request. + * + * @param Mixed input + * @return Boolean + */ +function isRequest(input) { + return typeof input === 'object' && typeof input[INTERNALS$2] === 'object'; +} + +function isAbortSignal(signal) { + const proto = signal && typeof signal === 'object' && Object.getPrototypeOf(signal); + return !!(proto && proto.constructor.name === 'AbortSignal'); +} + +/** + * Request class + * + * @param Mixed input Url or Request instance + * @param Object init Custom options + * @return Void + */ +class Request { + constructor(input) { + let init = arguments.length > 1 && arguments[1] !== undefined ? arguments[1] : {}; + + let parsedURL; + + // normalize input + if (!isRequest(input)) { + if (input && input.href) { + // in order to support Node.js' Url objects; though WHATWG's URL objects + // will fall into this branch also (since their `toString()` will return + // `href` property anyway) + parsedURL = parse_url(input.href); + } else { + // coerce input to a string before attempting to parse + parsedURL = parse_url(`${input}`); + } + input = {}; + } else { + parsedURL = parse_url(input.url); + } + + let method = init.method || input.method || 'GET'; + method = method.toUpperCase(); + + if ((init.body != null || isRequest(input) && input.body !== null) && (method === 'GET' || method === 'HEAD')) { + throw new TypeError('Request with GET/HEAD method cannot have body'); + } + + let inputBody = init.body != null ? init.body : isRequest(input) && input.body !== null ? 
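// [editor's note] A small sketch of how the Response class above behaves,
// assuming the Body mixin defined earlier in this module:
//
//     const res = new Response('{"ok":true}', { status: 201, url: 'https://example.test/' });
//     res.ok;           // true  -- 200 <= status < 300
//     res.statusText;   // 'Created' -- falls back to http.STATUS_CODES[201]
//     res.redirected;   // false -- counter is undefined, so not > 0
//     res.clone();      // re-wraps the body stream via clone(this)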
clone(input) : null; + + Body.call(this, inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0 + }); + + const headers = new Headers(init.headers || input.headers || {}); + + if (inputBody != null && !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody); + if (contentType) { + headers.append('Content-Type', contentType); + } + } + + let signal = isRequest(input) ? input.signal : null; + if ('signal' in init) signal = init.signal; + + if (signal != null && !isAbortSignal(signal)) { + throw new TypeError('Expected signal to be an instanceof AbortSignal'); + } + + this[INTERNALS$2] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal + }; + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow : input.follow !== undefined ? input.follow : 20; + this.compress = init.compress !== undefined ? init.compress : input.compress !== undefined ? input.compress : true; + this.counter = init.counter || input.counter || 0; + this.agent = init.agent || input.agent; + } + + get method() { + return this[INTERNALS$2].method; + } + + get url() { + return format_url(this[INTERNALS$2].parsedURL); + } + + get headers() { + return this[INTERNALS$2].headers; + } + + get redirect() { + return this[INTERNALS$2].redirect; + } + + get signal() { + return this[INTERNALS$2].signal; + } + + /** + * Clone this request + * + * @return Request + */ + clone() { + return new Request(this); + } +} + +Body.mixIn(Request.prototype); + +Object.defineProperty(Request.prototype, Symbol.toStringTag, { + value: 'Request', + writable: false, + enumerable: false, + configurable: true +}); + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true } +}); + +/** + * Convert a Request to Node.js http request options. 
+ * + * @param Request A Request instance + * @return Object The options object to be passed to http.request + */ +function getNodeRequestOptions(request) { + const parsedURL = request[INTERNALS$2].parsedURL; + const headers = new Headers(request[INTERNALS$2].headers); + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*'); + } + + // Basic fetch + if (!parsedURL.protocol || !parsedURL.hostname) { + throw new TypeError('Only absolute URLs are supported'); + } + + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported'); + } + + if (request.signal && request.body instanceof Stream.Readable && !streamDestructionSupported) { + throw new Error('Cancellation of streamed requests with AbortSignal is not supported in node < 8'); + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + let contentLengthValue = null; + if (request.body == null && /^(POST|PUT)$/i.test(request.method)) { + contentLengthValue = '0'; + } + if (request.body != null) { + const totalBytes = getTotalBytes(request); + if (typeof totalBytes === 'number') { + contentLengthValue = String(totalBytes); + } + } + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue); + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', 'node-fetch/1.0 (+https://github.com/bitinn/node-fetch)'); + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate'); + } + + let agent = request.agent; + if (typeof agent === 'function') { + agent = agent(parsedURL); + } + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close'); + } + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + return Object.assign({}, parsedURL, { + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent + }); +} + +/** + * abort-error.js + * + * AbortError interface for cancelled requests + */ + +/** + * Create AbortError instance + * + * @param String message Error message for human + * @return AbortError + */ +function AbortError(message) { + Error.call(this, message); + + this.type = 'aborted'; + this.message = message; + + // hide custom error implementation details from end-users + Error.captureStackTrace(this, this.constructor); +} + +AbortError.prototype = Object.create(Error.prototype); +AbortError.prototype.constructor = AbortError; +AbortError.prototype.name = 'AbortError'; + +// fix an issue where "PassThrough", "resolve" aren't a named export for node <10 +const PassThrough$1 = Stream.PassThrough; +const resolve_url = Url.resolve; + +/** + * Fetch function + * + * @param Mixed url Absolute url or Request instance + * @param Object opts Fetch options + * @return Promise + */ +function fetch(url, opts) { + + // allow custom promise + if (!fetch.Promise) { + throw new Error('native promise missing, set fetch.Promise to your favorite alternative'); + } + + Body.Promise = fetch.Promise; + + // wrap http.request into fetch + return new fetch.Promise(function (resolve, reject) { + // build request object + const request = new Request(url, opts); + const options = getNodeRequestOptions(request); + + const send = (options.protocol === 'https:' ? 
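// [editor's note] What getNodeRequestOptions() above contributes for a typical
// request (a sketch read off the code, not captured output): it injects
// Accept, Content-Length, User-Agent, Accept-Encoding and Connection unless
// already present, then flattens everything onto the parsed URL:
//
//     const opts = getNodeRequestOptions(new Request('https://example.test/u', {
//       method: 'POST',
//       body: 'x=1'                    // string body -> Content-Length '3'
//     }));
//     opts.method;                     // 'POST'
//     opts.headers['Content-Length'];  // ['3'] -- values stay arrays; only Host is flattened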
https : http).request; + const signal = request.signal; + + let response = null; + + const abort = function abort() { + let error = new AbortError('The user aborted a request.'); + reject(error); + if (request.body && request.body instanceof Stream.Readable) { + request.body.destroy(error); + } + if (!response || !response.body) return; + response.body.emit('error', error); + }; + + if (signal && signal.aborted) { + abort(); + return; + } + + const abortAndFinalize = function abortAndFinalize() { + abort(); + finalize(); + }; + + // send request + const req = send(options); + let reqTimeout; + + if (signal) { + signal.addEventListener('abort', abortAndFinalize); + } + + function finalize() { + req.abort(); + if (signal) signal.removeEventListener('abort', abortAndFinalize); + clearTimeout(reqTimeout); + } + + if (request.timeout) { + req.once('socket', function (socket) { + reqTimeout = setTimeout(function () { + reject(new FetchError(`network timeout at: ${request.url}`, 'request-timeout')); + finalize(); + }, request.timeout); + }); + } + + req.on('error', function (err) { + reject(new FetchError(`request to ${request.url} failed, reason: ${err.message}`, 'system', err)); + finalize(); + }); + + req.on('response', function (res) { + clearTimeout(reqTimeout); + + const headers = createHeadersLenient(res.headers); + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location'); + + // HTTP fetch step 5.3 + const locationURL = location === null ? null : resolve_url(request.url, location); + + // HTTP fetch step 5.5 + switch (request.redirect) { + case 'error': + reject(new FetchError(`uri requested responds with a redirect, redirect mode is set to error: ${request.url}`, 'no-redirect')); + finalize(); + return; + case 'manual': + // node-fetch-specific step: make manual redirect a bit easier to use by setting the Location header value to the resolved URL. + if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL); + } catch (err) { + // istanbul ignore next: nodejs server prevent invalid response headers, we can't test this through normal request + reject(err); + } + } + break; + case 'follow': + // HTTP-redirect fetch step 2 + if (locationURL === null) { + break; + } + + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${request.url}`, 'max-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. 
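// [editor's note] The abort/timeout wiring above is driven purely by options
// (a sketch; assumes an AbortController implementation is available):
//
//     const ac = new AbortController();
//     setTimeout(() => ac.abort(), 100);
//     fetch('https://example.test/slow', { signal: ac.signal, timeout: 5000 })
//       .catch(err => err.name);  // 'AbortError' if the signal fires first,
//                                 // FetchError 'request-timeout' if 5s elapses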
+ const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + size: request.size + }; + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && request.body && getTotalBytes(request) === null) { + reject(new FetchError('Cannot follow redirect with body being a readable stream', 'unsupported-redirect')); + finalize(); + return; + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || (res.statusCode === 301 || res.statusCode === 302) && request.method === 'POST') { + requestOpts.method = 'GET'; + requestOpts.body = undefined; + requestOpts.headers.delete('content-length'); + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))); + finalize(); + return; + } + } + + // prepare response + res.once('end', function () { + if (signal) signal.removeEventListener('abort', abortAndFinalize); + }); + let body = res.pipe(new PassThrough$1()); + + const response_options = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter + }; + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding'); + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. no content response (204) + // 5. content not modified response (304) + if (!request.compress || request.method === 'HEAD' || codings === null || res.statusCode === 204 || res.statusCode === 304) { + response = new Response(body, response_options); + resolve(response); + return; + } + + // For Node v6+ + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. 
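// [editor's note] The redirect switch above gives the three modes their
// node-fetch semantics (illustrative, read off the code):
//
//     fetch(url, { redirect: 'error' })   // rejects: FetchError 'no-redirect'
//     fetch(url, { redirect: 'manual' })  // resolves with the 3xx response;
//                                         // Location rewritten to an absolute URL
//     fetch(url, { redirect: 'follow', follow: 5 })  // recurses up to 5 hops,
//                                         // then FetchError 'max-redirect'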
+ const zlibOptions = { + flush: zlib.Z_SYNC_FLUSH, + finishFlush: zlib.Z_SYNC_FLUSH + }; + + // for gzip + if (codings == 'gzip' || codings == 'x-gzip') { + body = body.pipe(zlib.createGunzip(zlibOptions)); + response = new Response(body, response_options); + resolve(response); + return; + } + + // for deflate + if (codings == 'deflate' || codings == 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new PassThrough$1()); + raw.once('data', function (chunk) { + // see http://stackoverflow.com/questions/37519828 + if ((chunk[0] & 0x0F) === 0x08) { + body = body.pipe(zlib.createInflate()); + } else { + body = body.pipe(zlib.createInflateRaw()); + } + response = new Response(body, response_options); + resolve(response); + }); + return; + } + + // for br + if (codings == 'br' && typeof zlib.createBrotliDecompress === 'function') { + body = body.pipe(zlib.createBrotliDecompress()); + response = new Response(body, response_options); + resolve(response); + return; + } + + // otherwise, use response as-is + response = new Response(body, response_options); + resolve(response); + }); + + writeToStream(req, request); + }); +} +/** + * Redirect code matching + * + * @param Number code Status code + * @return Boolean + */ +fetch.isRedirect = function (code) { + return code === 301 || code === 302 || code === 303 || code === 307 || code === 308; +}; + +// expose Promise +fetch.Promise = global.Promise; + +module.exports = exports = fetch; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = exports; +exports.Headers = Headers; +exports.Request = Request; +exports.Response = Response; +exports.FetchError = FetchError; + + +/***/ }), +/* 455 */, +/* 456 */, +/* 457 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +/*eslint-disable max-len,no-use-before-define*/ + +var common = __webpack_require__(740); +var YAMLException = __webpack_require__(556); +var Mark = __webpack_require__(93); +var DEFAULT_SAFE_SCHEMA = __webpack_require__(723); +var DEFAULT_FULL_SCHEMA = __webpack_require__(910); + + +var _hasOwnProperty = Object.prototype.hasOwnProperty; + + +var CONTEXT_FLOW_IN = 1; +var CONTEXT_FLOW_OUT = 2; +var CONTEXT_BLOCK_IN = 3; +var CONTEXT_BLOCK_OUT = 4; + + +var CHOMPING_CLIP = 1; +var CHOMPING_STRIP = 2; +var CHOMPING_KEEP = 3; + + +var PATTERN_NON_PRINTABLE = /[\x00-\x08\x0B\x0C\x0E-\x1F\x7F-\x84\x86-\x9F\uFFFE\uFFFF]|[\uD800-\uDBFF](?![\uDC00-\uDFFF])|(?:[^\uD800-\uDBFF]|^)[\uDC00-\uDFFF]/; +var PATTERN_NON_ASCII_LINE_BREAKS = /[\x85\u2028\u2029]/; +var PATTERN_FLOW_INDICATORS = /[,\[\]\{\}]/; +var PATTERN_TAG_HANDLE = /^(?:!|!!|![a-z\-]+!)$/i; +var PATTERN_TAG_URI = /^(?:!|[^,\[\]\{\}])(?:%[0-9a-f]{2}|[0-9a-z\-#;\/\?:@&=\+\$,_\.!~\*'\(\)\[\]])*$/i; + + +function _class(obj) { return Object.prototype.toString.call(obj); } + +function is_EOL(c) { + return (c === 0x0A/* LF */) || (c === 0x0D/* CR */); +} + +function is_WHITE_SPACE(c) { + return (c === 0x09/* Tab */) || (c === 0x20/* Space */); +} + +function is_WS_OR_EOL(c) { + return (c === 0x09/* Tab */) || + (c === 0x20/* Space */) || + (c === 0x0A/* LF */) || + (c === 0x0D/* CR */); +} + +function is_FLOW_INDICATOR(c) { + return c === 0x2C/* , */ || + c === 0x5B/* [ */ || + c === 0x5D/* ] */ || + c === 0x7B/* { */ || + c === 0x7D/* } */; +} + +function fromHexCode(c) { + var lc; + + if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { + return c - 0x30; + } + + /*eslint-disable no-bitwise*/ 
+ lc = c | 0x20; + + if ((0x61/* a */ <= lc) && (lc <= 0x66/* f */)) { + return lc - 0x61 + 10; + } + + return -1; +} + +function escapedHexLen(c) { + if (c === 0x78/* x */) { return 2; } + if (c === 0x75/* u */) { return 4; } + if (c === 0x55/* U */) { return 8; } + return 0; +} + +function fromDecimalCode(c) { + if ((0x30/* 0 */ <= c) && (c <= 0x39/* 9 */)) { + return c - 0x30; + } + + return -1; +} + +function simpleEscapeSequence(c) { + /* eslint-disable indent */ + return (c === 0x30/* 0 */) ? '\x00' : + (c === 0x61/* a */) ? '\x07' : + (c === 0x62/* b */) ? '\x08' : + (c === 0x74/* t */) ? '\x09' : + (c === 0x09/* Tab */) ? '\x09' : + (c === 0x6E/* n */) ? '\x0A' : + (c === 0x76/* v */) ? '\x0B' : + (c === 0x66/* f */) ? '\x0C' : + (c === 0x72/* r */) ? '\x0D' : + (c === 0x65/* e */) ? '\x1B' : + (c === 0x20/* Space */) ? ' ' : + (c === 0x22/* " */) ? '\x22' : + (c === 0x2F/* / */) ? '/' : + (c === 0x5C/* \ */) ? '\x5C' : + (c === 0x4E/* N */) ? '\x85' : + (c === 0x5F/* _ */) ? '\xA0' : + (c === 0x4C/* L */) ? '\u2028' : + (c === 0x50/* P */) ? '\u2029' : ''; +} + +function charFromCodepoint(c) { + if (c <= 0xFFFF) { + return String.fromCharCode(c); + } + // Encode UTF-16 surrogate pair + // https://en.wikipedia.org/wiki/UTF-16#Code_points_U.2B010000_to_U.2B10FFFF + return String.fromCharCode( + ((c - 0x010000) >> 10) + 0xD800, + ((c - 0x010000) & 0x03FF) + 0xDC00 + ); +} + +var simpleEscapeCheck = new Array(256); // integer, for fast access +var simpleEscapeMap = new Array(256); +for (var i = 0; i < 256; i++) { + simpleEscapeCheck[i] = simpleEscapeSequence(i) ? 1 : 0; + simpleEscapeMap[i] = simpleEscapeSequence(i); +} + + +function State(input, options) { + this.input = input; + + this.filename = options['filename'] || null; + this.schema = options['schema'] || DEFAULT_FULL_SCHEMA; + this.onWarning = options['onWarning'] || null; + this.legacy = options['legacy'] || false; + this.json = options['json'] || false; + this.listener = options['listener'] || null; + + this.implicitTypes = this.schema.compiledImplicit; + this.typeMap = this.schema.compiledTypeMap; + + this.length = input.length; + this.position = 0; + this.line = 0; + this.lineStart = 0; + this.lineIndent = 0; + + this.documents = []; + + /* + this.version; + this.checkLineBreaks; + this.tagMap; + this.anchorMap; + this.tag; + this.anchor; + this.kind; + this.result;*/ + +} + + +function generateError(state, message) { + return new YAMLException( + message, + new Mark(state.filename, state.input, state.position, state.line, (state.position - state.lineStart))); +} + +function throwError(state, message) { + throw generateError(state, message); +} + +function throwWarning(state, message) { + if (state.onWarning) { + state.onWarning.call(null, generateError(state, message)); + } +} + + +var directiveHandlers = { + + YAML: function handleYamlDirective(state, name, args) { + + var match, major, minor; + + if (state.version !== null) { + throwError(state, 'duplication of %YAML directive'); + } + + if (args.length !== 1) { + throwError(state, 'YAML directive accepts exactly one argument'); + } + + match = /^([0-9]+)\.([0-9]+)$/.exec(args[0]); + + if (match === null) { + throwError(state, 'ill-formed argument of the YAML directive'); + } + + major = parseInt(match[1], 10); + minor = parseInt(match[2], 10); + + if (major !== 1) { + throwError(state, 'unacceptable YAML version of the document'); + } + + state.version = args[0]; + state.checkLineBreaks = (minor < 2); + + if (minor !== 1 && minor !== 2) { + throwWarning(state, 
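// [editor's note] charFromCodepoint() above is a String.fromCodePoint stand-in
// that builds UTF-16 surrogate pairs by hand, e.g.:
//
//     charFromCodepoint(0x41);      // 'A' (BMP: plain fromCharCode)
//     charFromCodepoint(0x1F600);   // '\uD83D\uDE00' (astral: high + low surrogate)
//
// and the simpleEscapeMap table next to it is what later resolves
// double-quoted escapes such as \n, \t and \e.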
'unsupported YAML version of the document'); + } }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] - } - }], - createCard: ["POST /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] - } - }], - createColumn: ["POST /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] - } - }], - createForAuthenticatedUser: ["POST /user/projects", { - mediaType: { - previews: ["inertia"] + + TAG: function handleTagDirective(state, name, args) { + + var handle, prefix; + + if (args.length !== 2) { + throwError(state, 'TAG directive accepts exactly two arguments'); + } + + handle = args[0]; + prefix = args[1]; + + if (!PATTERN_TAG_HANDLE.test(handle)) { + throwError(state, 'ill-formed tag handle (first argument) of the TAG directive'); + } + + if (_hasOwnProperty.call(state.tagMap, handle)) { + throwError(state, 'there is a previously declared suffix for "' + handle + '" tag handle'); + } + + if (!PATTERN_TAG_URI.test(prefix)) { + throwError(state, 'ill-formed tag prefix (second argument) of the TAG directive'); + } + + state.tagMap[handle] = prefix; + } +}; + + +function captureSegment(state, start, end, checkJson) { + var _position, _length, _character, _result; + + if (start < end) { + _result = state.input.slice(start, end); + + if (checkJson) { + for (_position = 0, _length = _result.length; _position < _length; _position += 1) { + _character = _result.charCodeAt(_position); + if (!(_character === 0x09 || + (0x20 <= _character && _character <= 0x10FFFF))) { + throwError(state, 'expected valid JSON character'); + } } - }], - createForOrg: ["POST /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] + } else if (PATTERN_NON_PRINTABLE.test(_result)) { + throwError(state, 'the stream contains non-printable characters'); + } + + state.result += _result; + } +} + +function mergeMappings(state, destination, source, overridableKeys) { + var sourceKeys, key, index, quantity; + + if (!common.isObject(source)) { + throwError(state, 'cannot merge mappings; the provided source object is unacceptable'); + } + + sourceKeys = Object.keys(source); + + for (index = 0, quantity = sourceKeys.length; index < quantity; index += 1) { + key = sourceKeys[index]; + + if (!_hasOwnProperty.call(destination, key)) { + destination[key] = source[key]; + overridableKeys[key] = true; + } + } +} + +function storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, startLine, startPos) { + var index, quantity; + + // The output is a plain object here, so keys can only be strings. + // We need to convert keyNode to a string, but doing so can hang the process + // (deeply nested arrays that explode exponentially using aliases). 
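// [editor's note] The %YAML and %TAG directive handlers above accept documents
// like this illustrative one (not from the patch):
//
//     %YAML 1.2
//     %TAG !e! tag:example.com,2020:
//     ---
//     !e!widget {size: 3}
//
// A duplicated %YAML directive, a major version other than 1, or a handle not
// matching PATTERN_TAG_HANDLE is a hard error; an unknown minor only warns.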
+ if (Array.isArray(keyNode)) { + keyNode = Array.prototype.slice.call(keyNode); + + for (index = 0, quantity = keyNode.length; index < quantity; index += 1) { + if (Array.isArray(keyNode[index])) { + throwError(state, 'nested arrays are not supported inside keys'); } - }], - createForRepo: ["POST /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] + + if (typeof keyNode === 'object' && _class(keyNode[index]) === '[object Object]') { + keyNode[index] = '[object Object]'; } - }], - delete: ["DELETE /projects/{project_id}", { - mediaType: { - previews: ["inertia"] + } + } + + // Avoid code execution in load() via toString property + // (still use its own toString for arrays, timestamps, + // and whatever user schema extensions happen to have @@toStringTag) + if (typeof keyNode === 'object' && _class(keyNode) === '[object Object]') { + keyNode = '[object Object]'; + } + + + keyNode = String(keyNode); + + if (_result === null) { + _result = {}; + } + + if (keyTag === 'tag:yaml.org,2002:merge') { + if (Array.isArray(valueNode)) { + for (index = 0, quantity = valueNode.length; index < quantity; index += 1) { + mergeMappings(state, _result, valueNode[index], overridableKeys); } - }], - deleteCard: ["DELETE /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] + } else { + mergeMappings(state, _result, valueNode, overridableKeys); + } + } else { + if (!state.json && + !_hasOwnProperty.call(overridableKeys, keyNode) && + _hasOwnProperty.call(_result, keyNode)) { + state.line = startLine || state.line; + state.position = startPos || state.position; + throwError(state, 'duplicated mapping key'); + } + _result[keyNode] = valueNode; + delete overridableKeys[keyNode]; + } + + return _result; +} + +function readLineBreak(state) { + var ch; + + ch = state.input.charCodeAt(state.position); + + if (ch === 0x0A/* LF */) { + state.position++; + } else if (ch === 0x0D/* CR */) { + state.position++; + if (state.input.charCodeAt(state.position) === 0x0A/* LF */) { + state.position++; + } + } else { + throwError(state, 'a line break is expected'); + } + + state.line += 1; + state.lineStart = state.position; +} + +function skipSeparationSpace(state, allowComments, checkIndent) { + var lineBreaks = 0, + ch = state.input.charCodeAt(state.position); + + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + if (allowComments && ch === 0x23/* # */) { + do { + ch = state.input.charCodeAt(++state.position); + } while (ch !== 0x0A/* LF */ && ch !== 0x0D/* CR */ && ch !== 0); + } + + if (is_EOL(ch)) { + readLineBreak(state); + + ch = state.input.charCodeAt(state.position); + lineBreaks++; + state.lineIndent = 0; + + while (ch === 0x20/* Space */) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); } - }], - deleteColumn: ["DELETE /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] + } else { + break; + } + } + + if (checkIndent !== -1 && lineBreaks !== 0 && state.lineIndent < checkIndent) { + throwWarning(state, 'deficient indentation'); + } + + return lineBreaks; +} + +function testDocumentSeparator(state) { + var _position = state.position, + ch; + + ch = state.input.charCodeAt(_position); + + // Condition state.position === state.lineStart is tested + // in parent on each call, for efficiency. No needs to test here again. + if ((ch === 0x2D/* - */ || ch === 0x2E/* . 
*/) && + ch === state.input.charCodeAt(_position + 1) && + ch === state.input.charCodeAt(_position + 2)) { + + _position += 3; + + ch = state.input.charCodeAt(_position); + + if (ch === 0 || is_WS_OR_EOL(ch)) { + return true; + } + } + + return false; +} + +function writeFoldedLines(state, count) { + if (count === 1) { + state.result += ' '; + } else if (count > 1) { + state.result += common.repeat('\n', count - 1); + } +} + + +function readPlainScalar(state, nodeIndent, withinFlowCollection) { + var preceding, + following, + captureStart, + captureEnd, + hasPendingContent, + _line, + _lineStart, + _lineIndent, + _kind = state.kind, + _result = state.result, + ch; + + ch = state.input.charCodeAt(state.position); + + if (is_WS_OR_EOL(ch) || + is_FLOW_INDICATOR(ch) || + ch === 0x23/* # */ || + ch === 0x26/* & */ || + ch === 0x2A/* * */ || + ch === 0x21/* ! */ || + ch === 0x7C/* | */ || + ch === 0x3E/* > */ || + ch === 0x27/* ' */ || + ch === 0x22/* " */ || + ch === 0x25/* % */ || + ch === 0x40/* @ */ || + ch === 0x60/* ` */) { + return false; + } + + if (ch === 0x3F/* ? */ || ch === 0x2D/* - */) { + following = state.input.charCodeAt(state.position + 1); + + if (is_WS_OR_EOL(following) || + withinFlowCollection && is_FLOW_INDICATOR(following)) { + return false; + } + } + + state.kind = 'scalar'; + state.result = ''; + captureStart = captureEnd = state.position; + hasPendingContent = false; + + while (ch !== 0) { + if (ch === 0x3A/* : */) { + following = state.input.charCodeAt(state.position + 1); + + if (is_WS_OR_EOL(following) || + withinFlowCollection && is_FLOW_INDICATOR(following)) { + break; } - }], - get: ["GET /projects/{project_id}", { - mediaType: { - previews: ["inertia"] + + } else if (ch === 0x23/* # */) { + preceding = state.input.charCodeAt(state.position - 1); + + if (is_WS_OR_EOL(preceding)) { + break; } - }], - getCard: ["GET /projects/columns/cards/{card_id}", { - mediaType: { - previews: ["inertia"] + + } else if ((state.position === state.lineStart && testDocumentSeparator(state)) || + withinFlowCollection && is_FLOW_INDICATOR(ch)) { + break; + + } else if (is_EOL(ch)) { + _line = state.line; + _lineStart = state.lineStart; + _lineIndent = state.lineIndent; + skipSeparationSpace(state, false, -1); + + if (state.lineIndent >= nodeIndent) { + hasPendingContent = true; + ch = state.input.charCodeAt(state.position); + continue; + } else { + state.position = captureEnd; + state.line = _line; + state.lineStart = _lineStart; + state.lineIndent = _lineIndent; + break; } - }], - getColumn: ["GET /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] + } + + if (hasPendingContent) { + captureSegment(state, captureStart, captureEnd, false); + writeFoldedLines(state, state.line - _line); + captureStart = captureEnd = state.position; + hasPendingContent = false; + } + + if (!is_WHITE_SPACE(ch)) { + captureEnd = state.position + 1; + } + + ch = state.input.charCodeAt(++state.position); + } + + captureSegment(state, captureStart, captureEnd, false); + + if (state.result) { + return true; + } + + state.kind = _kind; + state.result = _result; + return false; +} + +function readSingleQuotedScalar(state, nodeIndent) { + var ch, + captureStart, captureEnd; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x27/* ' */) { + return false; + } + + state.kind = 'scalar'; + state.result = ''; + state.position++; + captureStart = captureEnd = state.position; + + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 0x27/* ' */) { + 
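// [editor's note] readPlainScalar() above is why YAML indicator characters
// may not *start* an unquoted value (illustrative YAML):
//
//     a: plain text        # fine: plain scalar
//     b: "@here"           # '@' cannot begin a plain scalar, so it is quoted
//     c: 50% done          # '%' is only special at the start of the scalar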
captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); + + if (ch === 0x27/* ' */) { + captureStart = state.position; + state.position++; + captureEnd = state.position; + } else { + return true; } - }], - getPermissionForUser: ["GET /projects/{project_id}/collaborators/{username}/permission", { - mediaType: { - previews: ["inertia"] + + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = state.position; + + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, 'unexpected end of the document within a single quoted scalar'); + + } else { + state.position++; + captureEnd = state.position; + } + } + + throwError(state, 'unexpected end of the stream within a single quoted scalar'); +} + +function readDoubleQuotedScalar(state, nodeIndent) { + var captureStart, + captureEnd, + hexLength, + hexResult, + tmp, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x22/* " */) { + return false; + } + + state.kind = 'scalar'; + state.result = ''; + state.position++; + captureStart = captureEnd = state.position; + + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + if (ch === 0x22/* " */) { + captureSegment(state, captureStart, state.position, true); + state.position++; + return true; + + } else if (ch === 0x5C/* \ */) { + captureSegment(state, captureStart, state.position, true); + ch = state.input.charCodeAt(++state.position); + + if (is_EOL(ch)) { + skipSeparationSpace(state, false, nodeIndent); + + // TODO: rework to inline fn with no type cast? + } else if (ch < 256 && simpleEscapeCheck[ch]) { + state.result += simpleEscapeMap[ch]; + state.position++; + + } else if ((tmp = escapedHexLen(ch)) > 0) { + hexLength = tmp; + hexResult = 0; + + for (; hexLength > 0; hexLength--) { + ch = state.input.charCodeAt(++state.position); + + if ((tmp = fromHexCode(ch)) >= 0) { + hexResult = (hexResult << 4) + tmp; + + } else { + throwError(state, 'expected hexadecimal character'); + } + } + + state.result += charFromCodepoint(hexResult); + + state.position++; + + } else { + throwError(state, 'unknown escape sequence'); } - }], - listCards: ["GET /projects/columns/{column_id}/cards", { - mediaType: { - previews: ["inertia"] + + captureStart = captureEnd = state.position; + + } else if (is_EOL(ch)) { + captureSegment(state, captureStart, captureEnd, true); + writeFoldedLines(state, skipSeparationSpace(state, false, nodeIndent)); + captureStart = captureEnd = state.position; + + } else if (state.position === state.lineStart && testDocumentSeparator(state)) { + throwError(state, 'unexpected end of the document within a double quoted scalar'); + + } else { + state.position++; + captureEnd = state.position; + } + } + + throwError(state, 'unexpected end of the stream within a double quoted scalar'); +} + +function readFlowCollection(state, nodeIndent) { + var readNext = true, + _line, + _tag = state.tag, + _result, + _anchor = state.anchor, + following, + terminator, + isPair, + isExplicitPair, + isMapping, + overridableKeys = {}, + keyNode, + keyTag, + valueNode, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch === 0x5B/* [ */) { + terminator = 0x5D;/* ] */ + isMapping = false; + _result = []; + } else if (ch === 0x7B/* { */) { + terminator = 0x7D;/* } */ + isMapping = true; + _result = {}; + } else { + return false; + } + + if (state.anchor 
!== null) { + state.anchorMap[state.anchor] = _result; + } + + ch = state.input.charCodeAt(++state.position); + + while (ch !== 0) { + skipSeparationSpace(state, true, nodeIndent); + + ch = state.input.charCodeAt(state.position); + + if (ch === terminator) { + state.position++; + state.tag = _tag; + state.anchor = _anchor; + state.kind = isMapping ? 'mapping' : 'sequence'; + state.result = _result; + return true; + } else if (!readNext) { + throwError(state, 'missed comma between flow collection entries'); + } + + keyTag = keyNode = valueNode = null; + isPair = isExplicitPair = false; + + if (ch === 0x3F/* ? */) { + following = state.input.charCodeAt(state.position + 1); + + if (is_WS_OR_EOL(following)) { + isPair = isExplicitPair = true; + state.position++; + skipSeparationSpace(state, true, nodeIndent); } - }], - listCollaborators: ["GET /projects/{project_id}/collaborators", { - mediaType: { - previews: ["inertia"] + } + + _line = state.line; + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + keyTag = state.tag; + keyNode = state.result; + skipSeparationSpace(state, true, nodeIndent); + + ch = state.input.charCodeAt(state.position); + + if ((isExplicitPair || state.line === _line) && ch === 0x3A/* : */) { + isPair = true; + ch = state.input.charCodeAt(++state.position); + skipSeparationSpace(state, true, nodeIndent); + composeNode(state, nodeIndent, CONTEXT_FLOW_IN, false, true); + valueNode = state.result; + } + + if (isMapping) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode); + } else if (isPair) { + _result.push(storeMappingPair(state, null, overridableKeys, keyTag, keyNode, valueNode)); + } else { + _result.push(keyNode); + } + + skipSeparationSpace(state, true, nodeIndent); + + ch = state.input.charCodeAt(state.position); + + if (ch === 0x2C/* , */) { + readNext = true; + ch = state.input.charCodeAt(++state.position); + } else { + readNext = false; + } + } + + throwError(state, 'unexpected end of the stream within a flow collection'); +} + +function readBlockScalar(state, nodeIndent) { + var captureStart, + folding, + chomping = CHOMPING_CLIP, + didReadContent = false, + detectedIndent = false, + textIndent = nodeIndent, + emptyLines = 0, + atMoreIndented = false, + tmp, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch === 0x7C/* | */) { + folding = false; + } else if (ch === 0x3E/* > */) { + folding = true; + } else { + return false; + } + + state.kind = 'scalar'; + state.result = ''; + + while (ch !== 0) { + ch = state.input.charCodeAt(++state.position); + + if (ch === 0x2B/* + */ || ch === 0x2D/* - */) { + if (CHOMPING_CLIP === chomping) { + chomping = (ch === 0x2B/* + */) ? 
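// [editor's note] readFlowCollection() above covers the [..] and {..} forms,
// including single-pair mappings inside a sequence (illustrative YAML):
//
//     seq: [a, b, {k: v}]
//     map: {x: 1, y: 2}
//     pairs: [one: 1, two: 2]   # each 'k: v' entry becomes a one-key mapping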
CHOMPING_KEEP : CHOMPING_STRIP; + } else { + throwError(state, 'repeat of a chomping mode identifier'); } - }], - listColumns: ["GET /projects/{project_id}/columns", { - mediaType: { - previews: ["inertia"] + + } else if ((tmp = fromDecimalCode(ch)) >= 0) { + if (tmp === 0) { + throwError(state, 'bad explicit indentation width of a block scalar; it cannot be less than one'); + } else if (!detectedIndent) { + textIndent = nodeIndent + tmp - 1; + detectedIndent = true; + } else { + throwError(state, 'repeat of an indentation width identifier'); } - }], - listForOrg: ["GET /orgs/{org}/projects", { - mediaType: { - previews: ["inertia"] + + } else { + break; + } + } + + if (is_WHITE_SPACE(ch)) { + do { ch = state.input.charCodeAt(++state.position); } + while (is_WHITE_SPACE(ch)); + + if (ch === 0x23/* # */) { + do { ch = state.input.charCodeAt(++state.position); } + while (!is_EOL(ch) && (ch !== 0)); + } + } + + while (ch !== 0) { + readLineBreak(state); + state.lineIndent = 0; + + ch = state.input.charCodeAt(state.position); + + while ((!detectedIndent || state.lineIndent < textIndent) && + (ch === 0x20/* Space */)) { + state.lineIndent++; + ch = state.input.charCodeAt(++state.position); + } + + if (!detectedIndent && state.lineIndent > textIndent) { + textIndent = state.lineIndent; + } + + if (is_EOL(ch)) { + emptyLines++; + continue; + } + + // End of the scalar. + if (state.lineIndent < textIndent) { + + // Perform the chomping. + if (chomping === CHOMPING_KEEP) { + state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + } else if (chomping === CHOMPING_CLIP) { + if (didReadContent) { // i.e. only if the scalar is not empty. + state.result += '\n'; + } } - }], - listForRepo: ["GET /repos/{owner}/{repo}/projects", { - mediaType: { - previews: ["inertia"] + + // Break this `while` cycle and go to the funciton's epilogue. + break; + } + + // Folded style: use fancy rules to handle line breaks. + if (folding) { + + // Lines starting with white space characters (more-indented lines) are not folded. + if (is_WHITE_SPACE(ch)) { + atMoreIndented = true; + // except for the first content line (cf. Example 8.1) + state.result += common.repeat('\n', didReadContent ? 1 + emptyLines : emptyLines); + + // End of more-indented block. + } else if (atMoreIndented) { + atMoreIndented = false; + state.result += common.repeat('\n', emptyLines + 1); + + // Just one line break - perceive as the same line. + } else if (emptyLines === 0) { + if (didReadContent) { // i.e. only if we have already read some scalar content. + state.result += ' '; + } + + // Several line breaks - perceive as different lines. + } else { + state.result += common.repeat('\n', emptyLines); } - }], - listForUser: ["GET /users/{username}/projects", { - mediaType: { - previews: ["inertia"] + + // Literal style: just add exact number of line breaks between content lines. + } else { + // Keep all line breaks except the header line break. + state.result += common.repeat('\n', didReadContent ? 
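// [editor's note] The chomping constants above map onto the block-scalar
// header indicators (illustrative YAML):
//
//     clip: |     # CHOMPING_CLIP (default): keep one trailing newline
//       text
//     strip: |-   # CHOMPING_STRIP: drop trailing newlines
//       text
//     keep: |+    # CHOMPING_KEEP: keep them all
//       text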
1 + emptyLines : emptyLines); + } + + didReadContent = true; + detectedIndent = true; + emptyLines = 0; + captureStart = state.position; + + while (!is_EOL(ch) && (ch !== 0)) { + ch = state.input.charCodeAt(++state.position); + } + + captureSegment(state, captureStart, state.position, false); + } + + return true; +} + +function readBlockSequence(state, nodeIndent) { + var _line, + _tag = state.tag, + _anchor = state.anchor, + _result = [], + following, + detected = false, + ch; + + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; + } + + ch = state.input.charCodeAt(state.position); + + while (ch !== 0) { + + if (ch !== 0x2D/* - */) { + break; + } + + following = state.input.charCodeAt(state.position + 1); + + if (!is_WS_OR_EOL(following)) { + break; + } + + detected = true; + state.position++; + + if (skipSeparationSpace(state, true, -1)) { + if (state.lineIndent <= nodeIndent) { + _result.push(null); + ch = state.input.charCodeAt(state.position); + continue; } - }], - moveCard: ["POST /projects/columns/cards/{card_id}/moves", { - mediaType: { - previews: ["inertia"] + } + + _line = state.line; + composeNode(state, nodeIndent, CONTEXT_BLOCK_IN, false, true); + _result.push(state.result); + skipSeparationSpace(state, true, -1); + + ch = state.input.charCodeAt(state.position); + + if ((state.line === _line || state.lineIndent > nodeIndent) && (ch !== 0)) { + throwError(state, 'bad indentation of a sequence entry'); + } else if (state.lineIndent < nodeIndent) { + break; + } + } + + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = 'sequence'; + state.result = _result; + return true; + } + return false; +} + +function readBlockMapping(state, nodeIndent, flowIndent) { + var following, + allowCompact, + _line, + _pos, + _tag = state.tag, + _anchor = state.anchor, + _result = {}, + overridableKeys = {}, + keyTag = null, + keyNode = null, + valueNode = null, + atExplicitKey = false, + detected = false, + ch; + + if (state.anchor !== null) { + state.anchorMap[state.anchor] = _result; + } + + ch = state.input.charCodeAt(state.position); + + while (ch !== 0) { + following = state.input.charCodeAt(state.position + 1); + _line = state.line; // Save the current line. + _pos = state.position; + + // + // Explicit notation case. There are two separate blocks: + // first for the key (denoted by "?") and second for the value (denoted by ":") + // + if ((ch === 0x3F/* ? */ || ch === 0x3A/* : */) && is_WS_OR_EOL(following)) { + + if (ch === 0x3F/* ? */) { + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); + keyTag = keyNode = valueNode = null; + } + + detected = true; + atExplicitKey = true; + allowCompact = true; + + } else if (atExplicitKey) { + // i.e. 0x3A/* : */ === character after the explicit key. + atExplicitKey = false; + allowCompact = true; + + } else { + throwError(state, 'incomplete explicit mapping pair; a key node is missed; or followed by a non-tabulated empty line'); } - }], - moveColumn: ["POST /projects/columns/{column_id}/moves", { - mediaType: { - previews: ["inertia"] + + state.position += 1; + ch = following; + + // + // Implicit notation case. Flow-style node as the key first, then ":", and the value. 
+ // + } else if (composeNode(state, flowIndent, CONTEXT_FLOW_OUT, false, true)) { + + if (state.line === _line) { + ch = state.input.charCodeAt(state.position); + + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + if (ch === 0x3A/* : */) { + ch = state.input.charCodeAt(++state.position); + + if (!is_WS_OR_EOL(ch)) { + throwError(state, 'a whitespace character is expected after the key-value separator within a block mapping'); + } + + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); + keyTag = keyNode = valueNode = null; + } + + detected = true; + atExplicitKey = false; + allowCompact = false; + keyTag = state.tag; + keyNode = state.result; + + } else if (detected) { + throwError(state, 'can not read an implicit mapping pair; a colon is missed'); + + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; // Keep the result of `composeNode`. + } + + } else if (detected) { + throwError(state, 'can not read a block mapping entry; a multiline key may not be an implicit key'); + + } else { + state.tag = _tag; + state.anchor = _anchor; + return true; // Keep the result of `composeNode`. } - }], - removeCollaborator: ["DELETE /projects/{project_id}/collaborators/{username}", { - mediaType: { - previews: ["inertia"] + + } else { + break; // Reading is done. Go to the epilogue. + } + + // + // Common reading code for both explicit and implicit notations. + // + if (state.line === _line || state.lineIndent > nodeIndent) { + if (composeNode(state, nodeIndent, CONTEXT_BLOCK_OUT, true, allowCompact)) { + if (atExplicitKey) { + keyNode = state.result; + } else { + valueNode = state.result; + } } - }], - reviewUserPermissionLevel: ["GET /projects/{project_id}/collaborators/{username}/permission", { - mediaType: { - previews: ["inertia"] + + if (!atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, valueNode, _line, _pos); + keyTag = keyNode = valueNode = null; } - }, { - renamed: ["projects", "getPermissionForUser"] - }], - update: ["PATCH /projects/{project_id}", { - mediaType: { - previews: ["inertia"] + + skipSeparationSpace(state, true, -1); + ch = state.input.charCodeAt(state.position); + } + + if (state.lineIndent > nodeIndent && (ch !== 0)) { + throwError(state, 'bad indentation of a mapping entry'); + } else if (state.lineIndent < nodeIndent) { + break; + } + } + + // + // Epilogue. + // + + // Special case: last mapping's node contains only the key in explicit notation. + if (atExplicitKey) { + storeMappingPair(state, _result, overridableKeys, keyTag, keyNode, null); + } + + // Expose the resulting mapping. + if (detected) { + state.tag = _tag; + state.anchor = _anchor; + state.kind = 'mapping'; + state.result = _result; + } + + return detected; +} + +function readTagProperty(state) { + var _position, + isVerbatim = false, + isNamed = false, + tagHandle, + tagName, + ch; + + ch = state.input.charCodeAt(state.position); + + if (ch !== 0x21/* ! */) return false; + + if (state.tag !== null) { + throwError(state, 'duplication of a tag property'); + } + + ch = state.input.charCodeAt(++state.position); + + if (ch === 0x3C/* < */) { + isVerbatim = true; + ch = state.input.charCodeAt(++state.position); + + } else if (ch === 0x21/* ! 
*/) {
+    isNamed = true;
+    tagHandle = '!!';
+    ch = state.input.charCodeAt(++state.position);
+
+  } else {
+    tagHandle = '!';
+  }
+
+  _position = state.position;
+
+  if (isVerbatim) {
+    do { ch = state.input.charCodeAt(++state.position); }
+    while (ch !== 0 && ch !== 0x3E/* > */);
+
+    if (state.position < state.length) {
+      tagName = state.input.slice(_position, state.position);
+      ch = state.input.charCodeAt(++state.position);
+    } else {
+      throwError(state, 'unexpected end of the stream within a verbatim tag');
+    }
+  } else {
+    while (ch !== 0 && !is_WS_OR_EOL(ch)) {
+
+      if (ch === 0x21/* ! */) {
+        if (!isNamed) {
+          tagHandle = state.input.slice(_position - 1, state.position + 1);
+
+          if (!PATTERN_TAG_HANDLE.test(tagHandle)) {
+            throwError(state, 'named tag handle cannot contain such characters');
+          }
+
+          isNamed = true;
+          _position = state.position + 1;
+        } else {
+          throwError(state, 'tag suffix cannot contain exclamation marks');
+        }
      }
+
+      ch = state.input.charCodeAt(++state.position);
+    }
+
+    tagName = state.input.slice(_position, state.position);
+
+    if (PATTERN_FLOW_INDICATORS.test(tagName)) {
+      throwError(state, 'tag suffix cannot contain flow indicator characters');
+    }
+  }
+
+  if (tagName && !PATTERN_TAG_URI.test(tagName)) {
+    throwError(state, 'tag name cannot contain such characters: ' + tagName);
+  }
+
+  if (isVerbatim) {
+    state.tag = tagName;
+
+  } else if (_hasOwnProperty.call(state.tagMap, tagHandle)) {
+    state.tag = state.tagMap[tagHandle] + tagName;
+
+  } else if (tagHandle === '!') {
+    state.tag = '!' + tagName;
+
+  } else if (tagHandle === '!!') {
+    state.tag = 'tag:yaml.org,2002:' + tagName;
+
+  } else {
+    throwError(state, 'undeclared tag handle "' + tagHandle + '"');
+  }
+
+  return true;
+}
+
+function readAnchorProperty(state) {
+  var _position,
+      ch;
+
+  ch = state.input.charCodeAt(state.position);
+
+  if (ch !== 0x26/* & */) return false;
+
+  if (state.anchor !== null) {
+    throwError(state, 'duplication of an anchor property');
+  }
+
+  ch = state.input.charCodeAt(++state.position);
+  _position = state.position;
+
+  while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
+    ch = state.input.charCodeAt(++state.position);
+  }
+
+  if (state.position === _position) {
+    throwError(state, 'name of an anchor node must contain at least one character');
+  }
+
+  state.anchor = state.input.slice(_position, state.position);
+  return true;
+}
+
+function readAlias(state) {
+  var _position, alias,
+      ch;
+
+  ch = state.input.charCodeAt(state.position);
+
+  if (ch !== 0x2A/* * */) return false;
+
+  ch = state.input.charCodeAt(++state.position);
+  _position = state.position;
+
+  while (ch !== 0 && !is_WS_OR_EOL(ch) && !is_FLOW_INDICATOR(ch)) {
+    ch = state.input.charCodeAt(++state.position);
+  }
+
+  if (state.position === _position) {
+    throwError(state, 'name of an alias node must contain at least one character');
+  }
+
+  alias = state.input.slice(_position, state.position);
+
+  if (!state.anchorMap.hasOwnProperty(alias)) {
+    throwError(state, 'unidentified alias "' + alias + '"');
+  }
+
+  state.result = state.anchorMap[alias];
+  skipSeparationSpace(state, true, -1);
+  return true;
+}
+
+function composeNode(state, parentIndent, nodeContext, allowToSeek, allowCompact) {
+  var allowBlockStyles,
+      allowBlockScalars,
+      allowBlockCollections,
+      indentStatus = 1, // 1: this>parent, 0: this=parent, -1: this<parent
+      atNewLine  = false,
+      hasContent = false,
+      typeIndex,
+      typeQuantity,
+      type,
+      flowIndent,
+      blockIndent;
+
+  if (state.listener !== null) {
+    state.listener('open', state);
+  }
+
+  state.tag    = null;
+  state.anchor = null;
+  state.kind   = null;
+  state.result = null;
+
+  allowBlockStyles = allowBlockScalars = allowBlockCollections =
+    CONTEXT_BLOCK_OUT === nodeContext ||
+    CONTEXT_BLOCK_IN  === nodeContext;
+
+  if (allowToSeek) {
+    if (skipSeparationSpace(state, true, -1)) {
+      atNewLine = true;
+
+      if (state.lineIndent > parentIndent) {
+        indentStatus = 1;
+      } else if (state.lineIndent === 
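// [editor's note] The three property readers above handle YAML node
// properties (illustrative):
//
//     base: &A {x: 1}       # readAnchorProperty stores 'A' in anchorMap
//     copy: *A              # readAlias resolves it to the same object
//     n: !!int "42"         # readTagProperty expands '!!' to tag:yaml.org,2002: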
parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; } - }], - updateColumn: ["PATCH /projects/columns/{column_id}", { - mediaType: { - previews: ["inertia"] + } + } + + if (indentStatus === 1) { + while (readTagProperty(state) || readAnchorProperty(state)) { + if (skipSeparationSpace(state, true, -1)) { + atNewLine = true; + allowBlockCollections = allowBlockStyles; + + if (state.lineIndent > parentIndent) { + indentStatus = 1; + } else if (state.lineIndent === parentIndent) { + indentStatus = 0; + } else if (state.lineIndent < parentIndent) { + indentStatus = -1; + } + } else { + allowBlockCollections = false; } - }] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - create: ["POST /repos/{owner}/{repo}/pulls"], - createComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { - renamed: ["pulls", "createReviewComment"] - }], - createReplyForReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies"], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - createReviewCommentReply: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", {}, { - renamed: ["pulls", "createReplyForReviewComment"] - }], - createReviewRequest: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { - renamed: ["pulls", "requestReviewers"] - }], - deleteComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { - renamed: ["pulls", "deleteReviewComment"] - }], - deletePendingReview: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - deleteReviewComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - deleteReviewRequest: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { - renamed: ["pulls", "removeRequestedReviewers"] - }], - dismissReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals"], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { - renamed: ["pulls", "getReviewComment"] - }], - getCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", {}, { - renamed: ["pulls", "listCommentsForReview"] - }], - getReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", {}, { - renamed: ["pulls", "listReviewComments"] - }], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments", {}, { - renamed: ["pulls", "listReviewCommentsForRepo"] - }], - listCommentsForReview: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments"], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - listReviewComments: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/comments"], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviewRequests: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", {}, { - renamed: ["pulls", 
"listRequestedReviewers"] - }], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: ["DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - requestReviewers: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers"], - submitReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events"], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch", { - mediaType: { - previews: ["lydian"] + } + } + + if (allowBlockCollections) { + allowBlockCollections = atNewLine || allowCompact; + } + + if (indentStatus === 1 || CONTEXT_BLOCK_OUT === nodeContext) { + if (CONTEXT_FLOW_IN === nodeContext || CONTEXT_FLOW_OUT === nodeContext) { + flowIndent = parentIndent; + } else { + flowIndent = parentIndent + 1; + } + + blockIndent = state.position - state.lineStart; + + if (indentStatus === 1) { + if (allowBlockCollections && + (readBlockSequence(state, blockIndent) || + readBlockMapping(state, blockIndent, flowIndent)) || + readFlowCollection(state, flowIndent)) { + hasContent = true; + } else { + if ((allowBlockScalars && readBlockScalar(state, flowIndent)) || + readSingleQuotedScalar(state, flowIndent) || + readDoubleQuotedScalar(state, flowIndent)) { + hasContent = true; + + } else if (readAlias(state)) { + hasContent = true; + + if (state.tag !== null || state.anchor !== null) { + throwError(state, 'alias node should not have any properties'); + } + + } else if (readPlainScalar(state, flowIndent, CONTEXT_FLOW_IN === nodeContext)) { + hasContent = true; + + if (state.tag === null) { + state.tag = '?'; + } + } + + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } } - }], - updateComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}", {}, { - renamed: ["pulls", "updateReviewComment"] - }], - updateReview: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}"], - updateReviewComment: ["PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}"] - }, - rateLimit: { - get: ["GET /rate_limit"] - }, - reactions: { - createForCommitComment: ["POST /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + } else if (indentStatus === 0) { + // Special case: block sequences are allowed to have same indentation level as the parent. + // http://www.yaml.org/spec/1.2/spec.html#id2799784 + hasContent = allowBlockCollections && readBlockSequence(state, blockIndent); + } + } + + if (state.tag !== null && state.tag !== '!') { + if (state.tag === '?') { + // Implicit resolving is not allowed for non-scalar types, and '?' + // non-specific tag is only automatically assigned to plain scalars. + // + // We only need to check kind conformity in case user explicitly assigns '?' + // tag, for example like this: "! [0]" + // + if (state.result !== null && state.kind !== 'scalar') { + throwError(state, 'unacceptable node kind for ! 
tag; it should be "scalar", not "' + state.kind + '"'); } - }], - createForIssue: ["POST /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + + for (typeIndex = 0, typeQuantity = state.implicitTypes.length; typeIndex < typeQuantity; typeIndex += 1) { + type = state.implicitTypes[typeIndex]; + + if (type.resolve(state.result)) { // `state.result` updated in resolver if matched + state.result = type.construct(state.result); + state.tag = type.tag; + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } + break; + } } - }], - createForIssueComment: ["POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + } else if (_hasOwnProperty.call(state.typeMap[state.kind || 'fallback'], state.tag)) { + type = state.typeMap[state.kind || 'fallback'][state.tag]; + + if (state.result !== null && type.kind !== state.kind) { + throwError(state, 'unacceptable node kind for !<' + state.tag + '> tag; it should be "' + type.kind + '", not "' + state.kind + '"'); } - }], - createForPullRequestReviewComment: ["POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + + if (!type.resolve(state.result)) { // `state.result` updated in resolver if matched + throwError(state, 'cannot resolve a node with !<' + state.tag + '> explicit tag'); + } else { + state.result = type.construct(state.result); + if (state.anchor !== null) { + state.anchorMap[state.anchor] = state.result; + } } - }], - createForTeamDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + } else { + throwError(state, 'unknown tag !<' + state.tag + '>'); + } + } + + if (state.listener !== null) { + state.listener('close', state); + } + return state.tag !== null || state.anchor !== null || hasContent; +} + +function readDocument(state) { + var documentStart = state.position, + _position, + directiveName, + directiveArgs, + hasDirectives = false, + ch; + + state.version = null; + state.checkLineBreaks = state.legacy; + state.tagMap = {}; + state.anchorMap = {}; + + while ((ch = state.input.charCodeAt(state.position)) !== 0) { + skipSeparationSpace(state, true, -1); + + ch = state.input.charCodeAt(state.position); + + if (state.lineIndent > 0 || ch !== 0x25/* % */) { + break; + } + + hasDirectives = true; + ch = state.input.charCodeAt(++state.position); + _position = state.position; + + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); + } + + directiveName = state.input.slice(_position, state.position); + directiveArgs = []; + + if (directiveName.length < 1) { + throwError(state, 'directive name must not be less than one character in length'); + } + + while (ch !== 0) { + while (is_WHITE_SPACE(ch)) { + ch = state.input.charCodeAt(++state.position); } - }], - createForTeamDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + + if (ch === 0x23/* # */) { + do { ch = state.input.charCodeAt(++state.position); } + while (ch !== 0 && !is_EOL(ch)); + break; } - }], - delete: ["DELETE /reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + + if (is_EOL(ch)) break; + + _position = state.position; + + while (ch !== 0 && !is_WS_OR_EOL(ch)) { + ch = state.input.charCodeAt(++state.position); } - }, { - 
renamed: ["reactions", "deleteLegacy"] - }], - deleteForCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + + directiveArgs.push(state.input.slice(_position, state.position)); + } + + if (ch !== 0) readLineBreak(state); + + if (_hasOwnProperty.call(directiveHandlers, directiveName)) { + directiveHandlers[directiveName](state, directiveName, directiveArgs); + } else { + throwWarning(state, 'unknown document directive "' + directiveName + '"'); + } + } + + skipSeparationSpace(state, true, -1); + + if (state.lineIndent === 0 && + state.input.charCodeAt(state.position) === 0x2D/* - */ && + state.input.charCodeAt(state.position + 1) === 0x2D/* - */ && + state.input.charCodeAt(state.position + 2) === 0x2D/* - */) { + state.position += 3; + skipSeparationSpace(state, true, -1); + + } else if (hasDirectives) { + throwError(state, 'directives end mark is expected'); + } + + composeNode(state, state.lineIndent - 1, CONTEXT_BLOCK_OUT, false, true); + skipSeparationSpace(state, true, -1); + + if (state.checkLineBreaks && + PATTERN_NON_ASCII_LINE_BREAKS.test(state.input.slice(documentStart, state.position))) { + throwWarning(state, 'non-ASCII line breaks are interpreted as content'); + } + + state.documents.push(state.result); + + if (state.position === state.lineStart && testDocumentSeparator(state)) { + + if (state.input.charCodeAt(state.position) === 0x2E/* . */) { + state.position += 3; + skipSeparationSpace(state, true, -1); + } + return; + } + + if (state.position < (state.length - 1)) { + throwError(state, 'end of the stream or a document separator is expected'); + } else { + return; + } +} + + +function loadDocuments(input, options) { + input = String(input); + options = options || {}; + + if (input.length !== 0) { + + // Add tailing `\n` if not exists + if (input.charCodeAt(input.length - 1) !== 0x0A/* LF */ && + input.charCodeAt(input.length - 1) !== 0x0D/* CR */) { + input += '\n'; + } + + // Strip BOM + if (input.charCodeAt(0) === 0xFEFF) { + input = input.slice(1); + } + } + + var state = new State(input, options); + + var nullpos = input.indexOf('\0'); + + if (nullpos !== -1) { + state.position = nullpos; + throwError(state, 'null byte is not allowed in input'); + } + + // Use 0 as string terminator. That significantly simplifies bounds check. 
+ state.input += '\0'; + + while (state.input.charCodeAt(state.position) === 0x20/* Space */) { + state.lineIndent += 1; + state.position += 1; + } + + while (state.position < (state.length - 1)) { + readDocument(state); + } + + return state.documents; +} + + +function loadAll(input, iterator, options) { + if (iterator !== null && typeof iterator === 'object' && typeof options === 'undefined') { + options = iterator; + iterator = null; + } + + var documents = loadDocuments(input, options); + + if (typeof iterator !== 'function') { + return documents; + } + + for (var index = 0, length = documents.length; index < length; index += 1) { + iterator(documents[index]); + } +} + + +function load(input, options) { + var documents = loadDocuments(input, options); + + if (documents.length === 0) { + /*eslint-disable no-undefined*/ + return undefined; + } else if (documents.length === 1) { + return documents[0]; + } + throw new YAMLException('expected a single document in the stream, but found more'); +} + + +function safeLoadAll(input, iterator, options) { + if (typeof iterator === 'object' && iterator !== null && typeof options === 'undefined') { + options = iterator; + iterator = null; + } + + return loadAll(input, iterator, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); +} + + +function safeLoad(input, options) { + return load(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); +} + + +module.exports.loadAll = loadAll; +module.exports.load = load; +module.exports.safeLoadAll = safeLoadAll; +module.exports.safeLoad = safeLoad; + + +/***/ }), +/* 458 */, +/* 459 */, +/* 460 */, +/* 461 */, +/* 462 */ +/***/ (function(module) { + +"use strict"; + + +// See http://www.robvanderwoude.com/escapechars.php +const metaCharsRegExp = /([()\][%!^"`<>&|;, *?])/g; + +function escapeCommand(arg) { + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + return arg; +} + +function escapeArgument(arg, doubleEscapeMetaChars) { + // Convert to string + arg = `${arg}`; + + // Algorithm below is based on https://qntm.org/cmd + + // Sequence of backslashes followed by a double quote: + // double up all the backslashes and escape the double quote + arg = arg.replace(/(\\*)"/g, '$1$1\\"'); + + // Sequence of backslashes followed by the end of the string + // (which will become a double quote later): + // double up all the backslashes + arg = arg.replace(/(\\*)$/, '$1$1'); + + // All other backslashes occur literally + + // Quote the whole thing: + arg = `"${arg}"`; + + // Escape meta chars + arg = arg.replace(metaCharsRegExp, '^$1'); + + // Double escape meta chars if necessary + if (doubleEscapeMetaChars) { + arg = arg.replace(metaCharsRegExp, '^$1'); + } + + return arg; +} + +module.exports.command = escapeCommand; +module.exports.argument = escapeArgument; + + +/***/ }), +/* 463 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; } + +var deprecation = __webpack_require__(692); +var once = _interopDefault(__webpack_require__(49)); + +const logOnce = once(deprecation => console.warn(deprecation)); +/** + * Error with extra properties to help with debugging + */ + +class RequestError extends Error { + constructor(message, statusCode, options) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = "HttpError"; + this.status = statusCode; + Object.defineProperty(this, "code", { + get() { + logOnce(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); + return statusCode; } - }], - deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + + }); + this.headers = options.headers || {}; // redact request credentials without mutating original request options + + const requestCopy = Object.assign({}, options.request); + + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") + }); + } + + requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit + // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications + .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended + // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header + .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; + } + +} + +exports.RequestError = RequestError; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 464 */, +/* 465 */, +/* 466 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var Stream = __webpack_require__(794).Stream + +module.exports = legacy + +function legacy (fs) { + return { + ReadStream: ReadStream, + WriteStream: WriteStream + } + + function ReadStream (path, options) { + if (!(this instanceof ReadStream)) return new ReadStream(path, options); + + Stream.call(this); + + var self = this; + + this.path = path; + this.fd = null; + this.readable = true; + this.paused = false; + + this.flags = 'r'; + this.mode = 438; /*=0666*/ + this.bufferSize = 64 * 1024; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.encoding) this.setEncoding(this.encoding); + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); } - }], - deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + if (this.end === undefined) { + this.end = Infinity; + } else if ('number' !== typeof this.end) { + throw TypeError('end must be a Number'); } - }], - deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + + if (this.start > this.end) { + throw new Error('start must be <= end'); } - }], - 
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + + this.pos = this.start; + } + + if (this.fd !== null) { + process.nextTick(function() { + self._read(); + }); + return; + } + + fs.open(this.path, this.flags, this.mode, function (err, fd) { + if (err) { + self.emit('error', err); + self.readable = false; + return; } - }], - deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + + self.fd = fd; + self.emit('open', fd); + self._read(); + }) + } + + function WriteStream (path, options) { + if (!(this instanceof WriteStream)) return new WriteStream(path, options); + + Stream.call(this); + + this.path = path; + this.fd = null; + this.writable = true; + + this.flags = 'w'; + this.encoding = 'binary'; + this.mode = 438; /*=0666*/ + this.bytesWritten = 0; + + options = options || {}; + + // Mixin options into this + var keys = Object.keys(options); + for (var index = 0, length = keys.length; index < length; index++) { + var key = keys[index]; + this[key] = options[key]; + } + + if (this.start !== undefined) { + if ('number' !== typeof this.start) { + throw TypeError('start must be a Number'); } - }], - deleteLegacy: ["DELETE /reactions/{reaction_id}", { - mediaType: { - previews: ["squirrel-girl"] + if (this.start < 0) { + throw new Error('start must be >= zero'); } - }, { - deprecated: "octokit.reactions.deleteLegacy() is deprecated, see https://developer.github.com/v3/reactions/#delete-a-reaction-legacy" - }], - listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + + this.pos = this.start; + } + + this.busy = false; + this._queue = []; + + if (this.fd === null) { + this._open = fs.open; + this._queue.push([this._open, this.path, this.flags, this.mode, undefined]); + this.flush(); + } + } +} + + +/***/ }), +/* 467 */, +/* 468 */, +/* 469 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const jsonFile = __webpack_require__(666) + +module.exports = { + // jsonfile exports + readJson: jsonFile.readFile, + readJsonSync: jsonFile.readFileSync, + writeJson: jsonFile.writeFile, + writeJsonSync: jsonFile.writeFileSync +} + + +/***/ }), +/* 470 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const command_1 = __webpack_require__(431); +const file_command_1 = __webpack_require__(102); +const utils_1 = __webpack_require__(394); +const os = __importStar(__webpack_require__(87)); +const path = __importStar(__webpack_require__(622)); +/** + * The code to exit an action + */ +var ExitCode; +(function (ExitCode) { + /** + * A code indicating that the action was successful + */ + ExitCode[ExitCode["Success"] = 0] = "Success"; + /** + * A code indicating that the action was a failure + */ + ExitCode[ExitCode["Failure"] = 1] = "Failure"; +})(ExitCode = exports.ExitCode || (exports.ExitCode = {})); +//----------------------------------------------------------------------- +// Variables +//----------------------------------------------------------------------- +/** + * Sets env variable for this action and future actions in the job + * @param name the name of the variable to set + * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function exportVariable(name, val) { + const convertedVal = utils_1.toCommandValue(val); + process.env[name] = convertedVal; + const filePath = process.env['GITHUB_ENV'] || ''; + if (filePath) { + const delimiter = '_GitHubActionsFileCommandDelimeter_'; + const commandValue = `${name}<<${delimiter}${os.EOL}${convertedVal}${os.EOL}${delimiter}`; + file_command_1.issueCommand('ENV', commandValue); + } + else { + command_1.issueCommand('set-env', { name }, convertedVal); + } +} +exports.exportVariable = exportVariable; +/** + * Registers a secret which will get masked from logs + * @param secret value of the secret + */ +function setSecret(secret) { + command_1.issueCommand('add-mask', {}, secret); +} +exports.setSecret = setSecret; +/** + * Prepends inputPath to the PATH (for this action and future actions) + * @param inputPath + */ +function addPath(inputPath) { + const filePath = process.env['GITHUB_PATH'] || ''; + if (filePath) { + file_command_1.issueCommand('PATH', inputPath); + } + else { + command_1.issueCommand('add-path', {}, inputPath); + } + process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`; +} +exports.addPath = addPath; +/** + * Gets the value of an input. The value is also trimmed. + * + * @param name name of the input to get + * @param options optional. See InputOptions. + * @returns string + */ +function getInput(name, options) { + const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || ''; + if (options && options.required && !val) { + throw new Error(`Input required and not supplied: ${name}`); + } + return val.trim(); +} +exports.getInput = getInput; +/** + * Sets the value of an output. + * + * @param name name of the output to set + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function setOutput(name, value) { + command_1.issueCommand('set-output', { name }, value); +} +exports.setOutput = setOutput; +/** + * Enables or disables the echoing of commands into stdout for the rest of the step. + * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set. + * + */ +function setCommandEcho(enabled) { + command_1.issue('echo', enabled ? 'on' : 'off'); +} +exports.setCommandEcho = setCommandEcho; +//----------------------------------------------------------------------- +// Results +//----------------------------------------------------------------------- +/** + * Sets the action status to failed. + * When the action exits it will be with an exit code of 1 + * @param message add error issue message + */ +function setFailed(message) { + process.exitCode = ExitCode.Failure; + error(message); +} +exports.setFailed = setFailed; +//----------------------------------------------------------------------- +// Logging Commands +//----------------------------------------------------------------------- +/** + * Gets whether Actions Step Debug is on or not + */ +function isDebug() { + return process.env['RUNNER_DEBUG'] === '1'; +} +exports.isDebug = isDebug; +/** + * Writes debug message to user log + * @param message debug message + */ +function debug(message) { + command_1.issueCommand('debug', {}, message); +} +exports.debug = debug; +/** + * Adds an error issue + * @param message error issue message. Errors will be converted to string via toString() + */ +function error(message) { + command_1.issue('error', message instanceof Error ? message.toString() : message); +} +exports.error = error; +/** + * Adds an warning issue + * @param message warning issue message. Errors will be converted to string via toString() + */ +function warning(message) { + command_1.issue('warning', message instanceof Error ? message.toString() : message); +} +exports.warning = warning; +/** + * Writes info to log with console.log. + * @param message info message + */ +function info(message) { + process.stdout.write(message + os.EOL); +} +exports.info = info; +/** + * Begin an output group. + * + * Output until the next `groupEnd` will be foldable in this group + * + * @param name The name of the output group + */ +function startGroup(name) { + command_1.issue('group', name); +} +exports.startGroup = startGroup; +/** + * End an output group. + */ +function endGroup() { + command_1.issue('endgroup'); +} +exports.endGroup = endGroup; +/** + * Wrap an asynchronous function call in a group. + * + * Returns the same type as the function itself. + * + * @param name The name of the group + * @param fn The function to wrap in the group + */ +function group(name, fn) { + return __awaiter(this, void 0, void 0, function* () { + startGroup(name); + let result; + try { + result = yield fn(); + } + finally { + endGroup(); + } + return result; + }); +} +exports.group = group; +//----------------------------------------------------------------------- +// Wrapper action state +//----------------------------------------------------------------------- +/** + * Saves state for current action, the state can only be retrieved by this action's post job execution. + * + * @param name name of the state to store + * @param value value to store. 
Non-string values will be converted to a string via JSON.stringify + */ +// eslint-disable-next-line @typescript-eslint/no-explicit-any +function saveState(name, value) { + command_1.issueCommand('save-state', { name }, value); +} +exports.saveState = saveState; +/** + * Gets the value of an state set by this action's main execution. + * + * @param name name of the state to get + * @returns string + */ +function getState(name) { + return process.env[`STATE_${name}`] || ''; +} +exports.getState = getState; +//# sourceMappingURL=core.js.map + +/***/ }), +/* 471 */, +/* 472 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const file = __webpack_require__(149) +const link = __webpack_require__(900) +const symlink = __webpack_require__(849) + +module.exports = { + // file + createFile: file.createFile, + createFileSync: file.createFileSync, + ensureFile: file.createFile, + ensureFileSync: file.createFileSync, + // link + createLink: link.createLink, + createLinkSync: link.createLinkSync, + ensureLink: link.createLink, + ensureLinkSync: link.createLinkSync, + // symlink + createSymlink: symlink.createSymlink, + createSymlinkSync: symlink.createSymlinkSync, + ensureSymlink: symlink.createSymlink, + ensureSymlinkSync: symlink.createSymlinkSync +} + + +/***/ }), +/* 473 */, +/* 474 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const fs = __webpack_require__(598) +const path = __webpack_require__(622) +const assert = __webpack_require__(357) + +const isWindows = (process.platform === 'win32') + +function defaults (options) { + const methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(m => { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 +} + +function rimraf (p, options, cb) { + let busyTries = 0 + + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') + assert.strictEqual(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === 'EBUSY' || er.code === 'ENOTEMPTY' || er.code === 'EPERM') && + busyTries < options.maxBusyTries) { + busyTries++ + const time = busyTries * 100 + // try again, with the same exact callback as this one. + return setTimeout(() => rimraf_(p, options, CB), time) } - }], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + + // already gone + if (er.code === 'ENOENT') er = null + } + + cb(er) + }) +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. 
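+// rimraf_ below implements strategy 1: lstat the path, try unlink first,
+// and fall back to rmdir (or the Windows EPERM fix-up) once the entry
+// turns out to be a directory.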
+function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, (er, st) => { + if (er && er.code === 'ENOENT') { + return cb(null) + } + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === 'EPERM' && isWindows) { + return fixWinEPERM(p, options, er, cb) + } + + if (st && st.isDirectory()) { + return rmdir(p, options, er, cb) + } + + options.unlink(p, er => { + if (er) { + if (er.code === 'ENOENT') { + return cb(null) + } + if (er.code === 'EPERM') { + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + } + if (er.code === 'EISDIR') { + return rmdir(p, options, er, cb) + } } - }], - listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", { - mediaType: { - previews: ["squirrel-girl"] + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.chmod(p, 0o666, er2 => { + if (er2) { + cb(er2.code === 'ENOENT' ? null : er) + } else { + options.stat(p, (er3, stats) => { + if (er3) { + cb(er3.code === 'ENOENT' ? null : er) + } else if (stats.isDirectory()) { + rmdir(p, options, er, cb) + } else { + options.unlink(p, cb) + } + }) + } + }) +} + +function fixWinEPERMSync (p, options, er) { + let stats + + assert(p) + assert(options) + + try { + options.chmodSync(p, 0o666) + } catch (er2) { + if (er2.code === 'ENOENT') { + return + } else { + throw er + } + } + + try { + stats = options.statSync(p) + } catch (er3) { + if (er3.code === 'ENOENT') { + return + } else { + throw er + } + } + + if (stats.isDirectory()) { + rmdirSync(p, options, er) + } else { + options.unlinkSync(p) + } +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. + options.rmdir(p, er => { + if (er && (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM')) { + rmkids(p, options, cb) + } else if (er && er.code === 'ENOTDIR') { + cb(originalEr) + } else { + cb(er) + } + }) +} + +function rmkids (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, (er, files) => { + if (er) return cb(er) + + let n = files.length + let errState + + if (n === 0) return options.rmdir(p, cb) + + files.forEach(f => { + rimraf(path.join(p, f), options, er => { + if (errState) { + return + } + if (er) return cb(errState = er) + if (--n === 0) { + options.rmdir(p, cb) + } + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + let st + + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.strictEqual(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.strictEqual(typeof options, 'object', 'rimraf: options should be object') + + try { + st = options.lstatSync(p) + } catch (er) { + if (er.code === 'ENOENT') { + return + } + + // Windows can EPERM on stat. Life is suffering. 
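+ // fixWinEPERMSync (defined above) chmods the entry to 0o666 and then
+ // removes it as a file or a directory, depending on what stat reports.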
+ if (er.code === 'EPERM' && isWindows) { + fixWinEPERMSync(p, options, er) + } + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) { + rmdirSync(p, options, null) + } else { + options.unlinkSync(p) + } + } catch (er) { + if (er.code === 'ENOENT') { + return + } else if (er.code === 'EPERM') { + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + } else if (er.code !== 'EISDIR') { + throw er + } + rmdirSync(p, options, er) + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === 'ENOTDIR') { + throw originalEr + } else if (er.code === 'ENOTEMPTY' || er.code === 'EEXIST' || er.code === 'EPERM') { + rmkidsSync(p, options) + } else if (er.code !== 'ENOENT') { + throw er + } + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) + + if (isWindows) { + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + const startTime = Date.now() + do { + try { + const ret = options.rmdirSync(p, options) + return ret + } catch {} + } while (Date.now() - startTime < 500) // give up after 500ms + } else { + const ret = options.rmdirSync(p, options) + return ret + } +} + +module.exports = rimraf +rimraf.sync = rimrafSync + + +/***/ }), +/* 475 */, +/* 476 */, +/* 477 */ +/***/ (function(module) { + +function stringify (obj, options = {}) { + const EOL = options.EOL || '\n' + + const str = JSON.stringify(obj, options ? options.replacer : null, options.spaces) + + return str.replace(/\n/g, EOL) + EOL +} + +function stripBom (content) { + // we do this because JSON.parse would convert it to a utf8 string if encoding wasn't specified + if (Buffer.isBuffer(content)) content = content.toString('utf8') + return content.replace(/^\uFEFF/, '') +} + +module.exports = { stringify, stripBom } + + +/***/ }), +/* 478 */, +/* 479 */, +/* 480 */, +/* 481 */, +/* 482 */, +/* 483 */, +/* 484 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { spawn } = __webpack_require__(129); +var assert = __webpack_require__(357); + +const { TimeoutError, logger } = __webpack_require__(79); +const fs = __webpack_require__(747); + +class ExitError extends Error { + constructor(message, code) { + super(message); + this.code = code; + } +} + +const FETCH_DEPTH = 10; + +const COMMON_ARGS = [ + "-c", + "user.name=GitHub", + "-c", + "user.email=noreply@github.com" +]; + +function git(cwd, ...args) { + const stdio = [ + "ignore", + "pipe", + logger.level === "trace" || logger.level === "debug" ? "inherit" : "pipe" + ]; + // the URL passed to the clone command could contain a password! 
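+ // (for that reason the joined string is only logged at debug level below,
+ // though it is still echoed in the Error/ExitError messages on failure)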
+ const command = `git ${args.join(" ")}`;
+ logger.debug("Executing", command);
+ return new Promise((resolve, reject) => {
+ const proc = spawn(
+ "git",
+ COMMON_ARGS.concat(args.filter(a => a !== null)),
+ { cwd, stdio }
+ );
+ const buffers = [];
+ proc.stdout.on("data", data => buffers.push(data));
+ proc.stderr.on("data", data => buffers.push(data));
+
+ proc.on("error", () => {
+ reject(new Error(`command failed: ${command}`));
+ });
+ proc.on("exit", code => {
+ const stdoutData = Buffer.concat(buffers);
+ if (code === 0) {
+ resolve(stdoutData.toString("utf8").trim());
+ } else {
+ reject(
+ new ExitError(
+ `command ${command} failed with code ${code}. Error Message: ${stdoutData}`,
+ code
+ )
+ );
 }
+ });
+ });
+}
+
+async function clone(from, to, branch) {
+ if (!fs.existsSync(to)) {
+ await git(
+ ".",
+ "clone",
+ "--quiet",
+ "--shallow-submodules",
+ "--no-tags",
+ "--branch",
+ branch,
+ from,
+ to
+ );
+ } else {
+ logger.warn(`Folder ${to} already exists. Won't clone`);
+ }
+}
+
+async function fetch(dir, branch) {
+ await git(
+ dir,
+ "fetch",
+ "--quiet",
+ "origin",
+ `${branch}:refs/remotes/origin/${branch}`
+ );
+}
+
+async function fetchUntilMergeBase(dir, branch, timeout) {
+ const maxTime = new Date().getTime() + timeout;
+ const ref = `refs/remotes/origin/${branch}`;
+ while (new Date().getTime() < maxTime) {
+ const base = await mergeBase(dir, "HEAD", ref);
+ if (base) {
+ const bases = [base];
+ const parents = await mergeCommits(dir, ref);
+ let fetchMore = false;
+ for (const parent of parents.flat()) {
+ const b = await mergeBase(dir, parent, ref);
+ if (b) {
+ if (!bases.includes(b)) {
+ bases.push(b);
+ }
+ } else {
+ // we found a commit which does not have a common ancestor with
+ // the branch we want to merge, so we need to fetch more
+ fetchMore = true;
+ break;
+ }
 }
+ if (!fetchMore) {
+ const commonBase = await mergeBase(dir, ...bases);
+ if (!commonBase) {
+ throw new Error(`failed to find common base for ${bases}`);
+ }
+ return commonBase;
+ }
+ }
+ await fetchDeepen(dir);
+ }
+ throw new TimeoutError();
+}
+
+async function fetchDeepen(dir) {
+ await git(dir, "fetch", "--quiet", "--deepen", FETCH_DEPTH);
+}
+
+async function mergeBase(dir, ...refs) {
+ if (refs.length === 1) {
+ return refs[0];
+ } else if (refs.length < 1) {
+ throw new Error("empty refs!");
+ }
+ let todo = refs;
+ try {
+ while (todo.length > 1) {
+ const base = await git(dir, "merge-base", todo[0], todo[1]);
+ todo = [base].concat(todo.slice(2));
+ }
+ return todo[0];
+ } catch (e) {
+ if (e instanceof ExitError && e.code === 1) {
+ return null;
+ } else {
+ throw e;
+ }
+ }
+}
+
+async function mergeCommits(dir, ref) {
+ return (await git(dir, "rev-list", "--parents", `${ref}..HEAD`))
+ .split(/\n/g)
+ .map(line => line.split(/ /g).slice(1))
+ .filter(commit => commit.length > 1);
+}
+
+async function merge(dir, group, repositoryName, branch) {
+ return await git(
+ dir,
+ "pull",
+ `https://github.com/${group}/${repositoryName}`,
+ branch
+ );
+}
+
+async function 
head(dir) { + return await git(dir, "show-ref", "--head", "-s", "/HEAD"); +} + +async function sha(dir, branch) { + return await git(dir, "show-ref", "-s", `refs/remotes/origin/${branch}`); +} + +async function rebase(dir, branch) { + return await git(dir, "rebase", "--quiet", "--autosquash", branch); +} + +async function push(dir, force, branch) { + return await git( + dir, + "push", + "--quiet", + force ? "--force-with-lease" : null, + "origin", + branch + ); +} + +async function doesBranchExist(octokit, owner, repo, branch) { + assert(owner, "owner is not defined"); + assert(repo, "repo is not defined"); + assert(branch, "branch is not defined"); + try { + const { status } = await octokit.repos.getBranch({ + owner, + repo, + branch + }); + return status == 200; + } catch (e) { + logger.warn( + `project github.com/${owner}/${repo}:${branch} does not exist. It's not necessarily an error.` + ); + return false; + } +} + +/** + * Checks if there is a pull request either from a forked project or the same project + * @param {Object} octokit instance + * @param {String} owner the repo owner or group + * @param {String} repo the repository name + * @param {String} branch the branch of the pull request to look for + * @param {String} fromAuthor the pull request author + */ +async function hasPullRequest(octokit, owner, repo, branch, fromAuthor) { + return ( + (await hasForkPullRequest(octokit, owner, repo, branch, fromAuthor)) || + (await hasOriginPullRequest(octokit, owner, repo, branch)) + ); +} + +/** + * Checks if there is a pull request from a forked project + * @param {Object} octokit instance + * @param {String} owner the repo owner or group + * @param {String} repo the repository name + * @param {String} branch the branch of the pull request to look for + * @param {String} fromAuthor the pull request author + */ +async function hasForkPullRequest(octokit, owner, repo, branch, fromAuthor) { + assert(owner, "owner is not defined"); + assert(repo, "repo is not defined"); + assert(branch, "branch is not defined"); + assert(fromAuthor, "fromAuthor is not defined"); + try { + const { status, data } = await octokit.pulls.list({ + owner, + repo, + state: "open", + head: `${fromAuthor}:${branch}` + }); + return status == 200 && data.length > 0; + } catch (e) { + logger.error( + `Error getting pull request list from https://api.github.com/repos/${owner}/${repo}/pulls?head=${fromAuthor}:${branch}&state=open'".` + ); + throw e; + } +} + +/** + * Checks if there is a pull request from the same project + * @param {Object} octokit instance + * @param {String} owner the repo owner or group + * @param {String} repo the repository name + * @param {String} branch the branch of the pull request to look for + */ +async function hasOriginPullRequest(octokit, owner, repo, branch) { + assert(owner, "owner is not defined"); + assert(repo, "repo is not defined"); + assert(branch, "branch is not defined"); + try { + const { status, data } = await octokit.pulls.list({ + owner, + repo, + state: "open", + head: `${owner}:${branch}` + }); + return status == 200 && data.length > 0; + } catch (e) { + logger.error( + `Error getting pull request list from https://api.github.com/repos/${owner}/${repo}/pulls?head=${owner}:${branch}&state=open'".` + ); + throw e; + } +} + +async function getForkedProject( + octokit, + owner, + repo, + wantedOwner, + page = 1, + per_page = 100 +) { + assert(owner, "owner is not defined"); + assert(repo, "repo is not defined"); + assert(wantedOwner, "wantedOwner is not defined"); + assert(page, 
"page is not defined"); + try { + const { status, data } = await octokit.repos.listForks({ + owner, + repo, + page + }); + if (status == 200) { + if (data && data.length > 0) { + const forkedProject = data.find( + forkedProject => forkedProject.owner.login === wantedOwner + ); + return forkedProject + ? forkedProject + : await getForkedProject( + octokit, + owner, + repo, + wantedOwner, + ++page, + per_page + ); + } else { + return undefined; } - }] - }, - repos: { - acceptInvitation: ["PATCH /user/repository_invitations/{invitation_id}"], - addAppAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addDeployKey: ["POST /repos/{owner}/{repo}/keys", {}, { - renamed: ["repos", "createDeployKey"] - }], - addProtectedBranchAdminEnforcement: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { - renamed: ["repos", "setAdminBranchProtection"] - }], - addProtectedBranchAppRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps", - renamed: ["repos", "addAppAccessRestrictions"] - }], - addProtectedBranchRequiredSignatures: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] + } + } catch (e) { + logger.error( + `Error getting forked project list from https://api.github.com/repos/${owner}/${repo}/forks?per_page=${per_page}&page=${page}'".` + ); + throw e; + } +} + +module.exports = { + ExitError, + git, + clone, + fetch, + fetchUntilMergeBase, + fetchDeepen, + mergeBase, + mergeCommits, + merge, + head, + sha, + rebase, + push, + doesBranchExist, + hasPullRequest, + getForkedProject +}; + + +/***/ }), +/* 485 */, +/* 486 */, +/* 487 */, +/* 488 */, +/* 489 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const path = __webpack_require__(622); +const which = __webpack_require__(814); +const pathKey = __webpack_require__(39)(); + +function resolveCommandAttempt(parsed, withoutPathExt) { + const cwd = process.cwd(); + const hasCustomCwd = parsed.options.cwd != null; + + // If a custom `cwd` was specified, we need to change the process cwd + // because `which` will do stat calls but does not support a custom cwd + if (hasCustomCwd) { + try { + process.chdir(parsed.options.cwd); + } catch (err) { + /* Empty */ + } + } + + let resolved; + + try { + resolved = which.sync(parsed.command, { + path: (parsed.options.env || process.env)[pathKey], + pathExt: withoutPathExt ? path.delimiter : undefined, + }); + } catch (e) { + /* Empty */ + } finally { + process.chdir(cwd); + } + + // If we successfully resolved, ensure that an absolute path is returned + // Note that when a custom `cwd` was used, we need to resolve to an absolute path based on it + if (resolved) { + resolved = path.resolve(hasCustomCwd ? 
parsed.options.cwd : '', resolved);
+ }
+
+ return resolved;
+}
+
+function resolveCommand(parsed) {
+ return resolveCommandAttempt(parsed) || resolveCommandAttempt(parsed, true);
+}
+
+module.exports = resolveCommand;
+
+
+/***/ }),
+/* 490 */,
+/* 491 */,
+/* 492 */,
+/* 493 */,
+/* 494 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+const os = __webpack_require__(87);
+const execa = __webpack_require__(955);
+
+// Reference: https://www.gaijin.at/en/lstwinver.php
+const names = new Map([
+ ['10.0', '10'],
+ ['6.3', '8.1'],
+ ['6.2', '8'],
+ ['6.1', '7'],
+ ['6.0', 'Vista'],
+ ['5.2', 'Server 2003'],
+ ['5.1', 'XP'],
+ ['5.0', '2000'],
+ ['4.9', 'ME'],
+ ['4.1', '98'],
+ ['4.0', '95']
+]);
+
+const windowsRelease = release => {
+ const version = /\d+\.\d/.exec(release || os.release());
+
+ if (release && !version) {
+ throw new Error('`release` argument doesn\'t match `n.n`');
+ }
+
+ const ver = (version || [])[0];
+
+ // Server 2008, 2012, 2016, and 2019 versions are ambiguous with desktop versions and must be detected at runtime.
+ // If `release` is omitted or we're on a Windows system, and the version number is an ambiguous version,
+ // then use `wmic` to get the OS caption: https://msdn.microsoft.com/en-us/library/aa394531(v=vs.85).aspx
+ // If `wmic` is obsolete (later versions of Windows 10), use PowerShell instead.
+ // If the resulting caption contains the year 2008, 2012, 2016 or 2019, it is a server version, so return a server OS name.
+ if ((!release || release === os.release()) && ['6.1', '6.2', '6.3', '10.0'].includes(ver)) {
+ let stdout;
+ try {
+ stdout = execa.sync('wmic', ['os', 'get', 'Caption']).stdout || '';
+ } catch (_) {
+ stdout = execa.sync('powershell', ['(Get-CimInstance -ClassName Win32_OperatingSystem).caption']).stdout || '';
+ }
+
+ const year = (stdout.match(/2008|2012|2016|2019/) || [])[0];
+
+ if (year) {
+ return `Server ${year}`;
+ }
+ }
+
+ return names.get(ver);
+};
+
+module.exports = windowsRelease;
+
+
+/***/ }),
+/* 495 */,
+/* 496 */,
+/* 497 */,
+/* 498 */,
+/* 499 */,
+/* 500 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const fs = __webpack_require__(598)
+const path = __webpack_require__(622)
+const mkdirs = __webpack_require__(727).mkdirs
+const pathExists = __webpack_require__(322).pathExists
+const utimesMillis = __webpack_require__(946).utimesMillis
+const stat = __webpack_require__(425)
+
+function copy (src, dest, opts, cb) {
+ if (typeof opts === 'function' && !cb) {
+ cb = opts
+ opts = {}
+ } else if (typeof opts === 'function') {
+ opts = { filter: opts }
+ }
+
+ cb = cb || function () {}
+ opts = opts || {}
+
+ opts.clobber = 'clobber' in opts ? !!opts.clobber : true // default to true for now
+ opts.overwrite = 'overwrite' in opts ? 
!!opts.overwrite : opts.clobber // overwrite falls back to clobber + + // Warn about using preserveTimestamps on 32-bit node + if (opts.preserveTimestamps && process.arch === 'ia32') { + console.warn(`fs-extra: Using the preserveTimestamps option in 32-bit node is not recommended;\n + see https://github.com/jprichardson/node-fs-extra/issues/269`) + } + + stat.checkPaths(src, dest, 'copy', (err, stats) => { + if (err) return cb(err) + const { srcStat, destStat } = stats + stat.checkParentPaths(src, srcStat, dest, 'copy', err => { + if (err) return cb(err) + if (opts.filter) return handleFilter(checkParentDir, destStat, src, dest, opts, cb) + return checkParentDir(destStat, src, dest, opts, cb) + }) + }) +} + +function checkParentDir (destStat, src, dest, opts, cb) { + const destParent = path.dirname(dest) + pathExists(destParent, (err, dirExists) => { + if (err) return cb(err) + if (dirExists) return startCopy(destStat, src, dest, opts, cb) + mkdirs(destParent, err => { + if (err) return cb(err) + return startCopy(destStat, src, dest, opts, cb) + }) + }) +} + +function handleFilter (onInclude, destStat, src, dest, opts, cb) { + Promise.resolve(opts.filter(src, dest)).then(include => { + if (include) return onInclude(destStat, src, dest, opts, cb) + return cb() + }, error => cb(error)) +} + +function startCopy (destStat, src, dest, opts, cb) { + if (opts.filter) return handleFilter(getStats, destStat, src, dest, opts, cb) + return getStats(destStat, src, dest, opts, cb) +} + +function getStats (destStat, src, dest, opts, cb) { + const stat = opts.dereference ? fs.stat : fs.lstat + stat(src, (err, srcStat) => { + if (err) return cb(err) + + if (srcStat.isDirectory()) return onDir(srcStat, destStat, src, dest, opts, cb) + else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) return onFile(srcStat, destStat, src, dest, opts, cb) + else if (srcStat.isSymbolicLink()) return onLink(destStat, src, dest, opts, cb) + }) +} + +function onFile (srcStat, destStat, src, dest, opts, cb) { + if (!destStat) return copyFile(srcStat, src, dest, opts, cb) + return mayCopyFile(srcStat, src, dest, opts, cb) +} + +function mayCopyFile (srcStat, src, dest, opts, cb) { + if (opts.overwrite) { + fs.unlink(dest, err => { + if (err) return cb(err) + return copyFile(srcStat, src, dest, opts, cb) + }) + } else if (opts.errorOnExist) { + return cb(new Error(`'${dest}' already exists`)) + } else return cb() +} + +function copyFile (srcStat, src, dest, opts, cb) { + fs.copyFile(src, dest, err => { + if (err) return cb(err) + if (opts.preserveTimestamps) return handleTimestampsAndMode(srcStat.mode, src, dest, cb) + return setDestMode(dest, srcStat.mode, cb) + }) +} + +function handleTimestampsAndMode (srcMode, src, dest, cb) { + // Make sure the file is writable before setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + return makeFileWritable(dest, srcMode, err => { + if (err) return cb(err) + return setDestTimestampsAndMode(srcMode, src, dest, cb) + }) + } + return setDestTimestampsAndMode(srcMode, src, dest, cb) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode, cb) { + return setDestMode(dest, srcMode | 0o200, cb) +} + +function setDestTimestampsAndMode (srcMode, src, dest, cb) { + setDestTimestamps(src, dest, err => { + if (err) return cb(err) + return setDestMode(dest, srcMode, cb) + }) +} + +function setDestMode 
(dest, srcMode, cb) { + return fs.chmod(dest, srcMode, cb) +} + +function setDestTimestamps (src, dest, cb) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + fs.stat(src, (err, updatedSrcStat) => { + if (err) return cb(err) + return utimesMillis(dest, updatedSrcStat.atime, updatedSrcStat.mtime, cb) + }) +} + +function onDir (srcStat, destStat, src, dest, opts, cb) { + if (!destStat) return mkDirAndCopy(srcStat.mode, src, dest, opts, cb) + if (destStat && !destStat.isDirectory()) { + return cb(new Error(`Cannot overwrite non-directory '${dest}' with directory '${src}'.`)) + } + return copyDir(src, dest, opts, cb) +} + +function mkDirAndCopy (srcMode, src, dest, opts, cb) { + fs.mkdir(dest, err => { + if (err) return cb(err) + copyDir(src, dest, opts, err => { + if (err) return cb(err) + return setDestMode(dest, srcMode, cb) + }) + }) +} + +function copyDir (src, dest, opts, cb) { + fs.readdir(src, (err, items) => { + if (err) return cb(err) + return copyDirItems(items, src, dest, opts, cb) + }) +} + +function copyDirItems (items, src, dest, opts, cb) { + const item = items.pop() + if (!item) return cb() + return copyDirItem(items, item, src, dest, opts, cb) +} + +function copyDirItem (items, item, src, dest, opts, cb) { + const srcItem = path.join(src, item) + const destItem = path.join(dest, item) + stat.checkPaths(srcItem, destItem, 'copy', (err, stats) => { + if (err) return cb(err) + const { destStat } = stats + startCopy(destStat, srcItem, destItem, opts, err => { + if (err) return cb(err) + return copyDirItems(items, src, dest, opts, cb) + }) + }) +} + +function onLink (destStat, src, dest, opts, cb) { + fs.readlink(src, (err, resolvedSrc) => { + if (err) return cb(err) + if (opts.dereference) { + resolvedSrc = path.resolve(process.cwd(), resolvedSrc) + } + + if (!destStat) { + return fs.symlink(resolvedSrc, dest, cb) + } else { + fs.readlink(dest, (err, resolvedDest) => { + if (err) { + // dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') return fs.symlink(resolvedSrc, dest, cb) + return cb(err) + } + if (opts.dereference) { + resolvedDest = path.resolve(process.cwd(), resolvedDest) + } + if (stat.isSrcSubdir(resolvedSrc, resolvedDest)) { + return cb(new Error(`Cannot copy '${resolvedSrc}' to a subdirectory of itself, '${resolvedDest}'.`)) + } + + // do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
+ if (destStat.isDirectory() && stat.isSrcSubdir(resolvedDest, resolvedSrc)) {
+ return cb(new Error(`Cannot overwrite '${resolvedDest}' with '${resolvedSrc}'.`))
+ }
+ return copyLink(resolvedSrc, dest, cb)
+ })
+ }
+ })
+}
+
+function copyLink (resolvedSrc, dest, cb) {
+ fs.unlink(dest, err => {
+ if (err) return cb(err)
+ return fs.symlink(resolvedSrc, dest, cb)
+ })
+}
+
+module.exports = copy
+
+
+/***/ }),
+/* 501 */,
+/* 502 */,
+/* 503 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const { run: uploadArtifacts } = __webpack_require__(395);
+const { logger } = __webpack_require__(79);
+
+async function archiveArtifacts(nodeTriggering, nodeArray, on) {
+ const nodesToArchive = getNodesToArchive(nodeTriggering, nodeArray);
+ logger.info(
+ nodesToArchive.length > 0
+ ? `Archiving artifacts for ${nodesToArchive.map(node => node.project)}`
+ : "No artifacts to archive"
+ );
+
+ await uploadNodes(nodesToArchive, on);
+}
+
+function getNodesToArchive(nodeTriggering, nodeArray) {
+ const archiveDependencies =
+ nodeTriggering.build["archive-artifacts"] &&
+ nodeTriggering.build["archive-artifacts"].dependencies
+ ? nodeTriggering.build["archive-artifacts"].dependencies
+ : "none";
+
+ return archiveDependencies === "none"
+ ? [nodeTriggering].filter(
+ node =>
+ node.build["archive-artifacts"] &&
+ node.build["archive-artifacts"].paths
+ )
+ : nodeArray.filter(
+ node =>
+ node.build["archive-artifacts"] &&
+ node.build["archive-artifacts"].paths &&
+ (archiveDependencies === "all" ||
+ archiveDependencies.includes(node.project) ||
+ node.project === nodeTriggering.project)
+ );
+}
+
+async function uploadNodes(nodesToArchive, on) {
+ await Promise.allSettled(
+ nodesToArchive.map(async node => {
+ logger.info(`Project [${node.project}]. Uploading artifacts...`);
+ const uploadResponse = await uploadArtifacts(
+ node.build["archive-artifacts"],
+ on
+ );
+ if (uploadResponse) {
+ const uploadArtifactsMessage =
+ uploadResponse.artifactItems &&
+ uploadResponse.artifactItems.length > 0
+ ? `Uploaded Items (${uploadResponse.artifactItems.length}): ${uploadResponse.artifactItems}.`
+ : "";
+ if (
+ uploadResponse.failedItems &&
+ uploadResponse.failedItems.length > 0
+ ) {
+ logger.error(
+ `Project [${node.project}] Failed State. Artifact [${uploadResponse.artifactName}]. Failed Items (${uploadResponse.failedItems.length}): ${uploadResponse.failedItems}. ${uploadArtifactsMessage}`
+ );
+ return Promise.reject(uploadResponse);
+ } else {
+ logger.info(
+ `Project [${node.project}]. Artifact [${uploadResponse.artifactName}]. ${uploadArtifactsMessage}`
+ );
+ return Promise.resolve(uploadResponse);
+ }
+ } else {
+ logger.info(`Project [${node.project}]. 
No artifacts uploaded`); + return Promise.resolve(undefined); } - }, { - renamed: ["repos", "createCommitSignatureProtection"] - }], - addProtectedBranchRequiredStatusChecksContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts", - renamed: ["repos", "addStatusCheckContexts"] - }], - addProtectedBranchTeamRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams", - renamed: ["repos", "addTeamAccessRestrictions"] - }], - addProtectedBranchUserRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users", - renamed: ["repos", "addUserAccessRestrictions"] - }], - addStatusCheckContexts: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - addTeamAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - addUserAccessRestrictions: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] + }) + ).then(promises => { + logger.info("-------------- ARCHIVE ARTIFACTS SUMMARY --------------"); + const totalUploadResponses = promises + .map(promiseResult => promiseResult.value || promiseResult.reason) + .filter( + uploadResponse => + uploadResponse && + uploadResponse.artifactItems && + uploadResponse.artifactItems.length > 0 + ); + const uploadedFiles = totalUploadResponses.flatMap( + uploadResponse => uploadResponse.artifactItems + ); + const failureUploadResponses = promises + .filter(promiseResult => promiseResult.reason) + .map(promiseResult => promiseResult.reason) + .filter( + uploadResponse => + uploadResponse && + uploadResponse.failedItems && + uploadResponse.failedItems.length > 0 + ); + const failedFiles = failureUploadResponses.flatMap( + uploadResponse => uploadResponse.failedItems + ); + logger.info( + `Artifacts uploaded (${ + totalUploadResponses.length + }): ${totalUploadResponses.map( + uploadResponse => uploadResponse.artifactName + )}. Files (${uploadedFiles.length}): ${uploadedFiles}` + ); + logger.info( + `Artifacts failed (${ + failureUploadResponses.length + }): ${failureUploadResponses.map( + uploadResponse => uploadResponse.artifactName + )}. 
Files (${failedFiles.length}): ${failedFiles}` + ); + }); +} + +module.exports = { archiveArtifacts }; + + +/***/ }), +/* 504 */, +/* 505 */, +/* 506 */, +/* 507 */, +/* 508 */, +/* 509 */, +/* 510 */ +/***/ (function(module) { + +module.exports = addHook + +function addHook (state, kind, name, hook) { + var orig = hook + if (!state.registry[name]) { + state.registry[name] = [] + } + + if (kind === 'before') { + hook = function (method, options) { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)) + } + } + + if (kind === 'after') { + hook = function (method, options) { + var result + return Promise.resolve() + .then(method.bind(null, options)) + .then(function (result_) { + result = result_ + return orig(result, options) + }) + .then(function () { + return result + }) + } + } + + if (kind === 'error') { + hook = function (method, options) { + return Promise.resolve() + .then(method.bind(null, options)) + .catch(function (error) { + return orig(error, options) + }) + } + } + + state.registry[name].push({ + hook: hook, + orig: orig + }) +} + + +/***/ }), +/* 511 */, +/* 512 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +const path = __webpack_require__(622); +const pathKey = __webpack_require__(39); + +module.exports = opts => { + opts = Object.assign({ + cwd: process.cwd(), + path: process.env[pathKey()] + }, opts); + + let prev; + let pth = path.resolve(opts.cwd); + const ret = []; + + while (prev !== pth) { + ret.push(path.join(pth, 'node_modules/.bin')); + prev = pth; + pth = path.resolve(pth, '..'); + } + + // ensure the running `node` binary is used + ret.push(path.dirname(process.execPath)); + + return ret.concat(opts.path).join(path.delimiter); +}; + +module.exports.env = opts => { + opts = Object.assign({ + env: process.env + }, opts); + + const env = Object.assign({}, opts.env); + const path = pathKey({env}); + + opts.path = env[path]; + env[path] = module.exports(opts); + + return env; +}; + + +/***/ }), +/* 513 */, +/* 514 */, +/* 515 */, +/* 516 */, +/* 517 */ +/***/ (function(module) { + +function treat(command) { + return `${command} -Dorg.slf4j.simpleLogger.log.org.apache.maven.cli.transfer.Slf4jMavenTransferListener=warn -B`; +} + +module.exports = { + treat +}; + + +/***/ }), +/* 518 */, +/* 519 */, +/* 520 */, +/* 521 */, +/* 522 */, +/* 523 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var register = __webpack_require__(363) +var addHook = __webpack_require__(510) +var removeHook = __webpack_require__(763) + +// bind with array of arguments: https://stackoverflow.com/a/21792913 +var bind = Function.bind +var bindable = bind.bind(bind) + +function bindApi (hook, state, name) { + var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) + hook.api = { remove: removeHookRef } + hook.remove = removeHookRef + + ;['before', 'error', 'after', 'wrap'].forEach(function (kind) { + var args = name ? 
[state, kind, name] : [state, kind] + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) + }) +} + +function HookSingular () { + var singularHookName = 'h' + var singularHookState = { + registry: {} + } + var singularHook = register.bind(null, singularHookState, singularHookName) + bindApi(singularHook, singularHookState, singularHookName) + return singularHook +} + +function HookCollection () { + var state = { + registry: {} + } + + var hook = register.bind(null, state) + bindApi(hook, state) + + return hook +} + +var collectionHookDeprecationMessageDisplayed = false +function Hook () { + if (!collectionHookDeprecationMessageDisplayed) { + console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') + collectionHookDeprecationMessageDisplayed = true + } + return HookCollection() +} + +Hook.Singular = HookSingular.bind() +Hook.Collection = HookCollection.bind() + +module.exports = Hook +// expose constructors as a named property for TypeScript +module.exports.Hook = Hook +module.exports.Singular = Hook.Singular +module.exports.Collection = Hook.Collection + + +/***/ }), +/* 524 */, +/* 525 */, +/* 526 */, +/* 527 */, +/* 528 */, +/* 529 */, +/* 530 */, +/* 531 */, +/* 532 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const path = __importStar(__webpack_require__(622)); +/** + * Creates a specification for a set of files that will be downloaded + * @param artifactName the name of the artifact + * @param artifactEntries a set of container entries that describe that files that make up an artifact + * @param downloadPath the path where the artifact will be downloaded to + * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to + */ +function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) { + // use a set for the directory paths so that there are no duplicates + const directories = new Set(); + const specifications = { + rootDownloadLocation: includeRootDirectory + ? path.join(downloadPath, artifactName) + : downloadPath, + directoryStructure: [], + emptyFilesToCreate: [], + filesToDownload: [] + }; + for (const entry of artifactEntries) { + // Ignore artifacts in the container that don't begin with the same name + if (entry.path.startsWith(`${artifactName}/`) || + entry.path.startsWith(`${artifactName}\\`)) { + // normalize all separators to the local OS + const normalizedPathEntry = path.normalize(entry.path); + // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path + const filePath = path.join(downloadPath, includeRootDirectory + ? normalizedPathEntry + : normalizedPathEntry.replace(artifactName, '')); + // Case insensitive folder structure maintained in the backend, not every folder is created so the 'folder' + // itemType cannot be relied upon. 
The file must be used to determine the directory structure + if (entry.itemType === 'file') { + // Get the directories that we need to create from the filePath for each individual file + directories.add(path.dirname(filePath)); + if (entry.fileLength === 0) { + // An empty file was uploaded, create the empty files locally so that no extra http calls are made + specifications.emptyFilesToCreate.push(filePath); + } + else { + specifications.filesToDownload.push({ + sourceLocation: entry.contentLocation, + targetPath: filePath + }); + } + } + } + } + specifications.directoryStructure = Array.from(directories); + return specifications; +} +exports.getDownloadSpecification = getDownloadSpecification; +//# sourceMappingURL=download-specification.js.map + +/***/ }), +/* 533 */, +/* 534 */, +/* 535 */, +/* 536 */, +/* 537 */, +/* 538 */, +/* 539 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const http = __webpack_require__(605); +const https = __webpack_require__(211); +const pm = __webpack_require__(950); +let tunnel; +var HttpCodes; +(function (HttpCodes) { + HttpCodes[HttpCodes["OK"] = 200] = "OK"; + HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; + HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; + HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; + HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; + HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; + HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy"; + HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; + HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; + HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; + HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; + HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; + HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired"; + HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; + HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound"; + HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed"; + HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable"; + HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired"; + HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout"; + HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict"; + HttpCodes[HttpCodes["Gone"] = 410] = "Gone"; + HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests"; + HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError"; + HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented"; + HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway"; + HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable"; + HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout"; +})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {})); +var Headers; +(function (Headers) { + Headers["Accept"] = "accept"; + Headers["ContentType"] = "content-type"; +})(Headers = exports.Headers || (exports.Headers = {})); +var MediaTypes; +(function (MediaTypes) { + MediaTypes["ApplicationJson"] = "application/json"; +})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {})); +/** + * Returns the proxy URL, depending upon the supplied url and proxy environment variables. + * @param serverUrl The server URL where the request will be sent. 
For example, https://api.github.com + */ +function getProxyUrl(serverUrl) { + let proxyUrl = pm.getProxyUrl(new URL(serverUrl)); + return proxyUrl ? proxyUrl.href : ''; +} +exports.getProxyUrl = getProxyUrl; +const HttpRedirectCodes = [ + HttpCodes.MovedPermanently, + HttpCodes.ResourceMoved, + HttpCodes.SeeOther, + HttpCodes.TemporaryRedirect, + HttpCodes.PermanentRedirect +]; +const HttpResponseRetryCodes = [ + HttpCodes.BadGateway, + HttpCodes.ServiceUnavailable, + HttpCodes.GatewayTimeout +]; +const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD']; +const ExponentialBackoffCeiling = 10; +const ExponentialBackoffTimeSlice = 5; +class HttpClientError extends Error { + constructor(message, statusCode) { + super(message); + this.name = 'HttpClientError'; + this.statusCode = statusCode; + Object.setPrototypeOf(this, HttpClientError.prototype); + } +} +exports.HttpClientError = HttpClientError; +class HttpClientResponse { + constructor(message) { + this.message = message; + } + readBody() { + return new Promise(async (resolve, reject) => { + let output = Buffer.alloc(0); + this.message.on('data', (chunk) => { + output = Buffer.concat([output, chunk]); + }); + this.message.on('end', () => { + resolve(output.toString()); + }); + }); + } +} +exports.HttpClientResponse = HttpClientResponse; +function isHttps(requestUrl) { + let parsedUrl = new URL(requestUrl); + return parsedUrl.protocol === 'https:'; +} +exports.isHttps = isHttps; +class HttpClient { + constructor(userAgent, handlers, requestOptions) { + this._ignoreSslError = false; + this._allowRedirects = true; + this._allowRedirectDowngrade = false; + this._maxRedirects = 50; + this._allowRetries = false; + this._maxRetries = 1; + this._keepAlive = false; + this._disposed = false; + this.userAgent = userAgent; + this.handlers = handlers || []; + this.requestOptions = requestOptions; + if (requestOptions) { + if (requestOptions.ignoreSslError != null) { + this._ignoreSslError = requestOptions.ignoreSslError; + } + this._socketTimeout = requestOptions.socketTimeout; + if (requestOptions.allowRedirects != null) { + this._allowRedirects = requestOptions.allowRedirects; + } + if (requestOptions.allowRedirectDowngrade != null) { + this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade; + } + if (requestOptions.maxRedirects != null) { + this._maxRedirects = Math.max(requestOptions.maxRedirects, 0); + } + if (requestOptions.keepAlive != null) { + this._keepAlive = requestOptions.keepAlive; + } + if (requestOptions.allowRetries != null) { + this._allowRetries = requestOptions.allowRetries; + } + if (requestOptions.maxRetries != null) { + this._maxRetries = requestOptions.maxRetries; + } + } + } + options(requestUrl, additionalHeaders) { + return this.request('OPTIONS', requestUrl, null, additionalHeaders || {}); + } + get(requestUrl, additionalHeaders) { + return this.request('GET', requestUrl, null, additionalHeaders || {}); + } + del(requestUrl, additionalHeaders) { + return this.request('DELETE', requestUrl, null, additionalHeaders || {}); + } + post(requestUrl, data, additionalHeaders) { + return this.request('POST', requestUrl, data, additionalHeaders || {}); + } + patch(requestUrl, data, additionalHeaders) { + return this.request('PATCH', requestUrl, data, additionalHeaders || {}); + } + put(requestUrl, data, additionalHeaders) { + return this.request('PUT', requestUrl, data, additionalHeaders || {}); + } + head(requestUrl, additionalHeaders) { + return this.request('HEAD', requestUrl, null, additionalHeaders || {}); + } 
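+    /**
+     * A minimal usage sketch for the verb helpers above; the user agent,
+     * URL and retry options below are illustrative assumptions, not values
+     * taken from this bundle:
+     *
+     *   const client = new HttpClient('sample-agent', [], { allowRetries: true, maxRetries: 2 });
+     *   const res = await client.get('https://example.com/resource');
+     *   console.log(res.message.statusCode, await res.readBody());
+     *
+     * Every helper above delegates to request(), which layers retry and
+     * redirect handling on top of requestRaw().
+     */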
+ sendStream(verb, requestUrl, stream, additionalHeaders) { + return this.request(verb, requestUrl, stream, additionalHeaders); + } + /** + * Gets a typed object from an endpoint + * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise + */ + async getJson(requestUrl, additionalHeaders = {}) { + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + let res = await this.get(requestUrl, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async postJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.post(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async putJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.put(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + async patchJson(requestUrl, obj, additionalHeaders = {}) { + let data = JSON.stringify(obj, null, 2); + additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson); + additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson); + let res = await this.patch(requestUrl, data, additionalHeaders); + return this._processResponse(res, this.requestOptions); + } + /** + * Makes a raw http request. + * All other methods such as get, post, patch, and request ultimately call this. + * Prefer get, del, post and patch + */ + async request(verb, requestUrl, data, headers) { + if (this._disposed) { + throw new Error('Client has already been disposed.'); + } + let parsedUrl = new URL(requestUrl); + let info = this._prepareRequest(verb, parsedUrl, headers); + // Only perform retries on reads since writes may not be idempotent. + let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1 + ? this._maxRetries + 1 + : 1; + let numTries = 0; + let response; + while (numTries < maxTries) { + response = await this.requestRaw(info, data); + // Check if it's an authentication challenge + if (response && + response.message && + response.message.statusCode === HttpCodes.Unauthorized) { + let authenticationHandler; + for (let i = 0; i < this.handlers.length; i++) { + if (this.handlers[i].canHandleAuthentication(response)) { + authenticationHandler = this.handlers[i]; + break; + } + } + if (authenticationHandler) { + return authenticationHandler.handleAuthentication(this, info, data); + } + else { + // We have received an unauthorized response but have no handlers to handle it. + // Let the response return to the caller. 
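+                // Any object with the two methods probed above can serve as a
+                // handler here; handlers also get prepareRequest(options)
+                // called in _prepareRequest below. A sketch (not a handler
+                // shipped in this bundle, and `token` is assumed in scope):
+                //
+                //   const bearerHandler = {
+                //     prepareRequest(options) { options.headers['authorization'] = `Bearer ${token}`; },
+                //     canHandleAuthentication(response) { return response.message.statusCode === 401; },
+                //     handleAuthentication(client, info, data) { return client.requestRaw(info, data); }
+                //   };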
+ return response; + } + } + let redirectsRemaining = this._maxRedirects; + while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 && + this._allowRedirects && + redirectsRemaining > 0) { + const redirectUrl = response.message.headers['location']; + if (!redirectUrl) { + // if there's no location to redirect to, we won't + break; + } + let parsedRedirectUrl = new URL(redirectUrl); + if (parsedUrl.protocol == 'https:' && + parsedUrl.protocol != parsedRedirectUrl.protocol && + !this._allowRedirectDowngrade) { + throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.'); + } + // we need to finish reading the response before reassigning response + // which will leak the open socket. + await response.readBody(); + // strip authorization header if redirected to a different hostname + if (parsedRedirectUrl.hostname !== parsedUrl.hostname) { + for (let header in headers) { + // header names are case insensitive + if (header.toLowerCase() === 'authorization') { + delete headers[header]; + } + } + } + // let's make the request with the new redirectUrl + info = this._prepareRequest(verb, parsedRedirectUrl, headers); + response = await this.requestRaw(info, data); + redirectsRemaining--; + } + if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) { + // If not a retry code, return immediately instead of retrying + return response; + } + numTries += 1; + if (numTries < maxTries) { + await response.readBody(); + await this._performExponentialBackoff(numTries); + } + } + return response; + } + /** + * Needs to be called if keepAlive is set to true in request options. + */ + dispose() { + if (this._agent) { + this._agent.destroy(); + } + this._disposed = true; + } + /** + * Raw request. + * @param info + * @param data + */ + requestRaw(info, data) { + return new Promise((resolve, reject) => { + let callbackForResult = function (err, res) { + if (err) { + reject(err); + } + resolve(res); + }; + this.requestRawWithCallback(info, data, callbackForResult); + }); + } + /** + * Raw request with callback. + * @param info + * @param data + * @param onResult + */ + requestRawWithCallback(info, data, onResult) { + let socket; + if (typeof data === 'string') { + info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8'); + } + let callbackCalled = false; + let handleResult = (err, res) => { + if (!callbackCalled) { + callbackCalled = true; + onResult(err, res); + } + }; + let req = info.httpModule.request(info.options, (msg) => { + let res = new HttpClientResponse(msg); + handleResult(null, res); + }); + req.on('socket', sock => { + socket = sock; + }); + // If we ever get disconnected, we want the socket to timeout eventually + req.setTimeout(this._socketTimeout || 3 * 60000, () => { + if (socket) { + socket.end(); + } + handleResult(new Error('Request timeout: ' + info.options.path), null); + }); + req.on('error', function (err) { + // err has statusCode property + // res should have headers + handleResult(err, null); + }); + if (data && typeof data === 'string') { + req.write(data, 'utf8'); + } + if (data && typeof data !== 'string') { + data.on('close', function () { + req.end(); + }); + data.pipe(req); + } + else { + req.end(); + } + } + /** + * Gets an http agent. This function is useful when you need an http agent that handles + * routing through a proxy server - depending upon the url and proxy environment variables. 
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com + */ + getAgent(serverUrl) { + let parsedUrl = new URL(serverUrl); + return this._getAgent(parsedUrl); + } + _prepareRequest(method, requestUrl, headers) { + const info = {}; + info.parsedUrl = requestUrl; + const usingSsl = info.parsedUrl.protocol === 'https:'; + info.httpModule = usingSsl ? https : http; + const defaultPort = usingSsl ? 443 : 80; + info.options = {}; + info.options.host = info.parsedUrl.hostname; + info.options.port = info.parsedUrl.port + ? parseInt(info.parsedUrl.port) + : defaultPort; + info.options.path = + (info.parsedUrl.pathname || '') + (info.parsedUrl.search || ''); + info.options.method = method; + info.options.headers = this._mergeHeaders(headers); + if (this.userAgent != null) { + info.options.headers['user-agent'] = this.userAgent; + } + info.options.agent = this._getAgent(info.parsedUrl); + // gives handlers an opportunity to participate + if (this.handlers) { + this.handlers.forEach(handler => { + handler.prepareRequest(info.options); + }); + } + return info; + } + _mergeHeaders(headers) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + if (this.requestOptions && this.requestOptions.headers) { + return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers)); + } + return lowercaseKeys(headers || {}); + } + _getExistingOrDefaultHeader(additionalHeaders, header, _default) { + const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {}); + let clientHeader; + if (this.requestOptions && this.requestOptions.headers) { + clientHeader = lowercaseKeys(this.requestOptions.headers)[header]; + } + return additionalHeaders[header] || clientHeader || _default; + } + _getAgent(parsedUrl) { + let agent; + let proxyUrl = pm.getProxyUrl(parsedUrl); + let useProxy = proxyUrl && proxyUrl.hostname; + if (this._keepAlive && useProxy) { + agent = this._proxyAgent; + } + if (this._keepAlive && !useProxy) { + agent = this._agent; + } + // if agent is already assigned use that agent. + if (!!agent) { + return agent; + } + const usingSsl = parsedUrl.protocol === 'https:'; + let maxSockets = 100; + if (!!this.requestOptions) { + maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets; + } + if (useProxy) { + // If using proxy, need tunnel + if (!tunnel) { + tunnel = __webpack_require__(413); + } + const agentOptions = { + maxSockets: maxSockets, + keepAlive: this._keepAlive, + proxy: { + proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`, + host: proxyUrl.hostname, + port: proxyUrl.port + } + }; + let tunnelAgent; + const overHttps = proxyUrl.protocol === 'https:'; + if (usingSsl) { + tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp; + } + else { + tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp; + } + agent = tunnelAgent(agentOptions); + this._proxyAgent = agent; + } + // if reusing agent across request and tunneling agent isn't assigned create a new agent + if (this._keepAlive && !agent) { + const options = { keepAlive: this._keepAlive, maxSockets: maxSockets }; + agent = usingSsl ? new https.Agent(options) : new http.Agent(options); + this._agent = agent; + } + // if not using private agent and tunnel agent isn't setup then use global agent + if (!agent) { + agent = usingSsl ? 
https.globalAgent : http.globalAgent; + } + if (usingSsl && this._ignoreSslError) { + // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process + // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options + // we have to cast it to any and change it directly + agent.options = Object.assign(agent.options || {}, { + rejectUnauthorized: false + }); + } + return agent; + } + _performExponentialBackoff(retryNumber) { + retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber); + const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber); + return new Promise(resolve => setTimeout(() => resolve(), ms)); + } + static dateTimeDeserializer(key, value) { + if (typeof value === 'string') { + let a = new Date(value); + if (!isNaN(a.valueOf())) { + return a; + } + } + return value; + } + async _processResponse(res, options) { + return new Promise(async (resolve, reject) => { + const statusCode = res.message.statusCode; + const response = { + statusCode: statusCode, + result: null, + headers: {} + }; + // not found leads to null obj returned + if (statusCode == HttpCodes.NotFound) { + resolve(response); + } + let obj; + let contents; + // get the result from the body + try { + contents = await res.readBody(); + if (contents && contents.length > 0) { + if (options && options.deserializeDates) { + obj = JSON.parse(contents, HttpClient.dateTimeDeserializer); + } + else { + obj = JSON.parse(contents); + } + response.result = obj; + } + response.headers = res.message.headers; + } + catch (err) { + // Invalid resource (contents not json); leaving result obj null + } + // note that 3xx redirects are handled by the http layer. + if (statusCode > 299) { + let msg; + // if exception/error in body, attempt to get better error + if (obj && obj.message) { + msg = obj.message; + } + else if (contents && contents.length > 0) { + // it may be the case that the exception is in the body message as string + msg = contents; + } + else { + msg = 'Failed request: (' + statusCode + ')'; + } + let err = new HttpClientError(msg, statusCode); + err.result = response.result; + reject(err); + } + else { + resolve(response); + } + }); + } +} +exports.HttpClient = HttpClient; + + +/***/ }), +/* 540 */, +/* 541 */, +/* 542 */, +/* 543 */, +/* 544 */, +/* 545 */, +/* 546 */, +/* 547 */, +/* 548 */, +/* 549 */, +/* 550 */, +/* 551 */, +/* 552 */, +/* 553 */, +/* 554 */, +/* 555 */, +/* 556 */ +/***/ (function(module) { + +"use strict"; +// YAML error class. http://stackoverflow.com/questions/8458984 +// + + +function YAMLException(reason, mark) { + // Super constructor + Error.call(this); + + this.name = 'YAMLException'; + this.reason = reason; + this.mark = mark; + this.message = (this.reason || '(unknown reason)') + (this.mark ? ' ' + this.mark.toString() : ''); + + // Include stack trace in error object + if (Error.captureStackTrace) { + // Chrome and NodeJS + Error.captureStackTrace(this, this.constructor); + } else { + // FF, IE 10+ and Safari 6+. 
Fallback for others + this.stack = (new Error()).stack || ''; + } +} + + +// Inherit from Error +YAMLException.prototype = Object.create(Error.prototype); +YAMLException.prototype.constructor = YAMLException; + + +YAMLException.prototype.toString = function toString(compact) { + var result = this.name + ': '; + + result += this.reason || '(unknown reason)'; + + if (!compact && this.mark) { + result += ' ' + this.mark.toString(); + } + + return result; +}; + + +module.exports = YAMLException; + + +/***/ }), +/* 557 */, +/* 558 */, +/* 559 */, +/* 560 */, +/* 561 */, +/* 562 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var once = __webpack_require__(49); + +var noop = function() {}; + +var isRequest = function(stream) { + return stream.setHeader && typeof stream.abort === 'function'; +}; + +var isChildProcess = function(stream) { + return stream.stdio && Array.isArray(stream.stdio) && stream.stdio.length === 3 +}; + +var eos = function(stream, opts, callback) { + if (typeof opts === 'function') return eos(stream, null, opts); + if (!opts) opts = {}; + + callback = once(callback || noop); + + var ws = stream._writableState; + var rs = stream._readableState; + var readable = opts.readable || (opts.readable !== false && stream.readable); + var writable = opts.writable || (opts.writable !== false && stream.writable); + var cancelled = false; + + var onlegacyfinish = function() { + if (!stream.writable) onfinish(); + }; + + var onfinish = function() { + writable = false; + if (!readable) callback.call(stream); + }; + + var onend = function() { + readable = false; + if (!writable) callback.call(stream); + }; + + var onexit = function(exitCode) { + callback.call(stream, exitCode ? new Error('exited with error code: ' + exitCode) : null); + }; + + var onerror = function(err) { + callback.call(stream, err); + }; + + var onclose = function() { + process.nextTick(onclosenexttick); + }; + + var onclosenexttick = function() { + if (cancelled) return; + if (readable && !(rs && (rs.ended && !rs.destroyed))) return callback.call(stream, new Error('premature close')); + if (writable && !(ws && (ws.ended && !ws.destroyed))) return callback.call(stream, new Error('premature close')); + }; + + var onrequest = function() { + stream.req.on('finish', onfinish); + }; + + if (isRequest(stream)) { + stream.on('complete', onfinish); + stream.on('abort', onclose); + if (stream.req) onrequest(); + else stream.on('request', onrequest); + } else if (writable && !ws) { // legacy streams + stream.on('end', onlegacyfinish); + stream.on('close', onlegacyfinish); + } + + if (isChildProcess(stream)) stream.on('exit', onexit); + + stream.on('end', onend); + stream.on('finish', onfinish); + if (opts.error !== false) stream.on('error', onerror); + stream.on('close', onclose); + + return function() { + cancelled = true; + stream.removeListener('complete', onfinish); + stream.removeListener('abort', onclose); + stream.removeListener('request', onrequest); + if (stream.req) stream.req.removeListener('finish', onfinish); + stream.removeListener('end', onlegacyfinish); + stream.removeListener('close', onlegacyfinish); + stream.removeListener('finish', onfinish); + stream.removeListener('exit', onexit); + stream.removeListener('end', onend); + stream.removeListener('error', onerror); + stream.removeListener('close', onclose); + }; +}; + +module.exports = eos; + + +/***/ }), +/* 563 */, +/* 564 */, +/* 565 */, +/* 566 */, +/* 567 */, +/* 568 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + 
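+// Command-rewriting step for spawning child processes (shaped like
+// cross-spawn's parse). An illustrative call, not executed in this bundle:
+//
+//   const parsed = parse('npm', ['run', 'build'], {});
+//   // On Windows, when a shell is required, parsed.command becomes
+//   // process.env.comspec (cmd.exe) and parsed.args become
+//   // ['/d', '/s', '/c', '"<escaped command>"']; on other platforms the
+//   // input is passed through unchanged.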
+"use strict"; + + +const path = __webpack_require__(622); +const niceTry = __webpack_require__(948); +const resolveCommand = __webpack_require__(489); +const escape = __webpack_require__(462); +const readShebang = __webpack_require__(389); +const semver = __webpack_require__(280); + +const isWin = process.platform === 'win32'; +const isExecutableRegExp = /\.(?:com|exe)$/i; +const isCmdShimRegExp = /node_modules[\\/].bin[\\/][^\\/]+\.cmd$/i; + +// `options.shell` is supported in Node ^4.8.0, ^5.7.0 and >= 6.0.0 +const supportsShellOption = niceTry(() => semver.satisfies(process.version, '^4.8.0 || ^5.7.0 || >= 6.0.0', true)) || false; + +function detectShebang(parsed) { + parsed.file = resolveCommand(parsed); + + const shebang = parsed.file && readShebang(parsed.file); + + if (shebang) { + parsed.args.unshift(parsed.file); + parsed.command = shebang; + + return resolveCommand(parsed); + } + + return parsed.file; +} + +function parseNonShell(parsed) { + if (!isWin) { + return parsed; + } + + // Detect & add support for shebangs + const commandFile = detectShebang(parsed); + + // We don't need a shell if the command filename is an executable + const needsShell = !isExecutableRegExp.test(commandFile); + + // If a shell is required, use cmd.exe and take care of escaping everything correctly + // Note that `forceShell` is an hidden option used only in tests + if (parsed.options.forceShell || needsShell) { + // Need to double escape meta chars if the command is a cmd-shim located in `node_modules/.bin/` + // The cmd-shim simply calls execute the package bin file with NodeJS, proxying any argument + // Because the escape of metachars with ^ gets interpreted when the cmd.exe is first called, + // we need to double escape them + const needsDoubleEscapeMetaChars = isCmdShimRegExp.test(commandFile); + + // Normalize posix paths into OS compatible paths (e.g.: foo/bar -> foo\bar) + // This is necessary otherwise it will always fail with ENOENT in those cases + parsed.command = path.normalize(parsed.command); + + // Escape command & arguments + parsed.command = escape.command(parsed.command); + parsed.args = parsed.args.map((arg) => escape.argument(arg, needsDoubleEscapeMetaChars)); + + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.command = process.env.comspec || 'cmd.exe'; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } + + return parsed; +} + +function parseShell(parsed) { + // If node supports the shell option, there's no need to mimic its behavior + if (supportsShellOption) { + return parsed; + } + + // Mimic node shell option + // See https://github.com/nodejs/node/blob/b9f6a2dc059a1062776133f3d4fd848c4da7d150/lib/child_process.js#L335 + const shellCommand = [parsed.command].concat(parsed.args).join(' '); + + if (isWin) { + parsed.command = typeof parsed.options.shell === 'string' ? 
parsed.options.shell : process.env.comspec || 'cmd.exe'; + parsed.args = ['/d', '/s', '/c', `"${shellCommand}"`]; + parsed.options.windowsVerbatimArguments = true; // Tell node's spawn that the arguments are already escaped + } else { + if (typeof parsed.options.shell === 'string') { + parsed.command = parsed.options.shell; + } else if (process.platform === 'android') { + parsed.command = '/system/bin/sh'; + } else { + parsed.command = '/bin/sh'; + } + + parsed.args = ['-c', shellCommand]; + } + + return parsed; +} + +function parse(command, args, options) { + // Normalize arguments, similar to nodejs + if (args && !Array.isArray(args)) { + options = args; + args = null; + } + + args = args ? args.slice(0) : []; // Clone array to avoid changing the original + options = Object.assign({}, options); // Clone object to avoid changing the original + + // Build our parsed object + const parsed = { + command, + args, + options, + file: undefined, + original: { + command, + args, + }, + }; + + // Delegate further parsing to shell or non-shell + return options.shell ? parseShell(parsed) : parseNonShell(parsed); +} + +module.exports = parse; + + +/***/ }), +/* 569 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = rimraf +rimraf.sync = rimrafSync + +var assert = __webpack_require__(357) +var path = __webpack_require__(622) +var fs = __webpack_require__(747) +var glob = undefined +try { + glob = __webpack_require__(120) +} catch (_err) { + // treat glob as optional. +} +var _0666 = parseInt('666', 8) + +var defaultGlobOpts = { + nosort: true, + silent: true +} + +// for EMFILE handling +var timeout = 0 + +var isWindows = (process.platform === "win32") + +function defaults (options) { + var methods = [ + 'unlink', + 'chmod', + 'stat', + 'lstat', + 'rmdir', + 'readdir' + ] + methods.forEach(function(m) { + options[m] = options[m] || fs[m] + m = m + 'Sync' + options[m] = options[m] || fs[m] + }) + + options.maxBusyTries = options.maxBusyTries || 3 + options.emfileWait = options.emfileWait || 1000 + if (options.glob === false) { + options.disableGlob = true + } + if (options.disableGlob !== true && glob === undefined) { + throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') + } + options.disableGlob = options.disableGlob || false + options.glob = options.glob || defaultGlobOpts +} + +function rimraf (p, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert.equal(typeof cb, 'function', 'rimraf: callback function required') + assert(options, 'rimraf: invalid options argument provided') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + defaults(options) + + var busyTries = 0 + var errState = null + var n = 0 + + if (options.disableGlob || !glob.hasMagic(p)) + return afterGlob(null, [p]) + + options.lstat(p, function (er, stat) { + if (!er) + return afterGlob(null, [p]) + + glob(p, options.glob, afterGlob) + }) + + function next (er) { + errState = errState || er + if (--n === 0) + cb(errState) + } + + function afterGlob (er, results) { + if (er) + return cb(er) + + n = results.length + if (n === 0) + return cb() + + results.forEach(function (p) { + rimraf_(p, options, function CB (er) { + if (er) { + if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && + busyTries < options.maxBusyTries) { + busyTries ++ + var time = busyTries * 
100 + // try again, with the same exact callback as this one. + return setTimeout(function () { + rimraf_(p, options, CB) + }, time) + } + + // this one won't happen if graceful-fs is used. + if (er.code === "EMFILE" && timeout < options.emfileWait) { + return setTimeout(function () { + rimraf_(p, options, CB) + }, timeout ++) + } + + // already gone + if (er.code === "ENOENT") er = null + } + + timeout = 0 + next(er) + }) + }) + } +} + +// Two possible strategies. +// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR +// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR +// +// Both result in an extra syscall when you guess wrong. However, there +// are likely far more normal files in the world than directories. This +// is based on the assumption that a the average number of files per +// directory is >= 1. +// +// If anyone ever complains about this, then I guess the strategy could +// be made configurable somehow. But until then, YAGNI. +function rimraf_ (p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + // sunos lets the root user unlink directories, which is... weird. + // so we have to lstat here and make sure it's not a dir. + options.lstat(p, function (er, st) { + if (er && er.code === "ENOENT") + return cb(null) + + // Windows can EPERM on stat. Life is suffering. + if (er && er.code === "EPERM" && isWindows) + fixWinEPERM(p, options, er, cb) + + if (st && st.isDirectory()) + return rmdir(p, options, er, cb) + + options.unlink(p, function (er) { + if (er) { + if (er.code === "ENOENT") + return cb(null) + if (er.code === "EPERM") + return (isWindows) + ? fixWinEPERM(p, options, er, cb) + : rmdir(p, options, er, cb) + if (er.code === "EISDIR") + return rmdir(p, options, er, cb) } - }], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - createCommitComment: ["POST /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - createCommitSignatureProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] + return cb(er) + }) + }) +} + +function fixWinEPERM (p, options, er, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + if (er) + assert(er instanceof Error) + + options.chmod(p, _0666, function (er2) { + if (er2) + cb(er2.code === "ENOENT" ? null : er) + else + options.stat(p, function(er3, stats) { + if (er3) + cb(er3.code === "ENOENT" ? null : er) + else if (stats.isDirectory()) + rmdir(p, options, er, cb) + else + options.unlink(p, cb) + }) + }) +} + +function fixWinEPERMSync (p, options, er) { + assert(p) + assert(options) + if (er) + assert(er instanceof Error) + + try { + options.chmodSync(p, _0666) + } catch (er2) { + if (er2.code === "ENOENT") + return + else + throw er + } + + try { + var stats = options.statSync(p) + } catch (er3) { + if (er3.code === "ENOENT") + return + else + throw er + } + + if (stats.isDirectory()) + rmdirSync(p, options, er) + else + options.unlinkSync(p) +} + +function rmdir (p, options, originalEr, cb) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + assert(typeof cb === 'function') + + // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) + // if we guessed wrong, and it's not a directory, then + // raise the original error. 
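+  // e.g. when rimraf_ above guessed "file", got EPERM or EISDIR from
+  // unlink, and fell through to here: if rmdir then reports ENOTDIR, p
+  // really was a plain file after all, so the original unlink error is
+  // the one surfaced to the callback.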
+ options.rmdir(p, function (er) { + if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) + rmkids(p, options, cb) + else if (er && er.code === "ENOTDIR") + cb(originalEr) + else + cb(er) + }) +} + +function rmkids(p, options, cb) { + assert(p) + assert(options) + assert(typeof cb === 'function') + + options.readdir(p, function (er, files) { + if (er) + return cb(er) + var n = files.length + if (n === 0) + return options.rmdir(p, cb) + var errState + files.forEach(function (f) { + rimraf(path.join(p, f), options, function (er) { + if (errState) + return + if (er) + return cb(errState = er) + if (--n === 0) + options.rmdir(p, cb) + }) + }) + }) +} + +// this looks simpler, and is strictly *faster*, but will +// tie up the JavaScript thread and fail on excessively +// deep directory trees. +function rimrafSync (p, options) { + options = options || {} + defaults(options) + + assert(p, 'rimraf: missing path') + assert.equal(typeof p, 'string', 'rimraf: path should be a string') + assert(options, 'rimraf: missing options') + assert.equal(typeof options, 'object', 'rimraf: options should be object') + + var results + + if (options.disableGlob || !glob.hasMagic(p)) { + results = [p] + } else { + try { + options.lstatSync(p) + results = [p] + } catch (er) { + results = glob.sync(p, options.glob) + } + } + + if (!results.length) + return + + for (var i = 0; i < results.length; i++) { + var p = results[i] + + try { + var st = options.lstatSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + + // Windows can EPERM on stat. Life is suffering. + if (er.code === "EPERM" && isWindows) + fixWinEPERMSync(p, options, er) + } + + try { + // sunos lets the root user unlink directories, which is... weird. + if (st && st.isDirectory()) + rmdirSync(p, options, null) + else + options.unlinkSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "EPERM") + return isWindows ? fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) + if (er.code !== "EISDIR") + throw er + + rmdirSync(p, options, er) + } + } +} + +function rmdirSync (p, options, originalEr) { + assert(p) + assert(options) + if (originalEr) + assert(originalEr instanceof Error) + + try { + options.rmdirSync(p) + } catch (er) { + if (er.code === "ENOENT") + return + if (er.code === "ENOTDIR") + throw originalEr + if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") + rmkidsSync(p, options) + } +} + +function rmkidsSync (p, options) { + assert(p) + assert(options) + options.readdirSync(p).forEach(function (f) { + rimrafSync(path.join(p, f), options) + }) + + // We only end up here once we got ENOTEMPTY at least once, and + // at this point, we are guaranteed to have removed all the kids. + // So, we know that it won't be ENOENT or ENOTDIR or anything else. + // try really hard to delete stuff on windows, because it has a + // PROFOUNDLY annoying habit of not closing handles promptly when + // files are deleted, resulting in spurious ENOTEMPTY errors. + var retries = isWindows ? 
100 : 1 + var i = 0 + do { + var threw = true + try { + var ret = options.rmdirSync(p, options) + threw = false + return ret + } finally { + if (++i < retries && threw) + continue + } + } while (true) +} + + +/***/ }), +/* 570 */, +/* 571 */, +/* 572 */, +/* 573 */, +/* 574 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var Type = __webpack_require__(945); + +module.exports = new Type('tag:yaml.org,2002:str', { + kind: 'scalar', + construct: function (data) { return data !== null ? data : ''; } +}); + + +/***/ }), +/* 575 */, +/* 576 */, +/* 577 */, +/* 578 */, +/* 579 */, +/* 580 */, +/* 581 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// Standard YAML's Failsafe schema. +// http://www.yaml.org/spec/1.2/spec.html#id2802346 + + + + + +var Schema = __webpack_require__(43); + + +module.exports = new Schema({ + explicit: [ + __webpack_require__(574), + __webpack_require__(921), + __webpack_require__(988) + ] +}); + + +/***/ }), +/* 582 */, +/* 583 */, +/* 584 */, +/* 585 */, +/* 586 */, +/* 587 */, +/* 588 */, +/* 589 */, +/* 590 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = __importStar(__webpack_require__(747)); +const core_1 = __webpack_require__(470); +const path_1 = __webpack_require__(622); +const utils_1 = __webpack_require__(870); +/** + * Creates a specification that describes how each file that is part of the artifact will be uploaded + * @param artifactName the name of the artifact being uploaded. 
Used during upload to denote where the artifact is stored on the server + * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file + * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact + */ +function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { + utils_1.checkArtifactName(artifactName); + const specifications = []; + if (!fs.existsSync(rootDirectory)) { + throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); + } + if (!fs.lstatSync(rootDirectory).isDirectory()) { + throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); + } + // Normalize and resolve, this allows for either absolute or relative paths to be used + rootDirectory = path_1.normalize(rootDirectory); + rootDirectory = path_1.resolve(rootDirectory); + /* + Example to demonstrate behavior + + Input: + artifactName: my-artifact + rootDirectory: '/home/user/files/plz-upload' + artifactFiles: [ + '/home/user/files/plz-upload/file1.txt', + '/home/user/files/plz-upload/file2.txt', + '/home/user/files/plz-upload/dir/file3.txt' + ] + + Output: + specifications: [ + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file2.txt'], + ['/home/user/files/plz-upload/file1.txt', 'my-artifact/dir/file3.txt'] + ] + */ + for (let file of artifactFiles) { + if (!fs.existsSync(file)) { + throw new Error(`File ${file} does not exist`); + } + if (!fs.lstatSync(file).isDirectory()) { + // Normalize and resolve, this allows for either absolute or relative paths to be used + file = path_1.normalize(file); + file = path_1.resolve(file); + if (!file.startsWith(rootDirectory)) { + throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); + } + // Check for forbidden characters in file paths that will be rejected during upload + const uploadPath = file.replace(rootDirectory, ''); + utils_1.checkArtifactFilePath(uploadPath); + /* + uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all + be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts + + path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt + join('artifact-name/', 'file-to-upload.txt') + join('artifact-name/', '/file-to-upload.txt') + join('artifact-name', 'file-to-upload.txt') + join('artifact-name', '/file-to-upload.txt') + */ + specifications.push({ + absoluteFilePath: file, + uploadFilePath: path_1.join(artifactName, uploadPath) + }); + } + else { + // Directories are rejected by the server during upload + core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); + } + } + return specifications; +} +exports.getUploadSpecification = getUploadSpecification; +//# sourceMappingURL=upload-specification.js.map + +/***/ }), +/* 591 */, +/* 592 */, +/* 593 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const http = __webpack_require__(605); +const https = __webpack_require__(211); + +function requestUrl(url) { + return new Promise((resolve, reject) => { + (url.startsWith("https://") ? 
https : http).get(url, response => { + let chunks_of_data = []; + response.on("data", fragments => chunks_of_data.push(fragments)); + response.on("end", () => + resolve(Buffer.concat(chunks_of_data).toString()) + ); + response.on("error", error => reject(error)); + }); + }); +} + +async function getUrlContent(url) { + try { + return await requestUrl(url); + } catch (error) { + throw new Error(`Error getting ${url}. Error: ${error}`); + } +} + +module.exports = { + getUrlContent +}; + + +/***/ }), +/* 594 */, +/* 595 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = minimatch +minimatch.Minimatch = Minimatch + +var path = { sep: '/' } +try { + path = __webpack_require__(622) +} catch (er) {} + +var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} +var expand = __webpack_require__(306) + +var plTypes = { + '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, + '?': { open: '(?:', close: ')?' }, + '+': { open: '(?:', close: ')+' }, + '*': { open: '(?:', close: ')*' }, + '@': { open: '(?:', close: ')' } +} + +// any single thing other than / +// don't need to escape / when using new RegExp() +var qmark = '[^/]' + +// * => any number of characters +var star = qmark + '*?' + +// ** when dots are allowed. Anything goes, except .. and . +// not (^ or / followed by one or two dots followed by $ or /), +// followed by anything, any number of times. +var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' + +// not a ^ or / followed by a dot, +// followed by anything, any number of times. +var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' + +// characters that need to be escaped in RegExp. +var reSpecials = charSet('().*{}+?[]^$\\!') + +// "abc" -> { a:true, b:true, c:true } +function charSet (s) { + return s.split('').reduce(function (set, c) { + set[c] = true + return set + }, {}) +} + +// normalizes slashes. +var slashSplit = /\/+/ + +minimatch.filter = filter +function filter (pattern, options) { + options = options || {} + return function (p, i, list) { + return minimatch(p, pattern, options) + } +} + +function ext (a, b) { + a = a || {} + b = b || {} + var t = {} + Object.keys(b).forEach(function (k) { + t[k] = b[k] + }) + Object.keys(a).forEach(function (k) { + t[k] = a[k] + }) + return t +} + +minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return minimatch + + var orig = minimatch + + var m = function minimatch (p, pattern, options) { + return orig.minimatch(p, pattern, ext(def, options)) + } + + m.Minimatch = function Minimatch (pattern, options) { + return new orig.Minimatch(pattern, ext(def, options)) + } + + return m +} + +Minimatch.defaults = function (def) { + if (!def || !Object.keys(def).length) return Minimatch + return minimatch.defaults(def).Minimatch +} + +function minimatch (p, pattern, options) { + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + + // shortcut: comments match nothing. 
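+  // e.g. minimatch('#cache', '#cache') === false here, while passing
+  // { nocomment: true } makes the leading '#' literal so the same call
+  // matches.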
+ if (!options.nocomment && pattern.charAt(0) === '#') { + return false + } + + // "" only matches "" + if (pattern.trim() === '') return p === '' + + return new Minimatch(pattern, options).match(p) +} + +function Minimatch (pattern, options) { + if (!(this instanceof Minimatch)) { + return new Minimatch(pattern, options) + } + + if (typeof pattern !== 'string') { + throw new TypeError('glob pattern string required') + } + + if (!options) options = {} + pattern = pattern.trim() + + // windows support: need to use /, not \ + if (path.sep !== '/') { + pattern = pattern.split(path.sep).join('/') + } + + this.options = options + this.set = [] + this.pattern = pattern + this.regexp = null + this.negate = false + this.comment = false + this.empty = false + + // make the set of regexps etc. + this.make() +} + +Minimatch.prototype.debug = function () {} + +Minimatch.prototype.make = make +function make () { + // don't do it more than once. + if (this._made) return + + var pattern = this.pattern + var options = this.options + + // empty patterns and comments match nothing. + if (!options.nocomment && pattern.charAt(0) === '#') { + this.comment = true + return + } + if (!pattern) { + this.empty = true + return + } + + // step 1: figure out negation, etc. + this.parseNegate() + + // step 2: expand braces + var set = this.globSet = this.braceExpand() + + if (options.debug) this.debug = console.error + + this.debug(this.pattern, set) + + // step 3: now we have a set, so turn each one into a series of path-portion + // matching patterns. + // These will be regexps, except in the case of "**", which is + // set to the GLOBSTAR object for globstar behavior, + // and will not contain any / characters + set = this.globParts = set.map(function (s) { + return s.split(slashSplit) + }) + + this.debug(this.pattern, set) + + // glob --> regexps + set = set.map(function (s, si, set) { + return s.map(this.parse, this) + }, this) + + this.debug(this.pattern, set) + + // filter out everything that didn't compile properly. + set = set.filter(function (s) { + return s.indexOf(false) === -1 + }) + + this.debug(this.pattern, set) + + this.set = set +} + +Minimatch.prototype.parseNegate = parseNegate +function parseNegate () { + var pattern = this.pattern + var negate = false + var options = this.options + var negateOffset = 0 + + if (options.nonegate) return + + for (var i = 0, l = pattern.length + ; i < l && pattern.charAt(i) === '!' + ; i++) { + negate = !negate + negateOffset++ + } + + if (negateOffset) this.pattern = pattern.substr(negateOffset) + this.negate = negate +} + +// Brace expansion: +// a{b,c}d -> abd acd +// a{b,}c -> abc ac +// a{0..3}d -> a0d a1d a2d a3d +// a{b,c{d,e}f}g -> abg acdfg acefg +// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg +// +// Invalid sets are not expanded. +// a{2..}b -> a{2..}b +// a{b}c -> a{b}c +minimatch.braceExpand = function (pattern, options) { + return braceExpand(pattern, options) +} + +Minimatch.prototype.braceExpand = braceExpand + +function braceExpand (pattern, options) { + if (!options) { + if (this instanceof Minimatch) { + options = this.options + } else { + options = {} + } + } + + pattern = typeof pattern === 'undefined' + ? this.pattern : pattern + + if (typeof pattern === 'undefined') { + throw new TypeError('undefined pattern') + } + + if (options.nobrace || + !pattern.match(/\{.*\}/)) { + // shortcut. no need to expand. + return [pattern] + } + + return expand(pattern) +} + +// parse a component of the expanded set. 
+// At this point, no pattern may contain "/" in it +// so we're going to return a 2d array, where each entry is the full +// pattern, split on '/', and then turned into a regular expression. +// A regexp is made at the end which joins each array with an +// escaped /, and another full one which joins each regexp with |. +// +// Following the lead of Bash 4.1, note that "**" only has special meaning +// when it is the *only* thing in a path portion. Otherwise, any series +// of * is equivalent to a single *. Globstar behavior is enabled by +// default, and can be disabled by setting options.noglobstar. +Minimatch.prototype.parse = parse +var SUBPARSE = {} +function parse (pattern, isSub) { + if (pattern.length > 1024 * 64) { + throw new TypeError('pattern is too long') + } + + var options = this.options + + // shortcuts + if (!options.noglobstar && pattern === '**') return GLOBSTAR + if (pattern === '') return '' + + var re = '' + var hasMagic = !!options.nocase + var escaping = false + // ? => one single character + var patternListStack = [] + var negativeLists = [] + var stateChar + var inClass = false + var reClassStart = -1 + var classStart = -1 + // . and .. never match anything that doesn't start with ., + // even when options.dot is set. + var patternStart = pattern.charAt(0) === '.' ? '' // anything + // not (start or / followed by . or .. followed by / or end) + : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' + : '(?!\\.)' + var self = this + + function clearStateChar () { + if (stateChar) { + // we had some state-tracking character + // that wasn't consumed by this pass. + switch (stateChar) { + case '*': + re += star + hasMagic = true + break + case '?': + re += qmark + hasMagic = true + break + default: + re += '\\' + stateChar + break } - }], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentStatus: ["POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createHook: ["POST /repos/{owner}/{repo}/hooks", {}, { - renamed: ["repos", "createWebhook"] - }], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateFile: ["PUT /repos/{owner}/{repo}/contents/{path}", {}, { - renamed: ["repos", "createOrUpdateFileContents"] - }], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] + self.debug('clearStateChar %j %j', stateChar, re) + stateChar = false + } + } + + for (var i = 0, len = pattern.length, c + ; (i < len) && (c = pattern.charAt(i)) + ; i++) { + this.debug('%s\t%s %s %j', pattern, i, re, c) + + // skip over any that are escaped. + if (escaping && reSpecials[c]) { + re += '\\' + c + escaping = false + continue + } + + switch (c) { + case '/': + // completely not allowed, even escaped. + // Should already be path-split by now. + return false + + case '\\': + clearStateChar() + escaping = true + continue + + // the various stateChar values + // for the "extglob" stuff. 
+ case '?': + case '*': + case '+': + case '@': + case '!': + this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) + + // all of those are literals inside a class, except that + // the glob [!a] means [^a] in regexp + if (inClass) { + this.debug(' in class') + if (c === '!' && i === classStart + 1) c = '^' + re += c + continue + } + + // if we already have a stateChar, then it means + // that there was something like ** or +? in there. + // Handle the stateChar, then proceed with this one. + self.debug('call clearStateChar %j', stateChar) + clearStateChar() + stateChar = c + // if extglob is disabled, then +(asdf|foo) isn't a thing. + // just clear the statechar *now*, rather than even diving into + // the patternList stuff. + if (options.noext) clearStateChar() + continue + + case '(': + if (inClass) { + re += '(' + continue + } + + if (!stateChar) { + re += '\\(' + continue + } + + patternListStack.push({ + type: stateChar, + start: i - 1, + reStart: re.length, + open: plTypes[stateChar].open, + close: plTypes[stateChar].close + }) + // negation is (?:(?!js)[^/]*) + re += stateChar === '!' ? '(?:(?!(?:' : '(?:' + this.debug('plType %j %j', stateChar, re) + stateChar = false + continue + + case ')': + if (inClass || !patternListStack.length) { + re += '\\)' + continue + } + + clearStateChar() + hasMagic = true + var pl = patternListStack.pop() + // negation is (?:(?!js)[^/]*) + // The others are (?:) + re += pl.close + if (pl.type === '!') { + negativeLists.push(pl) + } + pl.reEnd = re.length + continue + + case '|': + if (inClass || !patternListStack.length || escaping) { + re += '\\|' + escaping = false + continue + } + + clearStateChar() + re += '|' + continue + + // these are mostly the same in regexp and glob + case '[': + // swallow any state-tracking char before the [ + clearStateChar() + + if (inClass) { + re += '\\' + c + continue + } + + inClass = true + classStart = i + reClassStart = re.length + re += c + continue + + case ']': + // a right bracket shall lose its special + // meaning and represent itself in + // a bracket expression if it occurs + // first in the list. -- POSIX.2 2.8.3.2 + if (i === classStart + 1 || !inClass) { + re += '\\' + c + escaping = false + continue + } + + // handle the case where we left a class open. + // "[z-a]" is valid, equivalent to "\[z-a\]" + if (inClass) { + // split where the last [ was, make sure we don't have + // an invalid re. if so, re-walk the contents of the + // would-be class to re-translate any characters that + // were passed through as-is + // TODO: It would probably be faster to determine this + // without a try/catch and a new RegExp, but it's tricky + // to do safely. For now, this is safe and works. + var cs = pattern.substring(classStart + 1, i) + try { + RegExp('[' + cs + ']') + } catch (er) { + // not a valid class! + var sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' + hasMagic = hasMagic || sp[1] + inClass = false + continue + } + } + + // finish up the class. + hasMagic = true + inClass = false + re += c + continue + + default: + // swallow any state char that wasn't consumed + clearStateChar() + + if (escaping) { + // no need + escaping = false + } else if (reSpecials[c] + && !(c === '^' && inClass)) { + re += '\\' + } + + re += c + + } // switch + } // for + + // handle the case where we left a class open. + // "[abc" is valid, equivalent to "\[abc" + if (inClass) { + // split where the last [ was, and escape it + // this is a huge pita. 
We now have to re-walk + // the contents of the would-be class to re-translate + // any characters that were passed through as-is + cs = pattern.substr(classStart + 1) + sp = this.parse(cs, SUBPARSE) + re = re.substr(0, reClassStart) + '\\[' + sp[0] + hasMagic = hasMagic || sp[1] + } + + // handle the case where we had a +( thing at the *end* + // of the pattern. + // each pattern list stack adds 3 chars, and we need to go through + // and escape any | chars that were passed through as-is for the regexp. + // Go through and escape them, taking care not to double-escape any + // | chars that were already escaped. + for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { + var tail = re.slice(pl.reStart + pl.open.length) + this.debug('setting tail', re, pl) + // maybe some even number of \, then maybe 1 \, followed by a | + tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { + if (!$2) { + // the | isn't already escaped, so escape it. + $2 = '\\' } - }], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}", {}, { - renamed: ["repos", "createCommitStatus"] - }], - createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate", { - mediaType: { - previews: ["baptiste"] + + // need to escape all those slashes *again*, without escaping the + // one that we need for escaping the | character. As it works out, + // escaping an even number of slashes can be done by simply repeating + // it exactly after itself. That's why this trick works. + // + // I am sorry that you have to see this. + return $1 + $1 + $2 + '|' + }) + + this.debug('tail=%j\n %s', tail, tail, pl, re) + var t = pl.type === '*' ? star + : pl.type === '?' ? qmark + : '\\' + pl.type + + hasMagic = true + re = re.slice(0, pl.reStart) + t + '\\(' + tail + } + + // handle trailing things that only matter at the very end. + clearStateChar() + if (escaping) { + // trailing \\ + re += '\\\\' + } + + // only need to apply the nodot start if the re starts with + // something that could conceivably capture a dot + var addPatternStart = false + switch (re.charAt(0)) { + case '.': + case '[': + case '(': addPatternStart = true + } + + // Hack to work around lack of negative lookbehind in JS + // A pattern like: *.!(x).!(y|z) needs to ensure that a name + // like 'a.xyz.yz' doesn't match. So, the first negative + // lookahead, has to look ALL the way ahead, to the end of + // the pattern. + for (var n = negativeLists.length - 1; n > -1; n--) { + var nl = negativeLists[n] + + var nlBefore = re.slice(0, nl.reStart) + var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) + var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + var nlAfter = re.slice(nl.reEnd) + + nlLast += nlAfter + + // Handle nested stuff like *(*.js|!(*.json)), where open parens + // mean that we should *not* include the ) in the bit that is considered + // "after" the negated section. + var openParensBefore = nlBefore.split('(').length - 1 + var cleanAfter = nlAfter + for (i = 0; i < openParensBefore; i++) { + cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') + } + nlAfter = cleanAfter + + var dollar = '' + if (nlAfter === '' && isSub !== SUBPARSE) { + dollar = '$' + } + var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast + re = newRe + } + + // if the re is not "" at this point, then we need to make sure + // it doesn't match against an empty path part. + // Otherwise a/* will match a/, which it should not. 
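+  // e.g. the '*' in 'a/*' compiles to the optional star body above, which
+  // can match the empty string; prefixing '(?=.)' demands at least one
+  // character, so the empty trailing segment of 'a/' no longer matches.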
+ if (re !== '' && hasMagic) { + re = '(?=.)' + re + } + + if (addPatternStart) { + re = patternStart + re + } + + // parsing just a piece of a larger pattern. + if (isSub === SUBPARSE) { + return [re, hasMagic] + } + + // skip the regexp for non-magical patterns + // unescape anything in it, though, so that it'll be + // an exact match against a file etc. + if (!hasMagic) { + return globUnescape(pattern) + } + + var flags = options.nocase ? 'i' : '' + try { + var regExp = new RegExp('^' + re + '$', flags) + } catch (er) { + // If it was an invalid regular expression, then it can't match + // anything. This trick looks for a character after the end of + // the string, which is of course impossible, except in multi-line + // mode, but it's not a /m regex. + return new RegExp('$.') + } + + regExp._glob = pattern + regExp._src = re + + return regExp +} + +minimatch.makeRe = function (pattern, options) { + return new Minimatch(pattern, options || {}).makeRe() +} + +Minimatch.prototype.makeRe = makeRe +function makeRe () { + if (this.regexp || this.regexp === false) return this.regexp + + // at this point, this.set is a 2d array of partial + // pattern strings, or "**". + // + // It's better to use .match(). This function shouldn't + // be used, really, but it's pretty convenient sometimes, + // when you just want to work with a regex. + var set = this.set + + if (!set.length) { + this.regexp = false + return this.regexp + } + var options = this.options + + var twoStar = options.noglobstar ? star + : options.dot ? twoStarDot + : twoStarNoDot + var flags = options.nocase ? 'i' : '' + + var re = set.map(function (pattern) { + return pattern.map(function (p) { + return (p === GLOBSTAR) ? twoStar + : (typeof p === 'string') ? regExpEscape(p) + : p._src + }).join('\\\/') + }).join('|') + + // must match entire pattern + // ending in a * or ** will make it less strict. + re = '^(?:' + re + ')$' + + // can match anything, as long as it's not this. + if (this.negate) re = '^(?!' + re + ').*$' + + try { + this.regexp = new RegExp(re, flags) + } catch (ex) { + this.regexp = false + } + return this.regexp +} + +minimatch.match = function (list, pattern, options) { + options = options || {} + var mm = new Minimatch(pattern, options) + list = list.filter(function (f) { + return mm.match(f) + }) + if (mm.options.nonull && !list.length) { + list.push(pattern) + } + return list +} + +Minimatch.prototype.match = match +function match (f, partial) { + this.debug('match', f, this.pattern) + // short-circuit in the case of busted things. + // comments, etc. + if (this.comment) return false + if (this.empty) return f === '' + + if (f === '/' && partial) return true + + var options = this.options + + // windows: need to use /, not \ + if (path.sep !== '/') { + f = f.split(path.sep).join('/') + } + + // treat the test path as a set of pathparts. + f = f.split(slashSplit) + this.debug(this.pattern, 'split', f) + + // just ONE of the pattern sets in this.set needs to match + // in order for it to be valid. If negating, then just one + // match means that we have failed. + // Either way, return on the first hit. 
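+  // e.g. the braces in "{a,b}/c" expand to two pattern sets,
+  // [["a", "c"], ["b", "c"]]; "b/c" misses the first set and hits
+  // on the second, so match() returns true (or false when negating).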
+ + var set = this.set + this.debug(this.pattern, 'set', set) + + // Find the basename of the path by looking for the last non-empty segment + var filename + var i + for (i = f.length - 1; i >= 0; i--) { + filename = f[i] + if (filename) break + } + + for (i = 0; i < set.length; i++) { + var pattern = set[i] + var file = f + if (options.matchBase && pattern.length === 1) { + file = [filename] + } + var hit = this.matchOne(file, pattern, partial) + if (hit) { + if (options.flipNegate) return true + return !this.negate + } + } + + // didn't get any hits. this is success if it's a negative + // pattern, failure otherwise. + if (options.flipNegate) return false + return this.negate +} + +// set partial to true to test if, for example, +// "/a/b" matches the start of "/*/b/*/d" +// Partial means, if you run out of file before you run +// out of pattern, then that's fine, as long as all +// the parts match. +Minimatch.prototype.matchOne = function (file, pattern, partial) { + var options = this.options + + this.debug('matchOne', + { 'this': this, file: file, pattern: pattern }) + + this.debug('matchOne', file.length, pattern.length) + + for (var fi = 0, + pi = 0, + fl = file.length, + pl = pattern.length + ; (fi < fl) && (pi < pl) + ; fi++, pi++) { + this.debug('matchOne loop') + var p = pattern[pi] + var f = file[fi] + + this.debug(pattern, p, f) + + // should be impossible. + // some invalid regexp stuff in the set. + if (p === false) return false + + if (p === GLOBSTAR) { + this.debug('GLOBSTAR', [pattern, p, f]) + + // "**" + // a/**/b/**/c would match the following: + // a/b/x/y/z/c + // a/x/y/z/b/c + // a/b/x/b/x/c + // a/b/c + // To do this, take the rest of the pattern after + // the **, and see if it would match the file remainder. + // If so, return success. + // If not, the ** "swallows" a segment, and try again. + // This is recursively awful. + // + // a/**/b/**/c matching a/b/x/y/z/c + // - a matches a + // - doublestar + // - matchOne(b/x/y/z/c, b/**/c) + // - b matches b + // - doublestar + // - matchOne(x/y/z/c, c) -> no + // - matchOne(y/z/c, c) -> no + // - matchOne(z/c, c) -> no + // - matchOne(c, c) yes, hit + var fr = fi + var pr = pi + 1 + if (pr === pl) { + this.debug('** at the end') + // a ** at the end will just swallow the rest. + // We have found a match. + // however, it will not swallow /.x, unless + // options.dot is set. + // . and .. are *never* matched by **, for explosively + // exponential reasons. + for (; fi < fl; fi++) { + if (file[fi] === '.' || file[fi] === '..' || + (!options.dot && file[fi].charAt(0) === '.')) return false + } + return true } - }], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}"], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions"], - deleteAdminBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - deleteBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection"], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] + + // ok, let's see if we can swallow whatever we can. + while (fr < fl) { + var swallowee = file[fr] + + this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) + + // XXX remove this slice. 
Just pass the start index. + if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { + this.debug('globstar found match!', fr, fl, swallowee) + // found a match. + return true + } else { + // can't swallow "." or ".." ever. + // can only swallow ".foo" when explicitly asked. + if (swallowee === '.' || swallowee === '..' || + (!options.dot && swallowee.charAt(0) === '.')) { + this.debug('dot detected!', file, fr, pattern, pr) + break + } + + // ** swallows a segment, and continue. + this.debug('globstar swallow a segment, and continue') + fr++ + } } - }], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: ["DELETE /repos/{owner}/{repo}/deployments/{deployment_id}"], - deleteDownload: ["DELETE /repos/{owner}/{repo}/downloads/{download_id}"], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteHook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}", {}, { - renamed: ["repos", "deleteWebhook"] - }], - deleteInvitation: ["DELETE /repos/{owner}/{repo}/invitations/{invitation_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] + + // no match was found. + // However, in partial mode, we can't say this is necessarily over. + // If there's more *pattern* left, then + if (partial) { + // ran out of file + this.debug('\n>>> no match, partial?', file, fr, pattern, pr) + if (fr === fl) return true } - }], - deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] + return false + } + + // something other than ** + // non-magic patterns just have to match exactly + // patterns with magic have been turned into regexps. + var hit + if (typeof p === 'string') { + if (options.nocase) { + hit = f.toLowerCase() === p.toLowerCase() + } else { + hit = f === p } - }], - disablePagesSite: ["DELETE /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] + this.debug('string match', p, f, hit) + } else { + hit = f.match(p) + this.debug('pattern match', p, f, hit) + } + + if (!hit) return false + } + + // Note: ending in / means that we'll get a final "" + // at the end of the pattern. This can only match a + // corresponding "" at the end of the file. + // If the file ends in /, then it can only match a + // a pattern that ends in /, unless the pattern just + // doesn't have any more for it. But, a/b/ should *not* + // match "a/b/*", even though "" matches against the + // [^/]*? pattern, except in partial mode, where it might + // simply not be reached yet. + // However, a/b/ should still satisfy a/* + + // now either we fell off the end of the pattern, or we're done. + if (fi === fl && pi === pl) { + // ran out of pattern and filename at the same time. + // an exact hit! + return true + } else if (fi === fl) { + // ran out of file, but still had pattern left. + // this is ok if we're doing the match as part of + // a glob fs traversal. + return partial + } else if (pi === pl) { + // ran out of pattern, still have file left. + // this is only acceptable if we're on the very last + // empty segment of a file with a trailing slash. 
+ // a/* should match a/b/ + var emptyFileEnd = (fi === fl - 1) && (file[fi] === '') + return emptyFileEnd + } + + // should be unreachable. + throw new Error('wtf?') +} + +// replace stuff like \* with * +function globUnescape (s) { + return s.replace(/\\(.)/g, '$1') +} + +function regExpEscape (s) { + return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') +} + + +/***/ }), +/* 596 */, +/* 597 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const pathHelper = __webpack_require__(972); +const internal_match_kind_1 = __webpack_require__(327); +const IS_WINDOWS = process.platform === 'win32'; +/** + * Given an array of patterns, returns an array of paths to search. + * Duplicates and paths under other included paths are filtered out. + */ +function getSearchPaths(patterns) { + // Ignore negate patterns + patterns = patterns.filter(x => !x.negate); + // Create a map of all search paths + const searchPathMap = {}; + for (const pattern of patterns) { + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + searchPathMap[key] = 'candidate'; + } + const result = []; + for (const pattern of patterns) { + // Check if already included + const key = IS_WINDOWS + ? pattern.searchPath.toUpperCase() + : pattern.searchPath; + if (searchPathMap[key] === 'included') { + continue; + } + // Check for an ancestor search path + let foundAncestor = false; + let tempKey = key; + let parent = pathHelper.dirname(tempKey); + while (parent !== tempKey) { + if (searchPathMap[parent]) { + foundAncestor = true; + break; + } + tempKey = parent; + parent = pathHelper.dirname(tempKey); + } + // Include the search pattern in the result + if (!foundAncestor) { + result.push(pattern.searchPath); + searchPathMap[key] = 'included'; + } + } + return result; +} +exports.getSearchPaths = getSearchPaths; +/** + * Matches the patterns against the path + */ +function match(patterns, itemPath) { + let result = internal_match_kind_1.MatchKind.None; + for (const pattern of patterns) { + if (pattern.negate) { + result &= ~pattern.match(itemPath); + } + else { + result |= pattern.match(itemPath); + } + } + return result; +} +exports.match = match; +/** + * Checks whether to descend further into the directory + */ +function partialMatch(patterns, itemPath) { + return patterns.some(x => !x.negate && x.partialMatch(itemPath)); +} +exports.partialMatch = partialMatch; +//# sourceMappingURL=internal-pattern-helper.js.map + +/***/ }), +/* 598 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var fs = __webpack_require__(747) +var polyfills = __webpack_require__(920) +var legacy = __webpack_require__(466) +var clone = __webpack_require__(943) + +var util = __webpack_require__(669) + +/* istanbul ignore next - node 0.x polyfill */ +var gracefulQueue +var previousSymbol + +/* istanbul ignore else - node 0.x polyfill */ +if (typeof Symbol === 'function' && typeof Symbol.for === 'function') { + gracefulQueue = Symbol.for('graceful-fs.queue') + // This is used in testing by future versions + previousSymbol = Symbol.for('graceful-fs.previous') +} else { + gracefulQueue = '___graceful-fs.queue' + previousSymbol = '___graceful-fs.previous' +} + +function noop () {} + +function publishQueue(context, queue) { + Object.defineProperty(context, gracefulQueue, { + get: function() { + return queue + } + }) +} + +var debug = noop +if (util.debuglog) + debug = util.debuglog('gfs4') +else if 
(/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) + debug = function() { + var m = util.format.apply(util, arguments) + m = 'GFS4: ' + m.split(/\n/).join('\nGFS4: ') + console.error(m) + } + +// Once time initialization +if (!fs[gracefulQueue]) { + // This queue can be shared by multiple loaded instances + var queue = global[gracefulQueue] || [] + publishQueue(fs, queue) + + // Patch fs.close/closeSync to shared queue version, because we need + // to retry() whenever a close happens *anywhere* in the program. + // This is essential when multiple graceful-fs instances are + // in play at the same time. + fs.close = (function (fs$close) { + function close (fd, cb) { + return fs$close.call(fs, fd, function (err) { + // This function uses the graceful-fs shared queue + if (!err) { + retry() + } + + if (typeof cb === 'function') + cb.apply(this, arguments) + }) + } + + Object.defineProperty(close, previousSymbol, { + value: fs$close + }) + return close + })(fs.close) + + fs.closeSync = (function (fs$closeSync) { + function closeSync (fd) { + // This function uses the graceful-fs shared queue + fs$closeSync.apply(fs, arguments) + retry() + } + + Object.defineProperty(closeSync, previousSymbol, { + value: fs$closeSync + }) + return closeSync + })(fs.closeSync) + + if (/\bgfs4\b/i.test(process.env.NODE_DEBUG || '')) { + process.on('exit', function() { + debug(fs[gracefulQueue]) + __webpack_require__(357).equal(fs[gracefulQueue].length, 0) + }) + } +} + +if (!global[gracefulQueue]) { + publishQueue(global, fs[gracefulQueue]); +} + +module.exports = patch(clone(fs)) +if (process.env.TEST_GRACEFUL_FS_GLOBAL_PATCH && !fs.__patched) { + module.exports = patch(fs) + fs.__patched = true; +} + +function patch (fs) { + // Everything that references the open() function needs to be in here + polyfills(fs) + fs.gracefulify = patch + + fs.createReadStream = createReadStream + fs.createWriteStream = createWriteStream + var fs$readFile = fs.readFile + fs.readFile = readFile + function readFile (path, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$readFile(path, options, cb) + + function go$readFile (path, options, cb) { + return fs$readFile(path, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readFile, [path, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + + var fs$writeFile = fs.writeFile + fs.writeFile = writeFile + function writeFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$writeFile(path, data, options, cb) + + function go$writeFile (path, data, options, cb) { + return fs$writeFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$writeFile, [path, data, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + + var fs$appendFile = fs.appendFile + if (fs$appendFile) + fs.appendFile = appendFile + function appendFile (path, data, options, cb) { + if (typeof options === 'function') + cb = options, options = null + + return go$appendFile(path, data, options, cb) + + function go$appendFile (path, data, options, cb) { + return fs$appendFile(path, data, options, function (err) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$appendFile, [path, data, options, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) 
+ retry() + } + }) + } + } + + var fs$readdir = fs.readdir + fs.readdir = readdir + function readdir (path, options, cb) { + var args = [path] + if (typeof options !== 'function') { + args.push(options) + } else { + cb = options + } + args.push(go$readdir$cb) + + return go$readdir(args) + + function go$readdir$cb (err, files) { + if (files && files.sort) + files.sort() + + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$readdir, [args]]) + + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() } - }, { - renamed: ["repos", "deletePagesSite"] - }], - disableVulnerabilityAlerts: ["DELETE /repos/{owner}/{repo}/vulnerability-alerts", { - mediaType: { - previews: ["dorian"] + } + } + + function go$readdir (args) { + return fs$readdir.apply(fs, args) + } + + if (process.version.substr(0, 4) === 'v0.8') { + var legStreams = legacy(fs) + ReadStream = legStreams.ReadStream + WriteStream = legStreams.WriteStream + } + + var fs$ReadStream = fs.ReadStream + if (fs$ReadStream) { + ReadStream.prototype = Object.create(fs$ReadStream.prototype) + ReadStream.prototype.open = ReadStream$open + } + + var fs$WriteStream = fs.WriteStream + if (fs$WriteStream) { + WriteStream.prototype = Object.create(fs$WriteStream.prototype) + WriteStream.prototype.open = WriteStream$open + } + + Object.defineProperty(fs, 'ReadStream', { + get: function () { + return ReadStream + }, + set: function (val) { + ReadStream = val + }, + enumerable: true, + configurable: true + }) + Object.defineProperty(fs, 'WriteStream', { + get: function () { + return WriteStream + }, + set: function (val) { + WriteStream = val + }, + enumerable: true, + configurable: true + }) + + // legacy names + var FileReadStream = ReadStream + Object.defineProperty(fs, 'FileReadStream', { + get: function () { + return FileReadStream + }, + set: function (val) { + FileReadStream = val + }, + enumerable: true, + configurable: true + }) + var FileWriteStream = WriteStream + Object.defineProperty(fs, 'FileWriteStream', { + get: function () { + return FileWriteStream + }, + set: function (val) { + FileWriteStream = val + }, + enumerable: true, + configurable: true + }) + + function ReadStream (path, options) { + if (this instanceof ReadStream) + return fs$ReadStream.apply(this, arguments), this + else + return ReadStream.apply(Object.create(ReadStream.prototype), arguments) + } + + function ReadStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + if (that.autoClose) + that.destroy() + + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) + that.read() } - }], - downloadArchive: ["GET /repos/{owner}/{repo}/{archive_format}/{ref}"], - enableAutomatedSecurityFixes: ["PUT /repos/{owner}/{repo}/automated-security-fixes", { - mediaType: { - previews: ["london"] + }) + } + + function WriteStream (path, options) { + if (this instanceof WriteStream) + return fs$WriteStream.apply(this, arguments), this + else + return WriteStream.apply(Object.create(WriteStream.prototype), arguments) + } + + function WriteStream$open () { + var that = this + open(that.path, that.flags, that.mode, function (err, fd) { + if (err) { + that.destroy() + that.emit('error', err) + } else { + that.fd = fd + that.emit('open', fd) } - }], - enablePagesSite: ["POST /repos/{owner}/{repo}/pages", { - mediaType: { - previews: ["switcheroo"] + }) + } + + function createReadStream (path, options) { + return new fs.ReadStream(path, options) + } + + function createWriteStream 
(path, options) { + return new fs.WriteStream(path, options) + } + + var fs$open = fs.open + fs.open = open + function open (path, flags, mode, cb) { + if (typeof mode === 'function') + cb = mode, mode = null + + return go$open(path, flags, mode, cb) + + function go$open (path, flags, mode, cb) { + return fs$open(path, flags, mode, function (err, fd) { + if (err && (err.code === 'EMFILE' || err.code === 'ENFILE')) + enqueue([go$open, [path, flags, mode, cb]]) + else { + if (typeof cb === 'function') + cb.apply(this, arguments) + retry() + } + }) + } + } + + return fs +} + +function enqueue (elem) { + debug('ENQUEUE', elem[0].name, elem[1]) + fs[gracefulQueue].push(elem) +} + +function retry () { + var elem = fs[gracefulQueue].shift() + if (elem) { + debug('RETRY', elem[0].name, elem[1]) + elem[0].apply(null, elem[1]) + } +} + + +/***/ }), +/* 599 */, +/* 600 */, +/* 601 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +const core = __webpack_require__(470); +/** + * Returns a copy with defaults filled in. + */ +function getOptions(copy) { + const result = { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; + if (copy) { + if (typeof copy.followSymbolicLinks === 'boolean') { + result.followSymbolicLinks = copy.followSymbolicLinks; + core.debug(`followSymbolicLinks '${result.followSymbolicLinks}'`); + } + if (typeof copy.implicitDescendants === 'boolean') { + result.implicitDescendants = copy.implicitDescendants; + core.debug(`implicitDescendants '${result.implicitDescendants}'`); + } + if (typeof copy.omitBrokenSymbolicLinks === 'boolean') { + result.omitBrokenSymbolicLinks = copy.omitBrokenSymbolicLinks; + core.debug(`omitBrokenSymbolicLinks '${result.omitBrokenSymbolicLinks}'`); + } + } + return result; +} +exports.getOptions = getOptions; +//# sourceMappingURL=internal-glob-options-helper.js.map + +/***/ }), +/* 602 */, +/* 603 */, +/* 604 */, +/* 605 */ +/***/ (function(module) { + +module.exports = require("http"); + +/***/ }), +/* 606 */, +/* 607 */, +/* 608 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const fs = __importStar(__webpack_require__(747)); +const core = __importStar(__webpack_require__(470)); +const tmp = __importStar(__webpack_require__(875)); +const stream = __importStar(__webpack_require__(794)); +const utils_1 = __webpack_require__(870); +const config_variables_1 = __webpack_require__(401); +const util_1 = __webpack_require__(669); +const url_1 = __webpack_require__(835); +const perf_hooks_1 = __webpack_require__(630); +const status_reporter_1 = __webpack_require__(176); +const http_manager_1 = __webpack_require__(452); +const upload_gzip_1 = __webpack_require__(647); +const stat = util_1.promisify(fs.stat); +class UploadHttpClient { + constructor() { + this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); + this.statusReporter = new status_reporter_1.StatusReporter(10000); + } + /** + * Creates a file container for the new artifact in the remote blob storage/file service + * @param {string} artifactName Name of the artifact being created + * @returns The response from the Artifact Service if the file container was successfully created + */ + createArtifactInFileContainer(artifactName) { + return __awaiter(this, void 0, void 0, function* () { + const parameters = { + Type: 'actions_storage', + Name: artifactName + }; + const data = JSON.stringify(parameters, null, 2); + const artifactUrl = utils_1.getArtifactUrl(); + // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately + const client = this.uploadHttpManager.getClient(0); + const headers = utils_1.getUploadHeaders('application/json', false); + const rawResponse = yield client.post(artifactUrl, data, headers); + const body = yield rawResponse.readBody(); + if (utils_1.isSuccessStatusCode(rawResponse.message.statusCode) && body) { + return JSON.parse(body); + } + else if (utils_1.isForbiddenStatusCode(rawResponse.message.statusCode)) { + // if a 403 is returned when trying to create a file container, the customer has exceeded + // their storage quota so no new artifact containers can be created + throw new Error(`Artifact storage quota has been hit. 
Unable to upload any new artifacts`); + } + else { + utils_1.displayHttpDiagnostics(rawResponse); + throw new Error(`Unable to create a container for the artifact ${artifactName} at ${artifactUrl}`); + } + }); + } + /** + * Concurrently upload all of the files in chunks + * @param {string} uploadUrl Base Url for the artifact that was created + * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded + * @returns The size of all the files uploaded in bytes + */ + uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { + return __awaiter(this, void 0, void 0, function* () { + const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); + const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); + core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); + const parameters = []; + // by default, file uploads will continue if there is an error unless specified differently in the options + let continueOnError = true; + if (options) { + if (options.continueOnError === false) { + continueOnError = false; + } + } + // prepare the necessary parameters to upload all the files + for (const file of filesToUpload) { + const resourceUrl = new url_1.URL(uploadUrl); + resourceUrl.searchParams.append('itemPath', file.uploadFilePath); + parameters.push({ + file: file.absoluteFilePath, + resourceUrl: resourceUrl.toString(), + maxChunkSize: MAX_CHUNK_SIZE, + continueOnError + }); + } + const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()]; + const failedItemsToReport = []; + let currentFile = 0; + let completedFiles = 0; + let uploadFileSize = 0; + let totalFileSize = 0; + let abortPendingFileUploads = false; + this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length); + this.statusReporter.start(); + // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors + yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () { + while (currentFile < filesToUpload.length) { + const currentFileParameters = parameters[currentFile]; + currentFile += 1; + if (abortPendingFileUploads) { + failedItemsToReport.push(currentFileParameters.file); + continue; + } + const startTime = perf_hooks_1.performance.now(); + const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters); + if (core.isDebug()) { + core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`); + } + uploadFileSize += uploadFileResult.successfulUploadSize; + totalFileSize += uploadFileResult.totalSize; + if (uploadFileResult.isSuccess === false) { + failedItemsToReport.push(currentFileParameters.file); + if (!continueOnError) { + // fail fast + core.error(`aborting artifact upload`); + abortPendingFileUploads = true; + } + } + this.statusReporter.incrementProcessedCount(); + } + }))); + this.statusReporter.stop(); + // done uploading, safety dispose all connections + this.uploadHttpManager.disposeAndReplaceAllClients(); + core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`); + return { + uploadSize: uploadFileSize, + totalSize: totalFileSize, + failedItems: failedItemsToReport + }; + }); + } + /** + * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space. 
+ * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls + * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls + * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded + * @returns The size of the file that was uploaded in bytes along with any failed uploads + */ + uploadFileAsync(httpClientIndex, parameters) { + return __awaiter(this, void 0, void 0, function* () { + const totalFileSize = (yield stat(parameters.file)).size; + let offset = 0; + let isUploadSuccessful = true; + let failedChunkSizes = 0; + let uploadFileSize = 0; + let isGzip = true; + // the file that is being uploaded is less than 64k in size, to increase throughput and to minimize disk I/O + // for creating a new GZip file, an in-memory buffer is used for compression + if (totalFileSize < 65536) { + const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); + //An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, + // it will not properly get reset to the start of the stream if a chunk upload needs to be retried + let openUploadStream; + if (totalFileSize < buffer.byteLength) { + // compression did not help with reducing the size, use a readable stream from the original file for upload + openUploadStream = () => fs.createReadStream(parameters.file); + isGzip = false; + uploadFileSize = totalFileSize; + } + else { + // create a readable stream using a PassThrough stream that is both readable and writable + openUploadStream = () => { + const passThrough = new stream.PassThrough(); + passThrough.end(buffer); + return passThrough; + }; + uploadFileSize = buffer.byteLength; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // chunk failed to upload + isUploadSuccessful = false; + failedChunkSizes += uploadFileSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + } + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } + else { + // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the + // npm tmp-promise package and this file gets used to create a GZipped file + const tempFile = yield tmp.file(); + // create a GZip file of the original file being uploaded, the original file should not be modified in any way + uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); + let uploadFilePath = tempFile.path; + // compression did not help with size reduction, use the original file for upload and delete the temp GZip file + if (totalFileSize < uploadFileSize) { + uploadFileSize = totalFileSize; + uploadFilePath = parameters.file; + isGzip = false; + } + let abortFileUpload = false; + // upload only a single chunk at a time + while (offset < uploadFileSize) { + const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize); + // if an individual file is greater than 100MB (1024*1024*100) in size, display extra information about the upload status + if (uploadFileSize > 104857600) { + this.statusReporter.updateLargeFileStatus(parameters.file, offset, uploadFileSize); + } + const start = offset; + const end = offset + chunkSize - 1; + offset += parameters.maxChunkSize; + if 
(abortFileUpload) { + // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed + failedChunkSizes += chunkSize; + continue; + } + const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, { + start, + end, + autoClose: false + }), start, end, uploadFileSize, isGzip, totalFileSize); + if (!result) { + // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was + // successfully uploaded so the server may report a different size for what was uploaded + isUploadSuccessful = false; + failedChunkSizes += chunkSize; + core.warning(`Aborting upload for ${parameters.file} due to failure`); + abortFileUpload = true; + } + } + // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by + // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about + yield tempFile.cleanup(); + return { + isSuccess: isUploadSuccessful, + successfulUploadSize: uploadFileSize - failedChunkSizes, + totalSize: totalFileSize + }; + } + }); + } + /** + * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code + * indicates a retryable status, we try to upload the chunk as well + * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls + * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to + * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded + * @param {number} start Starting byte index of file that the chunk belongs to + * @param {number} end Ending byte index of file that the chunk belongs to + * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded + * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream + * @param {number} totalFileSize Original total size of the file that is being uploaded + * @returns if the chunk was successfully uploaded + */ + uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { + return __awaiter(this, void 0, void 0, function* () { + // prepare all the necessary headers before making any http call + const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize)); + const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { + const client = this.uploadHttpManager.getClient(httpClientIndex); + return yield client.sendStream('PUT', resourceUrl, openStream(), headers); + }); + let retryCount = 0; + const retryLimit = config_variables_1.getRetryLimit(); + // Increments the current retry count and then checks if the retry limit has been reached + // If there have been too many retries, fail so the download stops + const incrementAndCheckRetryLimit = (response) => { + retryCount++; + if (retryCount > retryLimit) { + if (response) { + utils_1.displayHttpDiagnostics(response); + } + core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); + return true; + } + return false; + }; + const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () { + this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); + if (retryAfterValue) { + 
core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); + yield new Promise(resolve => setTimeout(resolve, retryAfterValue)); + } + else { + const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); + yield new Promise(resolve => setTimeout(resolve, backoffTime)); + } + core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); + return; + }); + // allow for failed chunks to be retried multiple times + while (retryCount <= retryLimit) { + let response; + try { + response = yield uploadChunkRequest(); + } + catch (error) { + // if an error is caught, it is usually indicative of a timeout so retry the upload + core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`); + // eslint-disable-next-line no-console + console.log(error); + if (incrementAndCheckRetryLimit()) { + return false; + } + yield backOff(); + continue; + } + // Always read the body of the response. There is potential for a resource leak if the body is not read which will + // result in the connection remaining open along with unintended consequences when trying to dispose of the client + yield response.readBody(); + if (utils_1.isSuccessStatusCode(response.message.statusCode)) { + return true; + } + else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { + core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); + if (incrementAndCheckRetryLimit(response)) { + return false; + } + utils_1.isThrottledStatusCode(response.message.statusCode) + ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + : yield backOff(); + } + else { + core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); + utils_1.displayHttpDiagnostics(response); + return false; + } + } + return false; + }); + } + /** + * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact. 
+ * Updating the size indicates that we are done uploading all the contents of the artifact
+ */
+    patchArtifactSize(size, artifactName) {
+        return __awaiter(this, void 0, void 0, function* () {
+            const headers = utils_1.getUploadHeaders('application/json', false);
+            const resourceUrl = new url_1.URL(utils_1.getArtifactUrl());
+            resourceUrl.searchParams.append('artifactName', artifactName);
+            const parameters = { Size: size };
+            const data = JSON.stringify(parameters, null, 2);
+            core.debug(`URL is ${resourceUrl.toString()}`);
+            // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
+            const client = this.uploadHttpManager.getClient(0);
+            const response = yield client.patch(resourceUrl.toString(), data, headers);
+            const body = yield response.readBody();
+            if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
+                core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
+            }
+            else if (response.message.statusCode === 404) {
+                throw new Error(`An Artifact with the name ${artifactName} was not found`);
+            }
+            else {
+                utils_1.displayHttpDiagnostics(response);
+                core.info(body);
+                throw new Error(`Unable to finish uploading artifact ${artifactName} to ${resourceUrl}`);
+            }
+        });
+    }
+}
+exports.UploadHttpClient = UploadHttpClient;
+//# sourceMappingURL=upload-http-client.js.map
+
+/***/ }),
+/* 609 */,
+/* 610 */,
+/* 611 */
+/***/ (function(module, __unusedexports, __webpack_require__) {

"use strict";
+// Standard YAML's Core schema.
+// http://www.yaml.org/spec/1.2/spec.html#id2804923
+//
+// NOTE: JS-YAML does not support schema-specific tag resolution restrictions.
+// So, Core schema has no distinctions from JSON schema in JS-YAML.
+
+
+
+
+var Schema = __webpack_require__(43);
+
+
+module.exports = new Schema({
+  include: [
+    __webpack_require__(23)
+  ]
+});
+
+
+/***/ }),
+/* 612 */,
+/* 613 */,
+/* 614 */
+/***/ (function(module) {
+
+module.exports = require("events");
+
+/***/ }),
+/* 615 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const u = __webpack_require__(676).fromCallback
+const fs = __webpack_require__(598)
+const path = __webpack_require__(622)
+const mkdir = __webpack_require__(727)
+const remove = __webpack_require__(368)
+
+const emptyDir = u(function emptyDir (dir, callback) {
+  callback = callback || function () {}
+  fs.readdir(dir, (err, items) => {
+    if (err) return mkdir.mkdirs(dir, callback)
+
+    items = items.map(item => path.join(dir, item))
+
+    deleteItem()
+
+    function deleteItem () {
+      const item = items.pop()
+      if (!item) return callback()
+      remove.remove(item, err => {
+        if (err) return callback(err)
+        deleteItem()
+      })
+    }
+  })
+})
+
+function emptyDirSync (dir) {
+  let items
+  try {
+    items = fs.readdirSync(dir)
+  } catch {
+    return mkdir.mkdirsSync(dir)
+  }
+
+  items.forEach(item => {
+    item = path.join(dir, item)
+    remove.removeSync(item)
+  })
+}
+
+module.exports = {
+  emptyDirSync,
+  emptydirSync: emptyDirSync,
+  emptyDir,
+  emptydir: emptyDir
+}
+
+
+/***/ }),
+/* 616 */,
+/* 617 */,
+/* 618 */,
+/* 619 */
+/***/ (function(module) {
+
+module.exports = require("constants");
+
+/***/ }),
+/* 620 */,
+/* 621 */
+/***/ (function(module) {
+
+"use strict";
+
+module.exports = balanced;
+function balanced(a, b, str) {
+  if (a instanceof RegExp) a = maybeMatch(a, str);
+  if (b instanceof RegExp) b = maybeMatch(b, str);
+
+  var r = range(a, b, str);
+
+  return r && {
+    start: r[0],
+    end: r[1],
+    pre: 
str.slice(0, r[0]),
+    body: str.slice(r[0] + a.length, r[1]),
+    post: str.slice(r[1] + b.length)
+  };
+}
+
+function maybeMatch(reg, str) {
+  var m = str.match(reg);
+  return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+  var begs, beg, left, right, result;
+  var ai = str.indexOf(a);
+  var bi = str.indexOf(b, ai + 1);
+  var i = ai;
+
+  if (ai >= 0 && bi > 0) {
+    begs = [];
+    left = str.length;
+
+    while (i >= 0 && !result) {
+      if (i == ai) {
+        begs.push(i);
+        ai = str.indexOf(a, i + 1);
+      } else if (begs.length == 1) {
+        result = [ begs.pop(), bi ];
+      } else {
+        beg = begs.pop();
+        if (beg < left) {
+          left = beg;
+          right = bi;
+        }
+
+        bi = str.indexOf(b, i + 1);
+      }
+
+      i = ai < bi && ai >= 0 ? ai : bi;
+    }
+
+    if (begs.length) {
+      result = [ left, right ];
+    }
+  }
+
+  return result;
+}
+
+
+/***/ }),
+/* 622 */
+/***/ (function(module) {
+
+module.exports = require("path");
+
+/***/ }),
+/* 623 */,
+/* 624 */,
+/* 625 */,
+/* 626 */,
+/* 627 */,
+/* 628 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+const { stringify } = __webpack_require__(477)
+const { outputFileSync } = __webpack_require__(294)
+
+function outputJsonSync (file, data, options) {
+  const str = stringify(data, options)
+
+  outputFileSync(file, str, options)
+}
+
+module.exports = outputJsonSync
+
+
+/***/ }),
+/* 629 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+var Type = __webpack_require__(945);
+
+function resolveJavascriptRegExp(data) {
+  if (data === null) return false;
+  if (data.length === 0) return false;
+
+  var regexp = data,
+      tail = /\/([gim]*)$/.exec(data),
+      modifiers = '';
+
+  // if regexp starts with '/' it can have modifiers and must be properly closed
+  // `/foo/gim` - modifiers tail can be maximum 3 chars
+  if (regexp[0] === '/') {
+    if (tail) modifiers = tail[1];
+
+    if (modifiers.length > 3) return false;
+    // if expression starts with /, it should be properly terminated
+    if (regexp[regexp.length - modifiers.length - 1] !== '/') return false;
+  }
+
+  return true;
+}
+
+function constructJavascriptRegExp(data) {
+  var regexp = data,
+      tail = /\/([gim]*)$/.exec(data),
+      modifiers = '';
+
+  // `/foo/gim` - tail can be maximum 4 chars
+  if (regexp[0] === '/') {
+    if (tail) modifiers = tail[1];
+    regexp = regexp.slice(1, regexp.length - modifiers.length - 1);
+  }
+
+  return new RegExp(regexp, modifiers);
+}
+
+function representJavascriptRegExp(object /*, style*/) {
+  var result = '/' + object.source + '/';
+
+  if (object.global) result += 'g';
+  if (object.multiline) result += 'm';
+  if (object.ignoreCase) result += 'i';
+
+  return result;
+}
+
+function isRegExp(object) {
+  return Object.prototype.toString.call(object) === '[object RegExp]';
+}
+
+module.exports = new Type('tag:yaml.org,2002:js/regexp', {
+  kind: 'scalar',
+  resolve: resolveJavascriptRegExp,
+  construct: constructJavascriptRegExp,
+  predicate: isRegExp,
+  represent: representJavascriptRegExp
+});
+
+
+/***/ }),
+/* 630 */
+/***/ (function(module) {
+
+module.exports = require("perf_hooks");
+
+/***/ }),
+/* 631 */
+/***/ (function(module) {
+
+module.exports = require("net");
+
+/***/ }),
+/* 632 */,
+/* 633 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+var Type = __webpack_require__(945);
+
+function 
resolveYamlMerge(data) {
+  return data === '<<' || data === null;
+}
+
+module.exports = new Type('tag:yaml.org,2002:merge', {
+  kind: 'scalar',
+  resolve: resolveYamlMerge
+});
+
+
+/***/ }),
+/* 634 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+var wrappy = __webpack_require__(293)
+var reqs = Object.create(null)
+var once = __webpack_require__(49)
+
+module.exports = wrappy(inflight)
+
+function inflight (key, cb) {
+  if (reqs[key]) {
+    reqs[key].push(cb)
+    return null
+  } else {
+    reqs[key] = [cb]
+    return makeres(key)
+  }
+}
+
+function makeres (key) {
+  return once(function RES () {
+    var cbs = reqs[key]
+    var len = cbs.length
+    var args = slice(arguments)
+
+    // XXX It's somewhat ambiguous whether a new callback added in this
+    // pass should be queued for later execution if something in the
+    // list of callbacks throws, or if it should just be discarded.
+    // However, it's such an edge case that it hardly matters, and either
+    // choice is likely as surprising as the other.
+    // As it happens, we do go ahead and schedule it for later execution.
+    try {
+      for (var i = 0; i < len; i++) {
+        cbs[i].apply(null, args)
+      }
+    } finally {
+      if (cbs.length > len) {
+        // added more in the interim.
+        // de-zalgo, just in case, but don't call again.
+        cbs.splice(0, len)
+        process.nextTick(function () {
+          RES.apply(null, args)
+        })
+      } else {
+        delete reqs[key]
+      }
+    }
+  })
+}
+
+function slice (args) {
+  var length = args.length
+  var array = []
+
+  for (var i = 0; i < length; i++) array[i] = args[i]
+  return array
+}
+
+
+/***/ }),
+/* 635 */,
+/* 636 */
+/***/ (function(module) {
+
+/**
+ * it generates an ordered array of node's parents from top to bottom
+ * @param {Object} node the node object with its parents and children
+ */
+function parentChainFromNode(node) {
+  const result = node.parents
+    .reduce((acc, parent) => {
+      acc.push(
+        ...parentChainFromNode(parent)
+          .filter(e => e && !acc.find(a => a.project === 
e.project)) + .map(e => { + return { ...e }; + }) + ); + return acc; + }, []) + .filter(e => e); + result.push(node); + return result; +} + +/** + * it generates an ordered array of node's children from top to bottom + * @param {Object} node the node object with its parents and children + * @param {Array} nodeList the nodeList to fill + */ +function childChainFromNode(node, nodeList = []) { + const index = nodeList.findIndex(n => n.project === node.project); + if (index > -1) { + nodeList.splice(index, 1); + } + nodeList.push(node); + node.children.forEach(child => childChainFromNode(child, nodeList)); + return nodeList; +} + +/** + * + * @param {String} key the JSON.stringify key + * @param {Object} value the JSON.stringify value + * @param {Array} cache an empty array instance + */ +function jsonStringFunction(key, value, cache) { + { + return ["parents", "children"].includes(key) + ? value.map(node => { + if (!cache.includes(node.project)) { + cache.push(node.project); + return node; + } + return { + project: node.project, + warning: + "rest of the node information removed to avoid circular dependency problem. The node information is already defined in the json." + }; + }) + : value; + } +} + +module.exports = { + parentChainFromNode, + childChainFromNode, + jsonStringFunction +}; + + +/***/ }), +/* 637 */, +/* 638 */, +/* 639 */, +/* 640 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +module.exports = { + copySync: __webpack_require__(110) +} + + +/***/ }), +/* 641 */, +/* 642 */, +/* 643 */, +/* 644 */, +/* 645 */ +/***/ (function(module) { + +function getNodeTriggeringJob(context, nodeChain) { + return nodeChain.find( + e => + e.project === context.config.github.repository || + e.project === context.config.github.project + ); +} + +module.exports = { getNodeTriggeringJob }; + + +/***/ }), +/* 646 */, +/* 647 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __asyncValues = (this && this.__asyncValues) || function (o) { + if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined."); + var m = o[Symbol.asyncIterator], i; + return m ? m.call(o) : (o = typeof __values === "function" ? 
__values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+    function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+    function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
+var __importStar = (this && this.__importStar) || function (mod) {
+    if (mod && mod.__esModule) return mod;
+    var result = {};
+    if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k];
+    result["default"] = mod;
+    return result;
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+const fs = __importStar(__webpack_require__(747));
+const zlib = __importStar(__webpack_require__(761));
+const util_1 = __webpack_require__(669);
+const stat = util_1.promisify(fs.stat);
+/**
+ * Creates a Gzip compressed file of an original file at the provided temporary filepath location
+ * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
+ * @param {string} tempFilePath the location of where the Gzip file will be created
+ * @returns the size of the gzip file that gets created
+ */
+function createGZipFileOnDisk(originalFilePath, tempFilePath) {
+    return __awaiter(this, void 0, void 0, function* () {
+        return new Promise((resolve, reject) => {
+            const inputStream = fs.createReadStream(originalFilePath);
+            const gzip = zlib.createGzip();
+            const outputStream = fs.createWriteStream(tempFilePath);
+            inputStream.pipe(gzip).pipe(outputStream);
+            outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () {
+                // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload
+                const size = (yield stat(tempFilePath)).size;
+                resolve(size);
+            }));
+            outputStream.on('error', error => {
+                // eslint-disable-next-line no-console
+                console.log(error);
+                reject(error);
+            });
+        });
+    });
+}
+exports.createGZipFileOnDisk = createGZipFileOnDisk;
+/**
+ * Creates a GZip file in memory using a buffer. 
Should be used for smaller files to reduce disk I/O + * @param originalFilePath the path to the original file that is being GZipped + * @returns a buffer with the GZip file + */ +function createGZipFileInBuffer(originalFilePath) { + return __awaiter(this, void 0, void 0, function* () { + return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { + var e_1, _a; + const inputStream = fs.createReadStream(originalFilePath); + const gzip = zlib.createGzip(); + inputStream.pipe(gzip); + // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 + const chunks = []; + try { + for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { + const chunk = gzip_1_1.value; + chunks.push(chunk); + } + } + catch (e_1_1) { e_1 = { error: e_1_1 }; } + finally { + try { + if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); + } + finally { if (e_1) throw e_1.error; } + } + resolve(Buffer.concat(chunks)); + })); + }); +} +exports.createGZipFileInBuffer = createGZipFileInBuffer; +//# sourceMappingURL=upload-gzip.js.map + +/***/ }), +/* 648 */, +/* 649 */, +/* 650 */, +/* 651 */, +/* 652 */, +/* 653 */, +/* 654 */ +/***/ (function(module) { + +// This is not the set of all possible signals. +// +// It IS, however, the set of all signals that trigger +// an exit on either Linux or BSD systems. Linux is a +// superset of the signal names supported on BSD, and +// the unknown signals just fail to register, so we can +// catch that easily enough. +// +// Don't bother with SIGKILL. It's uncatchable, which +// means that we can't fire any callbacks anyway. +// +// If a user does happen to register a handler on a non- +// fatal signal like SIGWINCH or something, and then +// exit, it'll end up firing `process.emit('exit')`, so +// the handler will be fired anyway. +// +// SIGBUS, SIGFPE, SIGSEGV and SIGILL, when not raised +// artificially, inherently leave the process in a +// state from which it is not safe to try and enter JS +// listeners. +module.exports = [ + 'SIGABRT', + 'SIGALRM', + 'SIGHUP', + 'SIGINT', + 'SIGTERM' +] + +if (process.platform !== 'win32') { + module.exports.push( + 'SIGVTALRM', + 'SIGXCPU', + 'SIGXFSZ', + 'SIGUSR2', + 'SIGTRAP', + 'SIGSYS', + 'SIGQUIT', + 'SIGIOT' + // should detect profiler and enable/disable accordingly. + // see #21 + // 'SIGPROF' + ) +} + +if (process.platform === 'linux') { + module.exports.push( + 'SIGIO', + 'SIGPOLL', + 'SIGPWR', + 'SIGSTKFLT', + 'SIGUNUSED' + ) +} + + +/***/ }), +/* 655 */, +/* 656 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { logger } = __webpack_require__(79); + +const groupBy = (checkoutInfo, key) => { + return Object.values(checkoutInfo).reduce((acc, checkInfo) => { + if (checkInfo) { + (acc[checkInfo[key]] = acc[checkInfo[key]] || []).push(checkInfo); + } + return acc; + }, {}); +}; + +function printCheckoutInformation(checkoutInfo) { + if (checkoutInfo && Object.keys(checkoutInfo).length) { + logger.info("----------------------------------------------"); + Object.entries(checkoutInfo).forEach(([project, checkInfo]) => + logger.info( + checkInfo + ? `${checkInfo.group}/${checkInfo.project}:${checkInfo.branch}.${ + checkInfo.merge + ? 
` It has been merged with ${checkInfo.targetGroup}/${checkInfo.project}:${checkInfo.targetBranch}`
+ : ""
+ }`
+ : `${project}: No checkout information`
+ )
+ );
+ logger.info("----------------------------------------------");
+ Object.entries(groupBy(checkoutInfo, "branch")).forEach(
+ ([branch, checkoutInfoList]) => {
+ logger.info(
+ `Projects taken from branch "${branch}":${checkoutInfoList.map(
+ checkInfo => `
+ ${checkInfo.group}/${checkInfo.project}${
+ checkInfo.merge
+ ? `. Merged with ${checkInfo.targetGroup}/${checkInfo.project}:${checkInfo.targetBranch}`
+ : ""
+ }`
+ )}`
+ );
 }
      );
+ logger.info("----------------------------------------------");
+ }
+}
+
+module.exports = {
+ printCheckoutInformation
+};
+
+
+/***/ }),
+/* 657 */,
+/* 658 */,
+/* 659 */,
+/* 660 */,
+/* 661 */,
+/* 662 */,
+/* 663 */,
+/* 664 */,
+/* 665 */,
+/* 666 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+let _fs
+try {
+ _fs = __webpack_require__(598)
+} catch (_) {
+ _fs = __webpack_require__(747)
+}
+const universalify = __webpack_require__(676)
+const { stringify, stripBom } = __webpack_require__(477)
+
+async function _readFile (file, options = {}) {
+ if (typeof options === 'string') {
+ options = { encoding: options }
+ }
+
+ const fs = options.fs || _fs
+
+ const shouldThrow = 'throws' in options ? options.throws : true
+
+ let data = await universalify.fromCallback(fs.readFile)(file, options)
+
+ data = stripBom(data)
+
+ let obj
+ try {
+ obj = JSON.parse(data, options ? 
options.reviver : null) + } catch (err) { + if (shouldThrow) { + err.message = `${file}: ${err.message}` + throw err + } else { + return null + } + } + + return obj +} + +const readFile = universalify.fromPromise(_readFile) + +function readFileSync (file, options = {}) { + if (typeof options === 'string') { + options = { encoding: options } + } + + const fs = options.fs || _fs + + const shouldThrow = 'throws' in options ? options.throws : true + + try { + let content = fs.readFileSync(file, options) + content = stripBom(content) + return JSON.parse(content, options.reviver) + } catch (err) { + if (shouldThrow) { + err.message = `${file}: ${err.message}` + throw err + } else { + return null + } + } +} + +async function _writeFile (file, obj, options = {}) { + const fs = options.fs || _fs + + const str = stringify(obj, options) + + await universalify.fromCallback(fs.writeFile)(file, str, options) +} + +const writeFile = universalify.fromPromise(_writeFile) + +function writeFileSync (file, obj, options = {}) { + const fs = options.fs || _fs + + const str = stringify(obj, options) + // not sure if fs.writeFileSync returns anything, but just in case + return fs.writeFileSync(file, str, options) +} + +const jsonfile = { + readFile, + readFileSync, + writeFile, + writeFileSync +} + +module.exports = jsonfile + + +/***/ }), +/* 667 */, +/* 668 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { + getDefinitionFile, + getStartingProject +} = __webpack_require__(933); + +function getInputs() { + return { + definitionFile: getDefinitionFile(), + startingProject: getStartingProject() + }; +} + +async function createCommonConfig(eventData, rootFolder, env) { + async function parseGitHub(eventData, env) { + return { + serverUrl: env["GITHUB_SERVER_URL"] + ? env["GITHUB_SERVER_URL"].replace(/\/$/, "") + : undefined, // https://github.com + action: env["GITHUB_ACTION"], // Ginxogithub-action-build-chain + sourceGroup: eventData.sourceGroup, + author: eventData.author, + actor: env["GITHUB_ACTOR"], // Ginxo + sourceBranch: env["GITHUB_HEAD_REF"], // Ginxo-patch-1 + targetBranch: env["GITHUB_BASE_REF"], // master + jobId: env["GITHUB_JOB"], // build-chain + sourceRepository: eventData.sourceRepository, + repository: env["GITHUB_REPOSITORY"], // Ginxo/lienzo-tests + group: env["GITHUB_REPOSITORY"].split("/")[0], // Ginxo + project: env["GITHUB_REPOSITORY"].split("/")[1], // lienzo-tests + groupProject: env["GITHUB_REPOSITORY"], + workflowName: env["GITHUB_WORKFLOW"], // Build Chain + ref: env["GITHUB_REF"], // refs/pull/1/merge' + inputs: getInputs() + }; + } + return { + github: await parseGitHub(eventData, env), + rootFolder: rootFolder === undefined ? "" : rootFolder + }; +} + +module.exports = { + createCommonConfig, + getInputs +}; + + +/***/ }), +/* 669 */ +/***/ (function(module) { + +module.exports = require("util"); + +/***/ }), +/* 670 */, +/* 671 */, +/* 672 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", { value: true }); +const assert_1 = __webpack_require__(357); +const fs = __webpack_require__(747); +const path = __webpack_require__(622); +_a = fs.promises, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +exports.IS_WINDOWS = process.platform === 'win32'; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). + */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Recursively create a directory at `fsPath`. + * + * This implementation is optimistic, meaning it attempts to create the full + * path first, and backs up the path stack from there. + * + * @param fsPath The path to create + * @param maxDepth The maximum recursion depth + * @param depth The current recursion depth + */ +function mkdirP(fsPath, maxDepth = 1000, depth = 1) { + return __awaiter(this, void 0, void 0, function* () { + assert_1.ok(fsPath, 'a path argument must be provided'); + fsPath = path.resolve(fsPath); + if (depth >= maxDepth) + return exports.mkdir(fsPath); + try { + yield exports.mkdir(fsPath); + return; + } + catch (err) { + switch (err.code) { + case 'ENOENT': { + yield mkdirP(path.dirname(fsPath), maxDepth, depth + 1); + yield exports.mkdir(fsPath); + return; + } + default: { + let stats; + try { + stats = yield exports.stat(fsPath); + } + catch (err2) { + throw err; + } + if (!stats.isDirectory()) + throw err; + } + } + } + }); +} +exports.mkdirP = mkdirP; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. 
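+ *
+ * Editor's illustrative sketch (hypothetical paths, not from the source):
+ *   await tryGetExecutablePath('/usr/local/bin/node', [])
+ *     // -> '/usr/local/bin/node' on Linux/macOS when the execute bit is set, '' otherwise
+ *   await tryGetExecutablePath('C:\\tools\\foo', ['.exe', '.cmd'])
+ *     // -> 'C:\\tools\\foo.exe' on Windows when that file exists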
+ */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +//# sourceMappingURL=io-util.js.map + +/***/ }), +/* 673 */, +/* 674 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { ReadYamlException } = __webpack_require__(844); +const yaml = __webpack_require__(414); + +function read(fileContent) { + try { + return yaml.safeLoad(fileContent); + } catch (e) { + throw new ReadYamlException( + `error reading yaml file content. Error: ${e.message}` + ); + } +} + +module.exports = { read }; + + +/***/ }), +/* 675 */, +/* 676 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +exports.fromCallback = function (fn) { + return Object.defineProperty(function (...args) { + if (typeof args[args.length - 1] === 'function') fn.apply(this, args) + else { + return new Promise((resolve, reject) => { + fn.apply( + this, + args.concat([(err, res) => err ? 
reject(err) : resolve(res)]) + ) + }) + } + }, 'name', { value: fn.name }) +} + +exports.fromPromise = function (fn) { + return Object.defineProperty(function (...args) { + const cb = args[args.length - 1] + if (typeof cb !== 'function') return fn.apply(this, args) + else fn.apply(this, args.slice(0, -1)).then(r => cb(null, r), cb) + }, 'name', { value: fn.name }) +} + + +/***/ }), +/* 677 */, +/* 678 */, +/* 679 */, +/* 680 */, +/* 681 */ +/***/ (function(module) { + +"use strict"; + + +function posix(path) { + return path.charAt(0) === '/'; +} + +function win32(path) { + // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 + var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; + var result = splitDeviceRe.exec(path); + var device = result[1] || ''; + var isUnc = Boolean(device && device.charAt(1) !== ':'); + + // UNC paths are always absolute + return Boolean(result[2] || isUnc); +} + +module.exports = process.platform === 'win32' ? win32 : posix; +module.exports.posix = posix; +module.exports.win32 = win32; + + +/***/ }), +/* 682 */, +/* 683 */, +/* 684 */, +/* 685 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +/*eslint-disable no-use-before-define*/ + +var common = __webpack_require__(740); +var YAMLException = __webpack_require__(556); +var DEFAULT_FULL_SCHEMA = __webpack_require__(910); +var DEFAULT_SAFE_SCHEMA = __webpack_require__(723); + +var _toString = Object.prototype.toString; +var _hasOwnProperty = Object.prototype.hasOwnProperty; + +var CHAR_TAB = 0x09; /* Tab */ +var CHAR_LINE_FEED = 0x0A; /* LF */ +var CHAR_CARRIAGE_RETURN = 0x0D; /* CR */ +var CHAR_SPACE = 0x20; /* Space */ +var CHAR_EXCLAMATION = 0x21; /* ! */ +var CHAR_DOUBLE_QUOTE = 0x22; /* " */ +var CHAR_SHARP = 0x23; /* # */ +var CHAR_PERCENT = 0x25; /* % */ +var CHAR_AMPERSAND = 0x26; /* & */ +var CHAR_SINGLE_QUOTE = 0x27; /* ' */ +var CHAR_ASTERISK = 0x2A; /* * */ +var CHAR_COMMA = 0x2C; /* , */ +var CHAR_MINUS = 0x2D; /* - */ +var CHAR_COLON = 0x3A; /* : */ +var CHAR_EQUALS = 0x3D; /* = */ +var CHAR_GREATER_THAN = 0x3E; /* > */ +var CHAR_QUESTION = 0x3F; /* ? 
*/ +var CHAR_COMMERCIAL_AT = 0x40; /* @ */ +var CHAR_LEFT_SQUARE_BRACKET = 0x5B; /* [ */ +var CHAR_RIGHT_SQUARE_BRACKET = 0x5D; /* ] */ +var CHAR_GRAVE_ACCENT = 0x60; /* ` */ +var CHAR_LEFT_CURLY_BRACKET = 0x7B; /* { */ +var CHAR_VERTICAL_LINE = 0x7C; /* | */ +var CHAR_RIGHT_CURLY_BRACKET = 0x7D; /* } */ + +var ESCAPE_SEQUENCES = {}; + +ESCAPE_SEQUENCES[0x00] = '\\0'; +ESCAPE_SEQUENCES[0x07] = '\\a'; +ESCAPE_SEQUENCES[0x08] = '\\b'; +ESCAPE_SEQUENCES[0x09] = '\\t'; +ESCAPE_SEQUENCES[0x0A] = '\\n'; +ESCAPE_SEQUENCES[0x0B] = '\\v'; +ESCAPE_SEQUENCES[0x0C] = '\\f'; +ESCAPE_SEQUENCES[0x0D] = '\\r'; +ESCAPE_SEQUENCES[0x1B] = '\\e'; +ESCAPE_SEQUENCES[0x22] = '\\"'; +ESCAPE_SEQUENCES[0x5C] = '\\\\'; +ESCAPE_SEQUENCES[0x85] = '\\N'; +ESCAPE_SEQUENCES[0xA0] = '\\_'; +ESCAPE_SEQUENCES[0x2028] = '\\L'; +ESCAPE_SEQUENCES[0x2029] = '\\P'; + +var DEPRECATED_BOOLEANS_SYNTAX = [ + 'y', 'Y', 'yes', 'Yes', 'YES', 'on', 'On', 'ON', + 'n', 'N', 'no', 'No', 'NO', 'off', 'Off', 'OFF' +]; + +function compileStyleMap(schema, map) { + var result, keys, index, length, tag, style, type; + + if (map === null) return {}; + + result = {}; + keys = Object.keys(map); + + for (index = 0, length = keys.length; index < length; index += 1) { + tag = keys[index]; + style = String(map[tag]); + + if (tag.slice(0, 2) === '!!') { + tag = 'tag:yaml.org,2002:' + tag.slice(2); + } + type = schema.compiledTypeMap['fallback'][tag]; + + if (type && _hasOwnProperty.call(type.styleAliases, style)) { + style = type.styleAliases[style]; + } + + result[tag] = style; + } + + return result; +} + +function encodeHex(character) { + var string, handle, length; + + string = character.toString(16).toUpperCase(); + + if (character <= 0xFF) { + handle = 'x'; + length = 2; + } else if (character <= 0xFFFF) { + handle = 'u'; + length = 4; + } else if (character <= 0xFFFFFFFF) { + handle = 'U'; + length = 8; + } else { + throw new YAMLException('code point within a string may not be greater than 0xFFFFFFFF'); + } + + return '\\' + handle + common.repeat('0', length - string.length) + string; +} + +function State(options) { + this.schema = options['schema'] || DEFAULT_FULL_SCHEMA; + this.indent = Math.max(1, (options['indent'] || 2)); + this.noArrayIndent = options['noArrayIndent'] || false; + this.skipInvalid = options['skipInvalid'] || false; + this.flowLevel = (common.isNothing(options['flowLevel']) ? -1 : options['flowLevel']); + this.styleMap = compileStyleMap(this.schema, options['styles'] || null); + this.sortKeys = options['sortKeys'] || false; + this.lineWidth = options['lineWidth'] || 80; + this.noRefs = options['noRefs'] || false; + this.noCompatMode = options['noCompatMode'] || false; + this.condenseFlow = options['condenseFlow'] || false; + + this.implicitTypes = this.schema.compiledImplicit; + this.explicitTypes = this.schema.compiledExplicit; + + this.tag = null; + this.result = ''; + + this.duplicates = []; + this.usedDuplicates = null; +} + +// Indents every line in a string. Empty lines (\n only) are not indented. 
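+// Editor's illustrative example (hypothetical input, not from the source):
+//   indentString('foo\n\nbar', 2) === '  foo\n\n  bar'
+// i.e. both content lines gain two spaces while the blank middle line stays bare.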
+function indentString(string, spaces) { + var ind = common.repeat(' ', spaces), + position = 0, + next = -1, + result = '', + line, + length = string.length; + + while (position < length) { + next = string.indexOf('\n', position); + if (next === -1) { + line = string.slice(position); + position = length; + } else { + line = string.slice(position, next + 1); + position = next + 1; + } + + if (line.length && line !== '\n') result += ind; + + result += line; + } + + return result; +} + +function generateNextLine(state, level) { + return '\n' + common.repeat(' ', state.indent * level); +} + +function testImplicitResolving(state, str) { + var index, length, type; + + for (index = 0, length = state.implicitTypes.length; index < length; index += 1) { + type = state.implicitTypes[index]; + + if (type.resolve(str)) { + return true; + } + } + + return false; +} + +// [33] s-white ::= s-space | s-tab +function isWhitespace(c) { + return c === CHAR_SPACE || c === CHAR_TAB; +} + +// Returns true if the character can be printed without escaping. +// From YAML 1.2: "any allowed characters known to be non-printable +// should also be escaped. [However,] This isn’t mandatory" +// Derived from nb-char - \t - #x85 - #xA0 - #x2028 - #x2029. +function isPrintable(c) { + return (0x00020 <= c && c <= 0x00007E) + || ((0x000A1 <= c && c <= 0x00D7FF) && c !== 0x2028 && c !== 0x2029) + || ((0x0E000 <= c && c <= 0x00FFFD) && c !== 0xFEFF /* BOM */) + || (0x10000 <= c && c <= 0x10FFFF); +} + +// [34] ns-char ::= nb-char - s-white +// [27] nb-char ::= c-printable - b-char - c-byte-order-mark +// [26] b-char ::= b-line-feed | b-carriage-return +// [24] b-line-feed ::= #xA /* LF */ +// [25] b-carriage-return ::= #xD /* CR */ +// [3] c-byte-order-mark ::= #xFEFF +function isNsChar(c) { + return isPrintable(c) && !isWhitespace(c) + // byte-order-mark + && c !== 0xFEFF + // b-char + && c !== CHAR_CARRIAGE_RETURN + && c !== CHAR_LINE_FEED; +} + +// Simplified test for values allowed after the first character in plain style. +function isPlainSafe(c, prev) { + // Uses a subset of nb-char - c-flow-indicator - ":" - "#" + // where nb-char ::= c-printable - b-char - c-byte-order-mark. + return isPrintable(c) && c !== 0xFEFF + // - c-flow-indicator + && c !== CHAR_COMMA + && c !== CHAR_LEFT_SQUARE_BRACKET + && c !== CHAR_RIGHT_SQUARE_BRACKET + && c !== CHAR_LEFT_CURLY_BRACKET + && c !== CHAR_RIGHT_CURLY_BRACKET + // - ":" - "#" + // /* An ns-char preceding */ "#" + && c !== CHAR_COLON + && ((c !== CHAR_SHARP) || (prev && isNsChar(prev))); +} + +// Simplified test for values allowed as the first character in plain style. +function isPlainSafeFirst(c) { + // Uses a subset of ns-char - c-indicator + // where ns-char = nb-char - s-white. 
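+  // Editor's note (illustrative): indicator characters such as '-', '?', ':'
+  // or '#' fail this test, so a string like '- item' cannot open a plain
+  // scalar and is emitted single-quoted, as '- item' in quotes, instead.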
+ return isPrintable(c) && c !== 0xFEFF + && !isWhitespace(c) // - s-white + // - (c-indicator ::= + // “-” | “?” | “:” | “,” | “[” | “]” | “{” | “}” + && c !== CHAR_MINUS + && c !== CHAR_QUESTION + && c !== CHAR_COLON + && c !== CHAR_COMMA + && c !== CHAR_LEFT_SQUARE_BRACKET + && c !== CHAR_RIGHT_SQUARE_BRACKET + && c !== CHAR_LEFT_CURLY_BRACKET + && c !== CHAR_RIGHT_CURLY_BRACKET + // | “#” | “&” | “*” | “!” | “|” | “=” | “>” | “'” | “"” + && c !== CHAR_SHARP + && c !== CHAR_AMPERSAND + && c !== CHAR_ASTERISK + && c !== CHAR_EXCLAMATION + && c !== CHAR_VERTICAL_LINE + && c !== CHAR_EQUALS + && c !== CHAR_GREATER_THAN + && c !== CHAR_SINGLE_QUOTE + && c !== CHAR_DOUBLE_QUOTE + // | “%” | “@” | “`”) + && c !== CHAR_PERCENT + && c !== CHAR_COMMERCIAL_AT + && c !== CHAR_GRAVE_ACCENT; +} + +// Determines whether block indentation indicator is required. +function needIndentIndicator(string) { + var leadingSpaceRe = /^\n* /; + return leadingSpaceRe.test(string); +} + +var STYLE_PLAIN = 1, + STYLE_SINGLE = 2, + STYLE_LITERAL = 3, + STYLE_FOLDED = 4, + STYLE_DOUBLE = 5; + +// Determines which scalar styles are possible and returns the preferred style. +// lineWidth = -1 => no limit. +// Pre-conditions: str.length > 0. +// Post-conditions: +// STYLE_PLAIN or STYLE_SINGLE => no \n are in the string. +// STYLE_LITERAL => no lines are suitable for folding (or lineWidth is -1). +// STYLE_FOLDED => a line > lineWidth and can be folded (and lineWidth != -1). +function chooseScalarStyle(string, singleLineOnly, indentPerLevel, lineWidth, testAmbiguousType) { + var i; + var char, prev_char; + var hasLineBreak = false; + var hasFoldableLine = false; // only checked if shouldTrackWidth + var shouldTrackWidth = lineWidth !== -1; + var previousLineBreak = -1; // count the first line correctly + var plain = isPlainSafeFirst(string.charCodeAt(0)) + && !isWhitespace(string.charCodeAt(string.length - 1)); + + if (singleLineOnly) { + // Case: no block styles. + // Check for disallowed characters to rule out plain and single. 
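+    // Editor's note (illustrative): a single non-printable character, e.g.
+    // '\x07', anywhere in the string forces STYLE_DOUBLE, since only the
+    // double-quoted style can escape it (here as "\a").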
+ for (i = 0; i < string.length; i++) { + char = string.charCodeAt(i); + if (!isPrintable(char)) { + return STYLE_DOUBLE; } - }, { - renamed: ["repos", "getCommitSignatureProtection"] - }], - getProtectedBranchRequiredStatusChecks: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "getStatusChecksProtection"] - }], - getProtectedBranchRestrictions: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { - renamed: ["repos", "getAccessRestrictions"] - }], - getPullRequestReviewProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getStatusChecksProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - getTeamsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams"], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users"], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - list: ["GET /user/repos", {}, { - renamed: ["repos", "listForAuthenticatedUser"] - }], - listAssetsForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets", {}, { - renamed: ["repos", "listReleaseAssets"] - }], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", { - mediaType: { - previews: ["groot"] + prev_char = i > 0 ? string.charCodeAt(i - 1) : null; + plain = plain && isPlainSafe(char, prev_char); + } + } else { + // Case: block styles permitted. + for (i = 0; i < string.length; i++) { + char = string.charCodeAt(i); + if (char === CHAR_LINE_FEED) { + hasLineBreak = true; + // Check if any line can be folded. + if (shouldTrackWidth) { + hasFoldableLine = hasFoldableLine || + // Foldable line = too long, and not more-indented. 
+ (i - previousLineBreak - 1 > lineWidth && + string[previousLineBreak + 1] !== ' '); + previousLineBreak = i; + } + } else if (!isPrintable(char)) { + return STYLE_DOUBLE; } - }], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/comments"], - listCommitComments: ["GET /repos/{owner}/{repo}/comments", {}, { - renamed: ["repos", "listCommitCommentsForRepo"] - }], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses"], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentStatuses: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses"], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listDownloads: ["GET /repos/{owner}/{repo}/downloads"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listHooks: ["GET /repos/{owner}/{repo}/hooks", {}, { - renamed: ["repos", "listWebhooks"] - }], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listProtectedBranchRequiredStatusChecksContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - renamed: ["repos", "getAllStatusCheckContexts"] - }], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", { - mediaType: { - previews: ["groot"] + prev_char = i > 0 ? string.charCodeAt(i - 1) : null; + plain = plain && isPlainSafe(char, prev_char); + } + // in case the end is missing a \n + hasFoldableLine = hasFoldableLine || (shouldTrackWidth && + (i - previousLineBreak - 1 > lineWidth && + string[previousLineBreak + 1] !== ' ')); + } + // Although every style can represent \n without escaping, prefer block styles + // for multiline, since they're more readable and they don't add empty lines. + // Also prefer folding a super-long line. + if (!hasLineBreak && !hasFoldableLine) { + // Strings interpretable as another type have to be quoted; + // e.g. the string 'true' vs. the boolean true. + return plain && !testAmbiguousType(string) + ? STYLE_PLAIN : STYLE_SINGLE; + } + // Edge case: block indentation indicator can only have one digit. + if (indentPerLevel > 9 && needIndentIndicator(string)) { + return STYLE_DOUBLE; + } + // At this point we know block styles are valid. + // Prefer literal style unless we want to fold. + return hasFoldableLine ? STYLE_FOLDED : STYLE_LITERAL; +} + +// Note: line breaking/folding is implemented for only the folded style. +// NB. We drop the last trailing newline (if any) of a returned block scalar +// since the dumper adds its own newline. This always works: +// • No ending newline => unaffected; already using strip "-" chomping. +// • Ending newline => removed then restored. +// Importantly, this keeps the "+" chomp indicator from gaining an extra line. 
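+// Editor's illustrative sketch (hypothetical inputs, not from the source):
+//   'a\nb'     dumps as |-  (strip: the source had no trailing newline)
+//   'a\nb\n'   dumps as |   (the dropped newline is restored by the dumper)
+//   'a\nb\n\n' dumps as |+  (keep: the extra blank line survives)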
+function writeScalar(state, string, level, iskey) { + state.dump = (function () { + if (string.length === 0) { + return "''"; + } + if (!state.noCompatMode && + DEPRECATED_BOOLEANS_SYNTAX.indexOf(string) !== -1) { + return "'" + string + "'"; + } + + var indent = state.indent * Math.max(1, level); // no 0-indent scalars + // As indentation gets deeper, let the width decrease monotonically + // to the lower bound min(state.lineWidth, 40). + // Note that this implies + // state.lineWidth ≤ 40 + state.indent: width is fixed at the lower bound. + // state.lineWidth > 40 + state.indent: width decreases until the lower bound. + // This behaves better than a constant minimum width which disallows narrower options, + // or an indent threshold which causes the width to suddenly increase. + var lineWidth = state.lineWidth === -1 + ? -1 : Math.max(Math.min(state.lineWidth, 40), state.lineWidth - indent); + + // Without knowing if keys are implicit/explicit, assume implicit for safety. + var singleLineOnly = iskey + // No block styles in flow mode. + || (state.flowLevel > -1 && level >= state.flowLevel); + function testAmbiguity(string) { + return testImplicitResolving(state, string); + } + + switch (chooseScalarStyle(string, singleLineOnly, state.indent, lineWidth, testAmbiguity)) { + case STYLE_PLAIN: + return string; + case STYLE_SINGLE: + return "'" + string.replace(/'/g, "''") + "'"; + case STYLE_LITERAL: + return '|' + blockHeader(string, state.indent) + + dropEndingNewline(indentString(string, indent)); + case STYLE_FOLDED: + return '>' + blockHeader(string, state.indent) + + dropEndingNewline(indentString(foldString(string, lineWidth), indent)); + case STYLE_DOUBLE: + return '"' + escapeString(string, lineWidth) + '"'; + default: + throw new YAMLException('impossible error: invalid scalar style'); + } + }()); +} + +// Pre-conditions: string is valid for a block scalar, 1 <= indentPerLevel <= 9. +function blockHeader(string, indentPerLevel) { + var indentIndicator = needIndentIndicator(string) ? String(indentPerLevel) : ''; + + // note the special case: the string '\n' counts as a "trailing" empty line. + var clip = string[string.length - 1] === '\n'; + var keep = clip && (string[string.length - 2] === '\n' || string === '\n'); + var chomp = keep ? '+' : (clip ? '' : '-'); + + return indentIndicator + chomp + '\n'; +} + +// (See the note for writeScalar.) +function dropEndingNewline(string) { + return string[string.length - 1] === '\n' ? string.slice(0, -1) : string; +} + +// Note: a long line without a suitable break point will exceed the width limit. +// Pre-conditions: every char in str isPrintable, str.length > 0, width > 0. +function foldString(string, width) { + // In folded style, $k$ consecutive newlines output as $k+1$ newlines— + // unless they're before or after a more-indented line, or at the very + // beginning or end, in which case $k$ maps to $k$. + // Therefore, parse each chunk as newline(s) followed by a content line. + var lineRe = /(\n+)([^\n]*)/g; + + // first line (possibly an empty line) + var result = (function () { + var nextLF = string.indexOf('\n'); + nextLF = nextLF !== -1 ? nextLF : string.length; + lineRe.lastIndex = nextLF; + return foldLine(string.slice(0, nextLF), width); + }()); + // If we haven't reached the first content line yet, don't add an extra \n. 
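+  // Editor's note (illustrative): folding 'a\nb' yields 'a\n\nb' (k newlines
+  // emit as k+1), while the more-indented 'a\n  b' keeps its single '\n'.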
+ var prevMoreIndented = string[0] === '\n' || string[0] === ' '; + var moreIndented; + + // rest of the lines + var match; + while ((match = lineRe.exec(string))) { + var prefix = match[1], line = match[2]; + moreIndented = (line[0] === ' '); + result += prefix + + (!prevMoreIndented && !moreIndented && line !== '' + ? '\n' : '') + + foldLine(line, width); + prevMoreIndented = moreIndented; + } + + return result; +} + +// Greedy line breaking. +// Picks the longest line under the limit each time, +// otherwise settles for the shortest line over the limit. +// NB. More-indented lines *cannot* be folded, as that would add an extra \n. +function foldLine(line, width) { + if (line === '' || line[0] === ' ') return line; + + // Since a more-indented line adds a \n, breaks can't be followed by a space. + var breakRe = / [^ ]/g; // note: the match index will always be <= length-2. + var match; + // start is an inclusive index. end, curr, and next are exclusive. + var start = 0, end, curr = 0, next = 0; + var result = ''; + + // Invariants: 0 <= start <= length-1. + // 0 <= curr <= next <= max(0, length-2). curr - start <= width. + // Inside the loop: + // A match implies length >= 2, so curr and next are <= length-2. + while ((match = breakRe.exec(line))) { + next = match.index; + // maintain invariant: curr - start <= width + if (next - start > width) { + end = (curr > start) ? curr : next; // derive end <= length-2 + result += '\n' + line.slice(start, end); + // skip the space that was output as \n + start = end + 1; // derive start <= length-1 + } + curr = next; + } + + // By the invariants, start <= length-1, so there is something left over. + // It is either the whole string or a part starting from non-whitespace. + result += '\n'; + // Insert a break if the remainder is too long and there is a break available. + if (line.length - start > width && curr > start) { + result += line.slice(start, curr) + '\n' + line.slice(curr + 1); + } else { + result += line.slice(start); + } + + return result.slice(1); // drop extra \n joiner +} + +// Escapes a double-quoted string. +function escapeString(string) { + var result = ''; + var char, nextChar; + var escapeSeq; + + for (var i = 0; i < string.length; i++) { + char = string.charCodeAt(i); + // Check for surrogate pairs (reference Unicode 3.0 section "3.7 Surrogates"). + if (char >= 0xD800 && char <= 0xDBFF/* high surrogate */) { + nextChar = string.charCodeAt(i + 1); + if (nextChar >= 0xDC00 && nextChar <= 0xDFFF/* low surrogate */) { + // Combine the surrogate pair and store it escaped. + result += encodeHex((char - 0xD800) * 0x400 + nextChar - 0xDC00 + 0x10000); + // Advance index one extra since we already used that char here. + i++; continue; } - }], - listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listStatusesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/statuses", {}, { - renamed: ["repos", "listCommitStatusesForRef"] - }], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listTopics: ["GET /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] + } + escapeSeq = ESCAPE_SEQUENCES[char]; + result += !escapeSeq && isPrintable(char) + ? 
string[i] + : escapeSeq || encodeHex(char); + } + + return result; +} + +function writeFlowSequence(state, level, object) { + var _result = '', + _tag = state.tag, + index, + length; + + for (index = 0, length = object.length; index < length; index += 1) { + // Write only valid elements. + if (writeNode(state, level, object[index], false, false)) { + if (index !== 0) _result += ',' + (!state.condenseFlow ? ' ' : ''); + _result += state.dump; + } + } + + state.tag = _tag; + state.dump = '[' + _result + ']'; +} + +function writeBlockSequence(state, level, object, compact) { + var _result = '', + _tag = state.tag, + index, + length; + + for (index = 0, length = object.length; index < length; index += 1) { + // Write only valid elements. + if (writeNode(state, level + 1, object[index], true, true)) { + if (!compact || index !== 0) { + _result += generateNextLine(state, level); } - }, { - renamed: ["repos", "getAllTopics"] - }], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - pingHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings", {}, { - renamed: ["repos", "pingWebhook"] - }], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - removeAppAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - removeBranchProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection", {}, { - renamed: ["repos", "deleteBranchProtection"] - }], - removeCollaborator: ["DELETE /repos/{owner}/{repo}/collaborators/{username}"], - removeDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}", {}, { - renamed: ["repos", "deleteDeployKey"] - }], - removeProtectedBranchAdminEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins", {}, { - renamed: ["repos", "deleteAdminBranchProtection"] - }], - removeProtectedBranchAppRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps", - renamed: ["repos", "removeAppAccessRestrictions"] - }], - removeProtectedBranchPullRequestReviewEnforcement: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { - renamed: ["repos", "deletePullRequestReviewProtection"] - }], - removeProtectedBranchRequiredSignatures: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures", { - mediaType: { - previews: ["zzzax"] + + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + _result += '-'; + } else { + _result += '- '; } - }, { - renamed: ["repos", "deleteCommitSignatureProtection"] - }], - removeProtectedBranchRequiredStatusChecks: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "removeStatusChecksProtection"] - }], - removeProtectedBranchRequiredStatusChecksContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts", - renamed: ["repos", "removeStatusCheckContexts"] - }], - removeProtectedBranchRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions", {}, { - renamed: ["repos", "deleteAccessRestrictions"] - }], - removeProtectedBranchTeamRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams", - renamed: ["repos", "removeTeamAccessRestrictions"] - }], - removeProtectedBranchUserRestrictions: ["DELETE 
/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users", - renamed: ["repos", "removeUserAccessRestrictions"] - }], - removeStatusCheckContexts: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - removeStatusCheckProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - removeTeamAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] + + _result += state.dump; + } + } + + state.tag = _tag; + state.dump = _result || '[]'; // Empty sequence if no valid values. +} + +function writeFlowMapping(state, level, object) { + var _result = '', + _tag = state.tag, + objectKeyList = Object.keys(object), + index, + length, + objectKey, + objectValue, + pairBuffer; + + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + + pairBuffer = ''; + if (index !== 0) pairBuffer += ', '; + + if (state.condenseFlow) pairBuffer += '"'; + + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; + + if (!writeNode(state, level, objectKey, false, false)) { + continue; // Skip this pair because of invalid key; + } + + if (state.dump.length > 1024) pairBuffer += '? '; + + pairBuffer += state.dump + (state.condenseFlow ? '"' : '') + ':' + (state.condenseFlow ? '' : ' '); + + if (!writeNode(state, level, objectValue, false, false)) { + continue; // Skip this pair because of invalid value. + } + + pairBuffer += state.dump; + + // Both key and value are valid. + _result += pairBuffer; + } + + state.tag = _tag; + state.dump = '{' + _result + '}'; +} + +function writeBlockMapping(state, level, object, compact) { + var _result = '', + _tag = state.tag, + objectKeyList = Object.keys(object), + index, + length, + objectKey, + objectValue, + explicitPair, + pairBuffer; + + // Allow sorting keys so that the output file is deterministic + if (state.sortKeys === true) { + // Default sorting + objectKeyList.sort(); + } else if (typeof state.sortKeys === 'function') { + // Custom sort function + objectKeyList.sort(state.sortKeys); + } else if (state.sortKeys) { + // Something is wrong + throw new YAMLException('sortKeys must be a boolean or a function'); + } + + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + pairBuffer = ''; + + if (!compact || index !== 0) { + pairBuffer += generateNextLine(state, level); + } + + objectKey = objectKeyList[index]; + objectValue = object[objectKey]; + + if (!writeNode(state, level + 1, objectKey, true, true, true)) { + continue; // Skip this pair because of invalid key. + } + + explicitPair = (state.tag !== null && state.tag !== '?') || + (state.dump && state.dump.length > 1024); + + if (explicitPair) { + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += '?'; + } else { + pairBuffer += '? 
'; } - }], - replaceProtectedBranchAppRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps", - renamed: ["repos", "setAppAccessRestrictions"] - }], - replaceProtectedBranchRequiredStatusChecksContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts", - renamed: ["repos", "setStatusCheckContexts"] - }], - replaceProtectedBranchTeamRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams", - renamed: ["repos", "setTeamAccessRestrictions"] - }], - replaceProtectedBranchUserRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users", - renamed: ["repos", "setUserAccessRestrictions"] - }], - replaceTopics: ["PUT /repos/{owner}/{repo}/topics", { - mediaType: { - previews: ["mercy"] + } + + pairBuffer += state.dump; + + if (explicitPair) { + pairBuffer += generateNextLine(state, level); + } + + if (!writeNode(state, level + 1, objectValue, true, explicitPair)) { + continue; // Skip this pair because of invalid value. + } + + if (state.dump && CHAR_LINE_FEED === state.dump.charCodeAt(0)) { + pairBuffer += ':'; + } else { + pairBuffer += ': '; + } + + pairBuffer += state.dump; + + // Both key and value are valid. + _result += pairBuffer; + } + + state.tag = _tag; + state.dump = _result || '{}'; // Empty mapping if no valid pairs. +} + +function detectType(state, object, explicit) { + var _result, typeList, index, length, type, style; + + typeList = explicit ? state.explicitTypes : state.implicitTypes; + + for (index = 0, length = typeList.length; index < length; index += 1) { + type = typeList[index]; + + if ((type.instanceOf || type.predicate) && + (!type.instanceOf || ((typeof object === 'object') && (object instanceof type.instanceOf))) && + (!type.predicate || type.predicate(object))) { + + state.tag = explicit ? 
type.tag : '?'; + + if (type.represent) { + style = state.styleMap[type.tag] || type.defaultStyle; + + if (_toString.call(type.represent) === '[object Function]') { + _result = type.represent(object, style); + } else if (_hasOwnProperty.call(type.represent, style)) { + _result = type.represent[style](object, style); + } else { + throw new YAMLException('!<' + type.tag + '> tag resolver accepts not "' + style + '" style'); + } + + state.dump = _result; } - }, { - renamed: ["repos", "replaceAllTopics"] - }], - requestPageBuild: ["POST /repos/{owner}/{repo}/pages/builds", {}, { - renamed: ["repos", "requestPagesBuild"] - }], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - retrieveCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile", {}, { - renamed: ["repos", "getCommunityProfileMetrics"] - }], - setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], - setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { - mapToData: "apps" - }], - setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, { - mapToData: "contexts" - }], - setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, { - mapToData: "teams" - }], - setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, { - mapToData: "users" - }], - testPushHook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests", {}, { - renamed: ["repos", "testPushWebhook"] - }], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateHook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}", {}, { - renamed: ["repos", "updateWebhook"] - }], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"], - updateProtectedBranchPullRequestReviewEnforcement: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews", {}, { - renamed: ["repos", "updatePullRequestReviewProtection"] - }], - updateProtectedBranchRequiredStatusChecks: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, { - renamed: ["repos", "updateStatusChecksProtection"] - }], - updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"], - updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", { - baseUrl: "https://uploads.github.com" - }] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits", { - mediaType: { - previews: ["cloak"] + + return true; + } + } + + return false; +} + +// Serializes `object` and writes it to global `result`. +// Returns true on success, or false on invalid object. 
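+// Editor's note (illustrative): with noRefs left false, an object appearing
+// twice in the input is dumped once as '&ref_0 ...' and afterwards collapses
+// to the alias '*ref_0'.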
+// +function writeNode(state, level, object, block, compact, iskey) { + state.tag = null; + state.dump = object; + + if (!detectType(state, object, false)) { + detectType(state, object, true); + } + + var type = _toString.call(state.dump); + + if (block) { + block = (state.flowLevel < 0 || state.flowLevel > level); + } + + var objectOrArray = type === '[object Object]' || type === '[object Array]', + duplicateIndex, + duplicate; + + if (objectOrArray) { + duplicateIndex = state.duplicates.indexOf(object); + duplicate = duplicateIndex !== -1; + } + + if ((state.tag !== null && state.tag !== '?') || duplicate || (state.indent !== 2 && level > 0)) { + compact = false; + } + + if (duplicate && state.usedDuplicates[duplicateIndex]) { + state.dump = '*ref_' + duplicateIndex; + } else { + if (objectOrArray && duplicate && !state.usedDuplicates[duplicateIndex]) { + state.usedDuplicates[duplicateIndex] = true; + } + if (type === '[object Object]') { + if (block && (Object.keys(state.dump).length !== 0)) { + writeBlockMapping(state, level, state.dump, compact); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + state.dump; + } + } else { + writeFlowMapping(state, level, state.dump); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; + } } - }], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - teams: { - addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"], - addOrUpdateMembershipInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { - renamed: ["teams", "addOrUpdateMembershipForUserInOrg"] - }], - addOrUpdateProjectInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] + } else if (type === '[object Array]') { + var arrayLevel = (state.noArrayIndent && (level > 0)) ? 
level - 1 : level; + if (block && (state.dump.length !== 0)) { + writeBlockSequence(state, arrayLevel, state.dump, compact); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + state.dump; + } + } else { + writeFlowSequence(state, arrayLevel, state.dump); + if (duplicate) { + state.dump = '&ref_' + duplicateIndex + ' ' + state.dump; + } } - }, { - renamed: ["teams", "addOrUpdateProjectPermissionsInOrg"] - }], - addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] + } else if (type === '[object String]') { + if (state.tag !== '?') { + writeScalar(state, state.dump, level, iskey); + } + } else { + if (state.skipInvalid) return false; + throw new YAMLException('unacceptable kind of an object to dump ' + type); + } + + if (state.tag !== null && state.tag !== '?') { + state.dump = '!<' + state.tag + '> ' + state.dump; + } + } + + return true; +} + +function getDuplicateReferences(object, state) { + var objects = [], + duplicatesIndexes = [], + index, + length; + + inspectNode(object, objects, duplicatesIndexes); + + for (index = 0, length = duplicatesIndexes.length; index < length; index += 1) { + state.duplicates.push(objects[duplicatesIndexes[index]]); + } + state.usedDuplicates = new Array(length); +} + +function inspectNode(object, objects, duplicatesIndexes) { + var objectKeyList, + index, + length; + + if (object !== null && typeof object === 'object') { + index = objects.indexOf(object); + if (index !== -1) { + if (duplicatesIndexes.indexOf(index) === -1) { + duplicatesIndexes.push(index); } - }], - addOrUpdateRepoInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { - renamed: ["teams", "addOrUpdateRepoPermissionsInOrg"] - }], - addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - checkManagesRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}", {}, { - renamed: ["teams", "checkPermissionsForRepoInOrg"] - }], - checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] + } else { + objects.push(object); + + if (Array.isArray(object)) { + for (index = 0, length = object.length; index < length; index += 1) { + inspectNode(object[index], objects, duplicatesIndexes); + } + } else { + objectKeyList = Object.keys(object); + + for (index = 0, length = objectKeyList.length; index < length; index += 1) { + inspectNode(object[objectKeyList[index]], objects, duplicatesIndexes); + } } - }], - checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"], - 
getMembershipInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { - renamed: ["teams", "getMembershipForUserInOrg"] - }], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects", { - mediaType: { - previews: ["inertia"] + } + } +} + +function dump(input, options) { + options = options || {}; + + var state = new State(options); + + if (!state.noRefs) getDuplicateReferences(input, state); + + if (writeNode(state, 0, input, true, true)) return state.dump + '\n'; + + return ''; +} + +function safeDump(input, options) { + return dump(input, common.extend({ schema: DEFAULT_SAFE_SCHEMA }, options)); +} + +module.exports.dump = dump; +module.exports.safeDump = safeDump; + + +/***/ }), +/* 686 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; + result["default"] = mod; + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const os = __importStar(__webpack_require__(87)); +const events = __importStar(__webpack_require__(614)); +const child = __importStar(__webpack_require__(129)); +const path = __importStar(__webpack_require__(622)); +const io = __importStar(__webpack_require__(1)); +const ioUtil = __importStar(__webpack_require__(672)); +/* eslint-disable @typescript-eslint/unbound-method */ +const IS_WINDOWS = process.platform === 'win32'; +/* + * Class for running command line tools. Handles quoting and arg parsing in a platform agnostic way. + */ +class ToolRunner extends events.EventEmitter { + constructor(toolPath, args, options) { + super(); + if (!toolPath) { + throw new Error("Parameter 'toolPath' cannot be null or empty."); + } + this.toolPath = toolPath; + this.args = args || []; + this.options = options || {}; + } + _debug(message) { + if (this.options.listeners && this.options.listeners.debug) { + this.options.listeners.debug(message); + } + } + _getCommandString(options, noPrefix) { + const toolPath = this._getSpawnFileName(); + const args = this._getSpawnArgs(options); + let cmd = noPrefix ? 
'' : '[command]'; // omit prefix when piped to a second tool + if (IS_WINDOWS) { + // Windows + cmd file + if (this._isCmdFile()) { + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows + verbatim + else if (options.windowsVerbatimArguments) { + cmd += `"${toolPath}"`; + for (const a of args) { + cmd += ` ${a}`; + } + } + // Windows (regular) + else { + cmd += this._windowsQuoteCmdArg(toolPath); + for (const a of args) { + cmd += ` ${this._windowsQuoteCmdArg(a)}`; + } + } + } + else { + // OSX/Linux - this can likely be improved with some form of quoting. + // creating processes on Unix is fundamentally different than Windows. + // on Unix, execvp() takes an arg array. + cmd += toolPath; + for (const a of args) { + cmd += ` ${a}`; + } + } + return cmd; + } + _processLineBuffer(data, strBuffer, onLine) { + try { + let s = strBuffer + data.toString(); + let n = s.indexOf(os.EOL); + while (n > -1) { + const line = s.substring(0, n); + onLine(line); + // the rest of the string ... + s = s.substring(n + os.EOL.length); + n = s.indexOf(os.EOL); + } + strBuffer = s; + } + catch (err) { + // streaming lines to console is best effort. Don't fail a build. + this._debug(`error processing line. Failed with error ${err}`); + } + } + _getSpawnFileName() { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + return process.env['COMSPEC'] || 'cmd.exe'; + } + } + return this.toolPath; + } + _getSpawnArgs(options) { + if (IS_WINDOWS) { + if (this._isCmdFile()) { + let argline = `/D /S /C "${this._windowsQuoteCmdArg(this.toolPath)}`; + for (const a of this.args) { + argline += ' '; + argline += options.windowsVerbatimArguments + ? a + : this._windowsQuoteCmdArg(a); + } + argline += '"'; + return [argline]; + } + } + return this.args; + } + _endsWith(str, end) { + return str.endsWith(end); + } + _isCmdFile() { + const upperToolPath = this.toolPath.toUpperCase(); + return (this._endsWith(upperToolPath, '.CMD') || + this._endsWith(upperToolPath, '.BAT')); + } + _windowsQuoteCmdArg(arg) { + // for .exe, apply the normal quoting rules that libuv applies + if (!this._isCmdFile()) { + return this._uvQuoteCmdArg(arg); + } + // otherwise apply quoting rules specific to the cmd.exe command line parser. + // the libuv rules are generic and are not designed specifically for cmd.exe + // command line parser. + // + // for a detailed description of the cmd.exe command line parser, refer to + // http://stackoverflow.com/questions/4094699/how-does-the-windows-command-interpreter-cmd-exe-parse-scripts/7970912#7970912 + // need quotes for empty arg + if (!arg) { + return '""'; + } + // determine whether the arg needs to be quoted + const cmdSpecialChars = [ + ' ', + '\t', + '&', + '(', + ')', + '[', + ']', + '{', + '}', + '^', + '=', + ';', + '!', + "'", + '+', + ',', + '`', + '~', + '|', + '<', + '>', + '"' + ]; + let needsQuotes = false; + for (const char of arg) { + if (cmdSpecialChars.some(x => x === char)) { + needsQuotes = true; + break; + } + } + // short-circuit if quotes not needed + if (!needsQuotes) { + return arg; + } + // the following quoting rules are very similar to the rules that by libuv applies. + // + // 1) wrap the string in quotes + // + // 2) double-up quotes - i.e. " => "" + // + // this is different from the libuv quoting rules. libuv replaces " with \", which unfortunately + // doesn't work well with a cmd.exe command line. + // + // note, replacing " with "" also works well if the arg is passed to a downstream .NET console app. 
+ // for example, the command line: + // foo.exe "myarg:""my val""" + // is parsed by a .NET console app into an arg array: + // [ "myarg:\"my val\"" ] + // which is the same end result when applying libuv quoting rules. although the actual + // command line from libuv quoting rules would look like: + // foo.exe "myarg:\"my val\"" + // + // 3) double-up slashes that precede a quote, + // e.g. hello \world => "hello \world" + // hello\"world => "hello\\""world" + // hello\\"world => "hello\\\\""world" + // hello world\ => "hello world\\" + // + // technically this is not required for a cmd.exe command line, or the batch argument parser. + // the reasons for including this as a .cmd quoting rule are: + // + // a) this is optimized for the scenario where the argument is passed from the .cmd file to an + // external program. many programs (e.g. .NET console apps) rely on the slash-doubling rule. + // + // b) it's what we've been doing previously (by deferring to node default behavior) and we + // haven't heard any complaints about that aspect. + // + // note, a weakness of the quoting rules chosen here, is that % is not escaped. in fact, % cannot be + // escaped when used on the command line directly - even though within a .cmd file % can be escaped + // by using %%. + // + // the saving grace is, on the command line, %var% is left as-is if var is not defined. this contrasts + // the line parsing rules within a .cmd file, where if var is not defined it is replaced with nothing. + // + // one option that was explored was replacing % with ^% - i.e. %var% => ^%var^%. this hack would + // often work, since it is unlikely that var^ would exist, and the ^ character is removed when the + // variable is used. the problem, however, is that ^ is not removed when %* is used to pass the args + // to an external program. + // + // an unexplored potential solution for the % escaping problem, is to create a wrapper .cmd file. + // % can be escaped within a .cmd file. + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; // double the slash + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '"'; // double the quote + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _uvQuoteCmdArg(arg) { + // Tool runner wraps child_process.spawn() and needs to apply the same quoting as + // Node in certain cases where the undocumented spawn option windowsVerbatimArguments + // is used. + // + // Since this function is a port of quote_cmd_arg from Node 4.x (technically, lib UV, + // see https://github.com/nodejs/node/blob/v4.x/deps/uv/src/win/process.c for details), + // pasting copyright notice from Node within this function: + // + // Copyright Joyent, Inc. and other Node contributors. All rights reserved. 
+ // + // Permission is hereby granted, free of charge, to any person obtaining a copy + // of this software and associated documentation files (the "Software"), to + // deal in the Software without restriction, including without limitation the + // rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + // sell copies of the Software, and to permit persons to whom the Software is + // furnished to do so, subject to the following conditions: + // + // The above copyright notice and this permission notice shall be included in + // all copies or substantial portions of the Software. + // + // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + // FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + // IN THE SOFTWARE. + if (!arg) { + // Need double quotation for empty argument + return '""'; + } + if (!arg.includes(' ') && !arg.includes('\t') && !arg.includes('"')) { + // No quotation needed + return arg; + } + if (!arg.includes('"') && !arg.includes('\\')) { + // No embedded double quotes or backslashes, so I can just wrap + // quote marks around the whole thing. + return `"${arg}"`; + } + // Expected input/output: + // input : hello"world + // output: "hello\"world" + // input : hello""world + // output: "hello\"\"world" + // input : hello\world + // output: hello\world + // input : hello\\world + // output: hello\\world + // input : hello\"world + // output: "hello\\\"world" + // input : hello\\"world + // output: "hello\\\\\"world" + // input : hello world\ + // output: "hello world\\" - note the comment in libuv actually reads "hello world\" + // but it appears the comment is wrong, it should be "hello world\\" + let reverse = '"'; + let quoteHit = true; + for (let i = arg.length; i > 0; i--) { + // walk the string in reverse + reverse += arg[i - 1]; + if (quoteHit && arg[i - 1] === '\\') { + reverse += '\\'; + } + else if (arg[i - 1] === '"') { + quoteHit = true; + reverse += '\\'; + } + else { + quoteHit = false; + } + } + reverse += '"'; + return reverse + .split('') + .reverse() + .join(''); + } + _cloneExecOptions(options) { + options = options || {}; + const result = { + cwd: options.cwd || process.cwd(), + env: options.env || process.env, + silent: options.silent || false, + windowsVerbatimArguments: options.windowsVerbatimArguments || false, + failOnStdErr: options.failOnStdErr || false, + ignoreReturnCode: options.ignoreReturnCode || false, + delay: options.delay || 10000 + }; + result.outStream = options.outStream || process.stdout; + result.errStream = options.errStream || process.stderr; + return result; + } + _getSpawnOptions(options, toolPath) { + options = options || {}; + const result = {}; + result.cwd = options.cwd; + result.env = options.env; + result['windowsVerbatimArguments'] = + options.windowsVerbatimArguments || this._isCmdFile(); + if (options.windowsVerbatimArguments) { + result.argv0 = `"${toolPath}"`; + } + return result; + } + /** + * Exec a tool. + * Output will be streamed to the live console. + * Returns promise with return code + * + * @param tool path to tool to exec + * @param options optional exec options. 
See ExecOptions + * @returns number + */ + exec() { + return __awaiter(this, void 0, void 0, function* () { + // root the tool path if it is unrooted and contains relative pathing + if (!ioUtil.isRooted(this.toolPath) && + (this.toolPath.includes('/') || + (IS_WINDOWS && this.toolPath.includes('\\')))) { + // prefer options.cwd if it is specified, however options.cwd may also need to be rooted + this.toolPath = path.resolve(process.cwd(), this.options.cwd || process.cwd(), this.toolPath); + } + // if the tool is only a file name, then resolve it from the PATH + // otherwise verify it exists (add extension on Windows if necessary) + this.toolPath = yield io.which(this.toolPath, true); + return new Promise((resolve, reject) => { + this._debug(`exec tool: ${this.toolPath}`); + this._debug('arguments:'); + for (const arg of this.args) { + this._debug(` ${arg}`); + } + const optionsNonNull = this._cloneExecOptions(this.options); + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(this._getCommandString(optionsNonNull) + os.EOL); + } + const state = new ExecState(optionsNonNull, this.toolPath); + state.on('debug', (message) => { + this._debug(message); + }); + const fileName = this._getSpawnFileName(); + const cp = child.spawn(fileName, this._getSpawnArgs(optionsNonNull), this._getSpawnOptions(this.options, fileName)); + const stdbuffer = ''; + if (cp.stdout) { + cp.stdout.on('data', (data) => { + if (this.options.listeners && this.options.listeners.stdout) { + this.options.listeners.stdout(data); + } + if (!optionsNonNull.silent && optionsNonNull.outStream) { + optionsNonNull.outStream.write(data); + } + this._processLineBuffer(data, stdbuffer, (line) => { + if (this.options.listeners && this.options.listeners.stdline) { + this.options.listeners.stdline(line); + } + }); + }); + } + const errbuffer = ''; + if (cp.stderr) { + cp.stderr.on('data', (data) => { + state.processStderr = true; + if (this.options.listeners && this.options.listeners.stderr) { + this.options.listeners.stderr(data); + } + if (!optionsNonNull.silent && + optionsNonNull.errStream && + optionsNonNull.outStream) { + const s = optionsNonNull.failOnStdErr + ? 
optionsNonNull.errStream + : optionsNonNull.outStream; + s.write(data); + } + this._processLineBuffer(data, errbuffer, (line) => { + if (this.options.listeners && this.options.listeners.errline) { + this.options.listeners.errline(line); + } + }); + }); + } + cp.on('error', (err) => { + state.processError = err.message; + state.processExited = true; + state.processClosed = true; + state.CheckComplete(); + }); + cp.on('exit', (code) => { + state.processExitCode = code; + state.processExited = true; + this._debug(`Exit code ${code} received from tool '${this.toolPath}'`); + state.CheckComplete(); + }); + cp.on('close', (code) => { + state.processExitCode = code; + state.processExited = true; + state.processClosed = true; + this._debug(`STDIO streams have closed for tool '${this.toolPath}'`); + state.CheckComplete(); + }); + state.on('done', (error, exitCode) => { + if (stdbuffer.length > 0) { + this.emit('stdline', stdbuffer); + } + if (errbuffer.length > 0) { + this.emit('errline', errbuffer); + } + cp.removeAllListeners(); + if (error) { + reject(error); + } + else { + resolve(exitCode); + } + }); + if (this.options.input) { + if (!cp.stdin) { + throw new Error('child process missing stdin'); + } + cp.stdin.end(this.options.input); + } + }); + }); + } +} +exports.ToolRunner = ToolRunner; +/** + * Convert an arg string to an array of args. Handles escaping + * + * @param argString string of arguments + * @returns string[] array of arguments + */ +function argStringToArray(argString) { + const args = []; + let inQuotes = false; + let escaped = false; + let arg = ''; + function append(c) { + // we only escape double quotes. + if (escaped && c !== '"') { + arg += '\\'; + } + arg += c; + escaped = false; + } + for (let i = 0; i < argString.length; i++) { + const c = argString.charAt(i); + if (c === '"') { + if (!escaped) { + inQuotes = !inQuotes; + } + else { + append(c); + } + continue; + } + if (c === '\\' && escaped) { + append(c); + continue; + } + if (c === '\\' && inQuotes) { + escaped = true; + continue; + } + if (c === ' ' && !inQuotes) { + if (arg.length > 0) { + args.push(arg); + arg = ''; + } + continue; + } + append(c); + } + if (arg.length > 0) { + args.push(arg.trim()); + } + return args; +} +exports.argStringToArray = argStringToArray; +class ExecState extends events.EventEmitter { + constructor(options, toolPath) { + super(); + this.processClosed = false; // tracks whether the process has exited and stdio is closed + this.processError = ''; + this.processExitCode = 0; + this.processExited = false; // tracks whether the process has exited + this.processStderr = false; // tracks whether stderr was written to + this.delay = 10000; // 10 seconds + this.done = false; + this.timeout = null; + if (!toolPath) { + throw new Error('toolPath must not be empty'); + } + this.options = options; + this.toolPath = toolPath; + if (options.delay) { + this.delay = options.delay; + } + } + CheckComplete() { + if (this.done) { + return; + } + if (this.processClosed) { + this._setResult(); + } + else if (this.processExited) { + this.timeout = setTimeout(ExecState.HandleTimeout, this.delay, this); + } + } + _debug(message) { + this.emit('debug', message); + } + _setResult() { + // determine whether there is an error + let error; + if (this.processExited) { + if (this.processError) { + error = new Error(`There was an error when attempting to execute the process '${this.toolPath}'. This may indicate the process failed to start. 
Error: ${this.processError}`); + } + else if (this.processExitCode !== 0 && !this.options.ignoreReturnCode) { + error = new Error(`The process '${this.toolPath}' failed with exit code ${this.processExitCode}`); + } + else if (this.processStderr && this.options.failOnStdErr) { + error = new Error(`The process '${this.toolPath}' failed because one or more lines were written to the STDERR stream`); + } + } + // clear the timeout + if (this.timeout) { + clearTimeout(this.timeout); + this.timeout = null; + } + this.done = true; + this.emit('done', error, this.processExitCode); + } + static HandleTimeout(state) { + if (state.done) { + return; + } + if (!state.processClosed && state.processExited) { + const message = `The STDIO streams did not close within ${state.delay / + 1000} seconds of the exit event from process '${state.toolPath}'. This may indicate a child process inherited the STDIO streams and has not yet exited.`; + state._debug(message); + } + state._setResult(); + } +} +//# sourceMappingURL=toolrunner.js.map + +/***/ }), +/* 687 */, +/* 688 */, +/* 689 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +try { + var util = __webpack_require__(669); + /* istanbul ignore next */ + if (typeof util.inherits !== 'function') throw ''; + module.exports = util.inherits; +} catch (e) { + /* istanbul ignore next */ + module.exports = __webpack_require__(315); +} + + +/***/ }), +/* 690 */, +/* 691 */, +/* 692 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +class Deprecation extends Error { + constructor(message) { + super(message); // Maintains proper stack trace (only available on V8) + + /* istanbul ignore next */ + + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); + } + + this.name = 'Deprecation'; + } + +} + +exports.Deprecation = Deprecation; + + +/***/ }), +/* 693 */, +/* 694 */, +/* 695 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const { stringify } = __webpack_require__(477) +const { outputFile } = __webpack_require__(294) + +async function outputJson (file, data, options = {}) { + const str = stringify(data, options) + + await outputFile(file, str, options) +} + +module.exports = outputJson + + +/***/ }), +/* 696 */, +/* 697 */ +/***/ (function(module) { + +"use strict"; + +module.exports = (promise, onFinally) => { + onFinally = onFinally || (() => {}); + + return promise.then( + val => new Promise(resolve => { + resolve(onFinally()); + }).then(() => val), + err => new Promise(resolve => { + resolve(onFinally()); + }).then(() => { + throw err; + }) + ); +}; + + +/***/ }), +/* 698 */, +/* 699 */, +/* 700 */, +/* 701 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { logger } = __webpack_require__(79); +const { + prepareEnv, + createGithubInformationObject, + getEvent +} = __webpack_require__(8); +const { start } = __webpack_require__(785); +const { createCommonConfig } = __webpack_require__(668); +const { getProcessEnvVariable } = __webpack_require__(867); +const fse = __webpack_require__(232); + +/** + * Executes pull request flow + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + * @param {Object} pullRequestData information from pull request event + * @param {String} rootFolder path to store flow data/projects + * @param {Boolean} isArchiveArtifacts + */ +async function execute( + 
token, + octokit, + env, + eventData, + rootFolder, + isArchiveArtifacts +) { + const githubInformation = createGithubInformationObject(eventData, env); + const config = await createCommonConfig(githubInformation, rootFolder, env); + const context = { token, octokit, config }; + await start(context, isArchiveArtifacts); +} + +/** + * Prepares execution when this is triggered from a github action's event + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + */ +async function executeFromEvent(token, octokit, env) { + const eventDataStr = await fse.readFile( + getProcessEnvVariable("GITHUB_EVENT_PATH"), + "utf8" + ); + const eventData = JSON.parse(eventDataStr); + await execute(token, octokit, env, eventData, undefined, true); +} + +/** + * Prepares execution when this is triggered from command line + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + * @param {String} rootFolder path to store flow data/projects + * @param {String} eventUrl event url + */ +async function executeLocally(token, octokit, env, rootFolder, eventUrl) { + logger.info(`Executing pull request flow for ${eventUrl} in ${rootFolder}`); + + const eventData = await getEvent(octokit, eventUrl); + prepareEnv(env, eventUrl, eventData); + await execute(token, octokit, env, eventData, rootFolder, false); +} + +module.exports = { executeLocally, executeFromEvent }; + + +/***/ }), +/* 702 */, +/* 703 */, +/* 704 */, +/* 705 */, +/* 706 */, +/* 707 */, +/* 708 */, +/* 709 */, +/* 710 */, +/* 711 */, +/* 712 */, +/* 713 */, +/* 714 */, +/* 715 */, +/* 716 */, +/* 717 */, +/* 718 */, +/* 719 */, +/* 720 */, +/* 721 */, +/* 722 */, +/* 723 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// JS-YAML's default schema for `safeLoad` function. +// It is not described in the YAML specification. +// +// This schema is based on standard YAML's Core schema and includes most of +// extra types described at YAML tag repository. 
(http://yaml.org/type/) + + + + + +var Schema = __webpack_require__(43); + + +module.exports = new Schema({ + include: [ + __webpack_require__(611) + ], + implicit: [ + __webpack_require__(82), + __webpack_require__(633) + ], + explicit: [ + __webpack_require__(913), + __webpack_require__(842), + __webpack_require__(947), + __webpack_require__(100) + ] +}); + + +/***/ }), +/* 724 */, +/* 725 */, +/* 726 */, +/* 727 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + +const u = __webpack_require__(676).fromPromise +const { makeDir: _makeDir, makeDirSync } = __webpack_require__(54) +const makeDir = u(_makeDir) + +module.exports = { + mkdirs: makeDir, + mkdirsSync: makeDirSync, + // alias + mkdirp: makeDir, + mkdirpSync: makeDirSync, + ensureDir: makeDir, + ensureDirSync: makeDirSync +} + + +/***/ }), +/* 728 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + +Object.defineProperty(exports, "__esModule", { value: true }); +class SearchState { + constructor(path, level) { + this.path = path; + this.level = level; + } +} +exports.SearchState = SearchState; +//# sourceMappingURL=internal-search-state.js.map + +/***/ }), +/* 729 */, +/* 730 */, +/* 731 */, +/* 732 */, +/* 733 */, +/* 734 */, +/* 735 */, +/* 736 */, +/* 737 */, +/* 738 */, +/* 739 */, +/* 740 */ +/***/ (function(module) { + +"use strict"; + + + +function isNothing(subject) { + return (typeof subject === 'undefined') || (subject === null); +} + + +function isObject(subject) { + return (typeof subject === 'object') && (subject !== null); +} + + +function toArray(sequence) { + if (Array.isArray(sequence)) return sequence; + else if (isNothing(sequence)) return []; + + return [ sequence ]; +} + + +function extend(target, source) { + var index, length, key, sourceKeys; + + if (source) { + sourceKeys = Object.keys(source); + + for (index = 0, length = sourceKeys.length; index < length; index += 1) { + key = sourceKeys[index]; + target[key] = source[key]; + } + } + + return target; +} + + +function repeat(string, count) { + var result = '', cycle; + + for (cycle = 0; cycle < count; cycle += 1) { + result += string; + } + + return result; +} + + +function isNegativeZero(number) { + return (number === 0) && (Number.NEGATIVE_INFINITY === 1 / number); +} + + +module.exports.isNothing = isNothing; +module.exports.isObject = isObject; +module.exports.toArray = toArray; +module.exports.repeat = repeat; +module.exports.isNegativeZero = isNegativeZero; +module.exports.extend = extend; + + +/***/ }), +/* 741 */, +/* 742 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var fs = __webpack_require__(747) +var core +if (process.platform === 'win32' || global.TESTING_WINDOWS) { + core = __webpack_require__(818) +} else { + core = __webpack_require__(197) +} + +module.exports = isexe +isexe.sync = sync + +function isexe (path, options, cb) { + if (typeof options === 'function') { + cb = options + options = {} + } + + if (!cb) { + if (typeof Promise !== 'function') { + throw new TypeError('callback not provided') + } + + return new Promise(function (resolve, reject) { + isexe(path, options || {}, function (er, is) { + if (er) { + reject(er) + } else { + resolve(is) + } + }) + }) + } + + core(path, options || {}, function (er, is) { + // ignore EACCES because that just means we aren't allowed to run it + if (er) { + if (er.code === 'EACCES' || options && options.ignoreErrors) { + er = null + is = false } - }], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - 
removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"], - removeMembershipInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}", {}, { - renamed: ["teams", "removeMembershipForUserInOrg"] - }], - removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"], - removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"], - reviewProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}", { - mediaType: { - previews: ["inertia"] + } + cb(er, is) + }) +} + +function sync (path, options) { + // my kingdom for a filtered catch + try { + return core.sync(path, options || {}) + } catch (er) { + if (options && options.ignoreErrors || er.code === 'EACCES') { + return false + } else { + throw er + } + } +} + + +/***/ }), +/* 743 */, +/* 744 */, +/* 745 */, +/* 746 */, +/* 747 */ +/***/ (function(module) { + +module.exports = require("fs"); + +/***/ }), +/* 748 */, +/* 749 */, +/* 750 */, +/* 751 */, +/* 752 */, +/* 753 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } + +var endpoint = __webpack_require__(385); +var universalUserAgent = __webpack_require__(392); +var isPlainObject = __webpack_require__(356); +var nodeFetch = _interopDefault(__webpack_require__(454)); +var requestError = __webpack_require__(463); + +const VERSION = "5.4.9"; + +function getBufferResponse(response) { + return response.arrayBuffer(); +} + +function fetchWrapper(requestOptions) { + if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { + requestOptions.body = JSON.stringify(requestOptions.body); + } + + let headers = {}; + let status; + let url; + const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch; + return fetch(requestOptions.url, Object.assign({ + method: requestOptions.method, + body: requestOptions.body, + headers: requestOptions.headers, + redirect: requestOptions.redirect + }, requestOptions.request)).then(response => { + url = response.url; + status = response.status; + + for (const keyAndValue of response.headers) { + headers[keyAndValue[0]] = keyAndValue[1]; + } + + if (status === 204 || status === 205) { + return; + } // GitHub API returns 200 for HEAD requests + + + if (requestOptions.method === "HEAD") { + if (status < 400) { + return; } - }, { - renamed: ["teams", "checkPermissionsForProjectInOrg"] - }], - updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"], - updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] + + throw new requestError.RequestError(response.statusText, status, { + headers, + request: requestOptions + }); + } + + if (status === 304) { + throw new requestError.RequestError("Not modified", status, { + headers, + request: requestOptions + }); + } + + if (status >= 400) { + return response.text().then(message => { + const error = new requestError.RequestError(message, status, { + headers, + request: requestOptions + }); + + try { + let responseBody = JSON.parse(error.message); + Object.assign(error, responseBody); + let errors = responseBody.errors; // Assumption `errors` would always be in Array format + + error.message = 
error.message + ": " + errors.map(JSON.stringify).join(", "); + } catch (e) {// ignore, see octokit/rest.js#684 + } + + throw error; + }); + } + + const contentType = response.headers.get("content-type"); + + if (/application\/json/.test(contentType)) { + return response.json(); + } + + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + + return getBufferResponse(response); + }).then(data => { + return { + status, + url, + headers, + data + }; + }).catch(error => { + if (error instanceof requestError.RequestError) { + throw error; + } + + throw new requestError.RequestError(error.message, 500, { + headers, + request: requestOptions + }); + }); +} + +function withDefaults(oldEndpoint, newDefaults) { + const endpoint = oldEndpoint.defaults(newDefaults); + + const newApi = function (route, parameters) { + const endpointOptions = endpoint.merge(route, parameters); + + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint.parse(endpointOptions)); + } + + const request = (route, parameters) => { + return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); + }; + + Object.assign(request, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); + return endpointOptions.request.hook(request, endpointOptions); + }; + + return Object.assign(newApi, { + endpoint, + defaults: withDefaults.bind(null, endpoint) + }); +} + +const request = withDefaults(endpoint.endpoint, { + headers: { + "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` + } +}); + +exports.request = request; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 754 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { + checkoutDefinitionTree, + getPlaceHolders +} = __webpack_require__(330); +const { executeBuild } = __webpack_require__(11); +const { getTreeForProject } = __webpack_require__(352); +const { printCheckoutInformation } = __webpack_require__(656); +const { logger } = __webpack_require__(79); +const core = __webpack_require__(470); +const { + archiveArtifacts +} = __webpack_require__(503); + +async function start(context, isArchiveArtifacts = true) { + core.startGroup( + `[Single Flow] Checking out ${context.config.github.groupProject} and its dependencies` + ); + const definitionTree = await getTreeForProject( + context.config.github.inputs.definitionFile, + context.config.github.repository, + await getPlaceHolders(context, context.config.github.inputs.definitionFile) + ); + const nodeChain = [definitionTree]; + + logger.info( + `Single flow for project ${ + context.config.github.inputs.startingProject + }. Nodes: ${nodeChain.map(node => "\n" + node.project)}` + ); + const checkoutInfo = await checkoutDefinitionTree(context, nodeChain); + core.endGroup(); + + core.startGroup(`[Single Flow] Checkout Summary...`); + printCheckoutInformation(checkoutInfo); + core.endGroup(); + + const executionResult = await executeBuild( + context.config.rootFolder, + nodeChain, + context.config.github.repository + ) + .then(() => true) + .catch(e => e); + + if (isArchiveArtifacts) { + core.startGroup(`[Single Flow] Archiving artifacts...`); + await archiveArtifacts( + nodeChain.find(node => node.project === context.config.github.repository), + nodeChain, + executionResult === true ? 
["success", "always"] : ["failure", "always"] + ); + core.endGroup(); + } else { + logger.info("Archive artifact won't be executed"); + } + + if (executionResult !== true) { + logger.error(executionResult); + throw new Error( + `Command executions have failed, please review latest execution ${executionResult}` + ); + } +} + +module.exports = { + start +}; + + +/***/ }), +/* 755 */, +/* 756 */, +/* 757 */, +/* 758 */, +/* 759 */, +/* 760 */, +/* 761 */ +/***/ (function(module) { + +module.exports = require("zlib"); + +/***/ }), +/* 762 */, +/* 763 */ +/***/ (function(module) { + +module.exports = removeHook + +function removeHook (state, name, method) { + if (!state.registry[name]) { + return + } + + var index = state.registry[name] + .map(function (registered) { return registered.orig }) + .indexOf(method) + + if (index === -1) { + return + } + + state.registry[name].splice(index, 1) +} + + +/***/ }), +/* 764 */, +/* 765 */ +/***/ (function(module) { + +module.exports = require("process"); + +/***/ }), +/* 766 */, +/* 767 */, +/* 768 */ +/***/ (function(module) { + +"use strict"; + +module.exports = function (x) { + var lf = typeof x === 'string' ? '\n' : '\n'.charCodeAt(); + var cr = typeof x === 'string' ? '\r' : '\r'.charCodeAt(); + + if (x[x.length - 1] === lf) { + x = x.slice(0, x.length - 1); + } + + if (x[x.length - 1] === cr) { + x = x.slice(0, x.length - 1); + } + + return x; +}; + + +/***/ }), +/* 769 */, +/* 770 */, +/* 771 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { logger } = __webpack_require__(79); +const { + prepareEnv, + createGithubInformationObject, + getEvent +} = __webpack_require__(8); +const { start } = __webpack_require__(137); +const { createCommonConfig } = __webpack_require__(668); +const { getProcessEnvVariable } = __webpack_require__(867); +const fse = __webpack_require__(232); + +/** + * Executes full downstream flow + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + * @param {Object} pullRequestData information from pull request event + * @param {String} rootFolder path to store flow data/projects + * @param {Boolean} isArchiveArtifacts + */ +async function execute( + token, + octokit, + env, + eventData, + rootFolder, + isArchiveArtifacts +) { + const githubInformation = createGithubInformationObject(eventData, env); + const config = await createCommonConfig(githubInformation, rootFolder, env); + const context = { token, octokit, config }; + await start(context, isArchiveArtifacts); +} + +/** + * Prepares execution when this is triggered from a github action's event + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + */ +async function executeFromEvent(token, octokit, env) { + const eventDataStr = await fse.readFile( + getProcessEnvVariable("GITHUB_EVENT_PATH"), + "utf8" + ); + const eventData = JSON.parse(eventDataStr); + await execute(token, octokit, env, eventData, undefined, true); +} + +/** + * Prepares execution when this is triggered from command line + * @param {String} token the token to communicate to github + * @param {Object} octokit octokit instance + * @param {Object} env proces.env + * @param {String} rootFolder path to store flow data/projects + * @param {String} eventUrl event url + */ +async function executeLocally(token, octokit, env, rootFolder, eventUrl) { + logger.info(`Executing pull request flow for ${eventUrl} in 
${rootFolder}`); + + const eventData = await getEvent(octokit, eventUrl); + prepareEnv(env, eventUrl, eventData); + await execute(token, octokit, env, eventData, rootFolder, false); +} + +module.exports = { executeLocally, executeFromEvent }; + + +/***/ }), +/* 772 */, +/* 773 */, +/* 774 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +const u = __webpack_require__(676).fromCallback +module.exports = { + copy: u(__webpack_require__(500)) +} + + +/***/ }), +/* 775 */, +/* 776 */, +/* 777 */, +/* 778 */, +/* 779 */, +/* 780 */, +/* 781 */, +/* 782 */, +/* 783 */, +/* 784 */, +/* 785 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +const { + checkoutDefinitionTree, + getPlaceHolders +} = __webpack_require__(330); +const { executeBuild } = __webpack_require__(11); +const { + getTreeForProject, + parentChainFromNode +} = __webpack_require__(352); +const { printCheckoutInformation } = __webpack_require__(656); +const { logger } = __webpack_require__(79); +const core = __webpack_require__(470); +const { + archiveArtifacts +} = __webpack_require__(503); + +async function start(context, isArchiveArtifacts = true) { + core.startGroup( + `[Pull Request Flow] Checking out ${context.config.github.groupProject} and its dependencies` + ); + const definitionTree = await getTreeForProject( + context.config.github.inputs.definitionFile, + context.config.github.repository, + await getPlaceHolders(context, context.config.github.inputs.definitionFile) + ); + const nodeChain = await parentChainFromNode(definitionTree); + + logger.info( + `Tree for project ${ + context.config.github.repository + }. Dependencies: ${nodeChain.map(node => "\n" + node.project)}` + ); + const checkoutInfo = await checkoutDefinitionTree(context, nodeChain); + core.endGroup(); + + core.startGroup(`[Pull Request Flow] Checkout Summary...`); + printCheckoutInformation(checkoutInfo); + core.endGroup(); + + const executionResult = await executeBuild( + context.config.rootFolder, + nodeChain, + context.config.github.repository + ) + .then(() => true) + .catch(e => e); + + if (isArchiveArtifacts) { + core.startGroup(`[Pull Request Flow] Archiving artifacts...`); + await archiveArtifacts( + nodeChain.find(node => node.project === context.config.github.repository), + nodeChain, + executionResult === true ? ["success", "always"] : ["failure", "always"] + ); + core.endGroup(); + } else { + logger.info("Archive artifact won't be executed"); + } + + if (executionResult !== true) { + logger.error(executionResult); + throw new Error( + `Command executions have failed, please review latest execution ${executionResult}` + ); + } +} + +module.exports = { + start +}; + + +/***/ }), +/* 786 */, +/* 787 */ +/***/ (function(module) { + +module.exports = { + warn: "warn", + error: "error", + ignore: "ignore" +}; + + +/***/ }), +/* 788 */, +/* 789 */, +/* 790 */, +/* 791 */, +/* 792 */, +/* 793 */, +/* 794 */ +/***/ (function(module) { + +module.exports = require("stream"); + +/***/ }), +/* 795 */, +/* 796 */ +/***/ (function(__unusedmodule, exports, __webpack_require__) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? 
ex['default'] : ex; }
+
+var osName = _interopDefault(__webpack_require__(2));
+
+function getUserAgent() {
+  try {
+    return `Node.js/${process.version.substr(1)} (${osName()}; ${process.arch})`;
+  } catch (error) {
+    if (/wmic os get Caption/.test(error.message)) {
+      return "Windows <version undetectable>";
+    }
+
+    return "<environment undetectable>";
+  }
+}
+
+exports.getUserAgent = getUserAgent;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+/* 797 */,
+/* 798 */,
+/* 799 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+const fs = __webpack_require__(747);
+
+const { getUrlContent } = __webpack_require__(593);
+const { treatUrl } = __webpack_require__(824);
+const {
+  validateDefinition,
+  validateDependencies
+} = __webpack_require__(127);
+const { read: readYaml } = __webpack_require__(674);
+
+/**
+ * It will return back the definition file plus dependencies as an object
+ *
+ * @param {string} file - The definition file. It can be a URL or a path in the filesystem.
+ * @param {Object} urlPlaceHolders the url place holders to replace in the url. This is needed in case either the definition file or the dependencies file is loaded from a URL
+ */
+async function readDefinitionFile(file, urlPlaceHolders = {}) {
+  return file.startsWith("http")
+    ? readDefinitionFileFromUrl(file, urlPlaceHolders)
+    : readDefinitionFileFromFile(file, urlPlaceHolders);
+}
+
+/**
+ * reads definition from a file and returns back the yaml object
+ * @param {String} filePath the definition file path
+ * @param {Object} urlPlaceHolders the url place holders to replace in the url
+ */
+async function readDefinitionFileFromFile(filePath, urlPlaceHolders) {
+  const definitionFileContent = fs.readFileSync(filePath, "utf8");
+  return loadYaml(
+    readYaml(definitionFileContent),
+    filePath.substring(0, filePath.lastIndexOf("/")),
+    definitionFileContent,
+    urlPlaceHolders
+  );
+}
+
+/**
+ * reads definition from a url and returns back the yaml object
+ * @param {String} url the url to the definition file
+ * @param {Object} urlPlaceHolders the url place holders to replace in the url
+ */
+async function readDefinitionFileFromUrl(url, urlPlaceHolders) {
+  const treatedUrl = treatUrl(url, urlPlaceHolders);
+  return loadYaml(
+    readYaml(await getUrlContent(treatedUrl)),
+    "./",
+    url,
+    urlPlaceHolders
+  );
+}
+
+/**
+ * it loads dependencies content from an external file/url and returns back the yaml object
+ * @param {Object} definitionYaml the definition yaml object
+ * @param {String} definitionFileFolder the definition folder path
+ * @param {String} containerPath the path or url where the container was loaded
+ * @param {Object} urlPlaceHolders the url place holders to replace in the url
+ */
+async function loadYaml(
+  definitionYaml,
+  definitionFileFolder,
+  containerPath,
+  urlPlaceHolders
+) {
+  validateDefinition(definitionYaml);
+  definitionYaml.dependencies = await loadDependencies(
+    definitionYaml.dependencies,
+    definitionFileFolder,
+    containerPath,
+    urlPlaceHolders
+  );
+  return definitionYaml;
+}
+
+/**
+ *
+ * @param {Array|String} dependencies is the dependencies/extends property of the file; it can be an Array or a file path/URL
+ * @param {*} definitionFileFolder the folder
+ * @param {*} containerPath
+ * @param {*} urlPlaceHolders
+ */
+async function loadDependencies(
+  dependencies,
+  definitionFileFolder,
+  containerPath,
+  urlPlaceHolders
+) {
+  let dependenciesFinalPath = dependencies;
+  if (dependencies) {
+    if (
+      containerPath.startsWith("http") &&
+      !Array.isArray(dependencies) &&
+      !dependencies.startsWith("http")
+    ) {
+      const treatedUrl 
= treatUrl(containerPath, urlPlaceHolders);
+      dependenciesFinalPath = `${treatedUrl.substring(
+        0,
+        treatedUrl.lastIndexOf("/")
+      )}/${dependencies}`;
+      const dependenciesContent = await getUrlContent(dependenciesFinalPath);
+      fs.writeFileSync(dependencies, dependenciesContent);
+    }
+
+    if (!Array.isArray(dependencies)) {
+      const dependenciesFilePath = dependencies.startsWith("http")
+        ? treatUrl(dependencies, urlPlaceHolders)
+        : `${definitionFileFolder}/${dependencies}`;
+      const dependenciesFileContent = dependencies.startsWith("http")
+        ? await getUrlContent(dependenciesFilePath)
+        : fs.readFileSync(dependenciesFilePath, "utf8");
+      const dependenciesYaml = readYaml(dependenciesFileContent);
+      // console.log(`dependenciesFilePath ${dependenciesFilePath}`, dependenciesYaml, urlPlaceHolders)
+      validateDependencies(dependenciesYaml);
+      // Once the dependencies are loaded, the `extends` property is concatenated to the current dependencies
+      return (
+        await loadDependencies(
+          dependenciesYaml.extends,
+          dependenciesFilePath.substring(
+            0,
+            dependenciesFilePath.lastIndexOf("/")
+          ),
+          dependenciesFinalPath,
+          urlPlaceHolders
+        )
+      ).concat(dependenciesYaml.dependencies);
+    } else {
+      // There's no `extends` support for embedded dependencies
+      return dependencies;
+    }
+  } else {
+    return [];
+  }
+}
+
+module.exports = { readDefinitionFile };
+
+
+/***/ }),
+/* 800 */,
+/* 801 */,
+/* 802 */,
+/* 803 */,
+/* 804 */,
+/* 805 */,
+/* 806 */,
+/* 807 */,
+/* 808 */,
+/* 809 */
+/***/ (function(module, __unusedexports, __webpack_require__) {
+
+"use strict";
+
+
+var Type = __webpack_require__(945);
+
+function resolveYamlNull(data) {
+  if (data === null) return true;
+
+  var max = data.length;
+
+  return (max === 1 && data === '~') ||
+         (max === 4 && (data === 'null' || data === 'Null' || data === 'NULL'));
+}
+
+function constructYamlNull() {
+  return null;
+}
+
+function isNull(object) {
+  return object === null;
+}
+
+module.exports = new Type('tag:yaml.org,2002:null', {
+  kind: 'scalar',
+  resolve: resolveYamlNull,
+  construct: constructYamlNull,
+  predicate: isNull,
+  represent: {
+    canonical: function () { return '~'; },
+    lowercase: function () { return 'null'; },
+    uppercase: function () { return 'NULL'; },
+    camelcase: function () { return 'Null'; }
  },
-  users: {
-    addEmailForAuthenticated: ["POST /user/emails"],
-    addEmails: ["POST /user/emails", {}, {
-      renamed: ["users", "addEmailsForAuthenticated"]
-    }],
-    block: ["PUT /user/blocks/{username}"],
-    checkBlocked: ["GET /user/blocks/{username}"],
-    checkFollowing: ["GET /user/following/{username}", {}, {
-      renamed: ["users", "checkPersonIsFollowedByAuthenticated"]
-    }],
-    checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
-    checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
-    createGpgKey: ["POST /user/gpg_keys", {}, {
-      renamed: ["users", "createGpgKeyForAuthenticated"]
-    }],
-    createGpgKeyForAuthenticated: ["POST /user/gpg_keys"],
-    createPublicKey: ["POST /user/keys", {}, {
-      renamed: ["users", "createPublicSshKeyForAuthenticated"]
-    }],
-    createPublicSshKeyForAuthenticated: ["POST /user/keys"],
-    deleteEmailForAuthenticated: ["DELETE /user/emails"],
-    deleteEmails: ["DELETE /user/emails", {}, {
-      renamed: ["users", "deleteEmailsForAuthenticated"]
-    }],
-    deleteGpgKey: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, {
-      renamed: ["users", "deleteGpgKeyForAuthenticated"]
-    }],
-    deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}"],
-    deletePublicKey: ["DELETE /user/keys/{key_id}", 
{}, { - renamed: ["users", "deletePublicSshKeyForAuthenticated"] - }], - deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}"], - follow: ["PUT /user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKey: ["GET /user/gpg_keys/{gpg_key_id}", {}, { - renamed: ["users", "getGpgKeyForAuthenticated"] - }], - getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicKey: ["GET /user/keys/{key_id}", {}, { - renamed: ["users", "getPublicSshKeyForAuthenticated"] - }], - getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}"], - list: ["GET /users"], - listBlocked: ["GET /user/blocks", {}, { - renamed: ["users", "listBlockedByAuthenticated"] - }], - listBlockedByAuthenticated: ["GET /user/blocks"], - listEmails: ["GET /user/emails", {}, { - renamed: ["users", "listEmailsForAuthenticated"] - }], - listEmailsForAuthenticated: ["GET /user/emails"], - listFollowedByAuthenticated: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForAuthenticatedUser: ["GET /user/following", {}, { - renamed: ["users", "listFollowedByAuthenticated"] - }], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeys: ["GET /user/gpg_keys", {}, { - renamed: ["users", "listGpgKeysForAuthenticated"] - }], - listGpgKeysForAuthenticated: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmails: ["GET /user/public_emails", {}, { - renamed: ["users", "listPublicEmailsForAuthenticatedUser"] - }], - listPublicEmailsForAuthenticated: ["GET /user/public_emails"], - listPublicKeys: ["GET /user/keys", {}, { - renamed: ["users", "listPublicSshKeysForAuthenticated"] - }], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: ["GET /user/keys"], - setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility"], - togglePrimaryEmailVisibility: ["PATCH /user/email/visibility", {}, { - renamed: ["users", "setPrimaryEmailVisibilityForAuthenticated"] - }], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] + defaultStyle: 'lowercase' +}); + + +/***/ }), +/* 810 */, +/* 811 */, +/* 812 */, +/* 813 */ +/***/ (function(__unusedmodule, exports) { + +"use strict"; + + +Object.defineProperty(exports, '__esModule', { value: true }); + +async function auth(token) { + const tokenType = token.split(/\./).length === 3 ? "app" : /^v\d+\./.test(token) ? 
"installation" : "oauth"; + return { + type: "token", + token: token, + tokenType + }; +} + +/** + * Prefix token for usage in the Authorization header + * + * @param token OAuth token or JSON Web Token + */ +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; + } + + return `token ${token}`; +} + +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge(route, parameters); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +const createTokenAuth = function createTokenAuth(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + + if (typeof token !== "string") { + throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string"); + } + + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; + +exports.createTokenAuth = createTokenAuth; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 814 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = which +which.sync = whichSync + +var isWindows = process.platform === 'win32' || + process.env.OSTYPE === 'cygwin' || + process.env.OSTYPE === 'msys' + +var path = __webpack_require__(622) +var COLON = isWindows ? ';' : ':' +var isexe = __webpack_require__(742) + +function getNotFoundError (cmd) { + var er = new Error('not found: ' + cmd) + er.code = 'ENOENT' + + return er +} + +function getPathInfo (cmd, opt) { + var colon = opt.colon || COLON + var pathEnv = opt.path || process.env.PATH || '' + var pathExt = [''] + + pathEnv = pathEnv.split(colon) + + var pathExtExe = '' + if (isWindows) { + pathEnv.unshift(process.cwd()) + pathExtExe = (opt.pathExt || process.env.PATHEXT || '.EXE;.CMD;.BAT;.COM') + pathExt = pathExtExe.split(colon) + + + // Always test the cmd itself first. isexe will check to make sure + // it's found in the pathExt set. + if (cmd.indexOf('.') !== -1 && pathExt[0] !== '') + pathExt.unshift('') + } + + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. 
+ if (cmd.match(/\//) || isWindows && cmd.match(/\\/)) + pathEnv = [''] + + return { + env: pathEnv, + ext: pathExt, + extExe: pathExtExe + } +} + +function which (cmd, opt, cb) { + if (typeof opt === 'function') { + cb = opt + opt = {} + } + + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] + + ;(function F (i, l) { + if (i === l) { + if (opt.all && found.length) + return cb(null, found) + else + return cb(getNotFoundError(cmd)) + } + + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) + + var p = path.join(pathPart, cmd) + if (!pathPart && (/^\.[\\\/]/).test(cmd)) { + p = cmd.slice(0, 2) + p + } + ;(function E (ii, ll) { + if (ii === ll) return F(i + 1, l) + var ext = pathExt[ii] + isexe(p + ext, { pathExt: pathExtExe }, function (er, is) { + if (!er && is) { + if (opt.all) + found.push(p + ext) + else + return cb(null, p + ext) + } + return E(ii + 1, ll) + }) + })(0, pathExt.length) + })(0, pathEnv.length) +} + +function whichSync (cmd, opt) { + opt = opt || {} + + var info = getPathInfo(cmd, opt) + var pathEnv = info.env + var pathExt = info.ext + var pathExtExe = info.extExe + var found = [] + + for (var i = 0, l = pathEnv.length; i < l; i ++) { + var pathPart = pathEnv[i] + if (pathPart.charAt(0) === '"' && pathPart.slice(-1) === '"') + pathPart = pathPart.slice(1, -1) + + var p = path.join(pathPart, cmd) + if (!pathPart && /^\.[\\\/]/.test(cmd)) { + p = cmd.slice(0, 2) + p + } + for (var j = 0, ll = pathExt.length; j < ll; j ++) { + var cur = p + pathExt[j] + var is + try { + is = isexe.sync(cur, { pathExt: pathExtExe }) + if (is) { + if (opt.all) + found.push(cur) + else + return cur + } + } catch (ex) {} + } } -}; -const VERSION = "3.17.0"; + if (opt.all && found.length) + return found -function endpointsToMethods(octokit, endpointsMap) { - const newMethods = {}; + if (opt.nothrow) + return null - for (const [scope, endpoints] of Object.entries(endpointsMap)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign({ - method, - url - }, defaults); + throw getNotFoundError(cmd) +} - if (!newMethods[scope]) { - newMethods[scope] = {}; - } - const scopeMethods = newMethods[scope]; +/***/ }), +/* 815 */, +/* 816 */ +/***/ (function(module) { - if (decorations) { - scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); - continue; - } +"use strict"; - scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); +module.exports = /^#!.*/; + + +/***/ }), +/* 817 */, +/* 818 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +module.exports = isexe +isexe.sync = sync + +var fs = __webpack_require__(747) + +function checkPathExt (path, options) { + var pathext = options.pathExt !== undefined ? 
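+    // Illustrative note: PATHEXT is Windows' semicolon-separated list of
+    // executable extensions (a typical value looks like ".COM;.EXE;.BAT;.CMD").
+    // If it is unset the check passes for every file, and an empty entry in
+    // the list below has the same effect.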
+ options.pathExt : process.env.PATHEXT + + if (!pathext) { + return true + } + + pathext = pathext.split(';') + if (pathext.indexOf('') !== -1) { + return true + } + for (var i = 0; i < pathext.length; i++) { + var p = pathext[i].toLowerCase() + if (p && path.substr(-p.length).toLowerCase() === p) { + return true } } + return false +} - return newMethods; +function checkStat (stat, path, options) { + if (!stat.isSymbolicLink() && !stat.isFile()) { + return false + } + return checkPathExt(path, options) } -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); +function isexe (path, options, cb) { + fs.stat(path, function (er, stat) { + cb(er, er ? false : checkStat(stat, path, options)) + }) +} - function withDecorations(...args) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` +function sync (path, options) { + return checkStat(fs.statSync(path), path, options) +} - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: undefined - }); - return requestWithDefaults(options); - } // NOTE: there are currently no deprecations. But we keep the code - // below for future reference +/***/ }), +/* 819 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`); - } +const glob = __webpack_require__(281); +const path = __webpack_require__(622); +const { stat } = __webpack_require__(747); +const { dirname } = __webpack_require__(622); +const { promisify } = __webpack_require__(669); +const stats = promisify(stat); +const { logger } = __webpack_require__(79); - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } +async function findFilesToUpload(searchPath, globOptions) { + const globber = await glob.create( + searchPath, + globOptions || getDefaultGlobOptions() + ); + const filesToUpload = await getSearchResults(globber); - if (decorations.renamedParameters) { - // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 - const options = requestWithDefaults.endpoint.merge(...args); + return { + filesToUpload, + rootDirectory: getRootDirectory(globber, filesToUpload) + }; +} - for (const [name, alias] of Object.entries(decorations.renamedParameters)) { - // There is currently no deprecated parameter that is optional, - // so we never hit the else branch below at this point. +/** + * If multiple paths are specific, the least common ancestor (LCA) of the search paths is used as + * the delimiter to control the directory structure for the artifact. This function returns the LCA + * when given an array of search paths + * + * Example 1: The patterns `/foo/` and `/bar/` returns `/` + * + * Example 2: The patterns `~/foo/bar/*` and `~/foo/voo/two/*` and `~/foo/mo/` returns `~/foo` + */ +function getMultiPathLCA(searchPaths) { + if (searchPaths.length < 2) { + throw new Error("At least two search paths must be provided"); + } - /* istanbul ignore else */ - if (name in options) { - octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead`); + const commonPaths = []; + let smallestPathLength = Number.MAX_SAFE_INTEGER; - if (!(alias in options)) { - options[alias] = options[name]; - } + const splitPaths = searchPaths + .map(searchPath => path.normalize(searchPath).split(path.sep)) + .reduce((acc, splitSearchPath) => { + smallestPathLength = Math.min(smallestPathLength, splitSearchPath.length); + acc.push(splitSearchPath); + return acc; + }, []); - delete options[name]; - } + if (searchPaths[0].startsWith(path.sep)) { + commonPaths.push(path.sep); + } + + let splitIndex = 0; + function isPathTheSame() { + const compare = splitPaths[0][splitIndex]; + for (let i = 1; i < splitPaths.length; i++) { + if (compare !== splitPaths[i][splitIndex]) { + return false; } + } + return true; + } - return requestWithDefaults(options); - } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 + while (splitIndex < smallestPathLength) { + if (!isPathTheSame()) { + break; + } + commonPaths.push(splitPaths[0][splitIndex]); + splitIndex++; + } + return path.join(...commonPaths); +} + +async function getSearchResults(globber) { + const rawSearchResults = await globber.glob(); + + const searchResults = []; + for (const searchResult of rawSearchResults) { + const fileStats = await stats(searchResult); + if (!fileStats.isDirectory()) { + searchResults.push(searchResult); + if ( + searchResults + .map(item => item.toLowerCase()) + .find(item => item === searchResult.toLowerCase()) + ) { + logger.info( + `Uploads are case insensitive: ${searchResult} was detected that it will be overwritten by another file with the same path` + ); + } + } + } + return searchResults; +} + +function getRootDirectory(globber, searchResults) { + const searchPaths = globber.getSearchPaths(); + if (searchPaths.length > 1) { + logger.info( + `Multiple search paths detected. Calculating the least common ancestor of all paths` + ); + const lcaSearchPath = getMultiPathLCA(searchPaths); + logger.info( + `The least common ancestor is ${lcaSearchPath}. This will be the root directory of the artifact` + ); + return lcaSearchPath; + } + return searchResults.length === 1 && searchPaths[0] === searchResults[0] + ? dirname(searchResults[0]) + : searchPaths[0]; +} + +function getDefaultGlobOptions() { + return { + followSymbolicLinks: true, + implicitDescendants: true, + omitBrokenSymbolicLinks: true + }; +} +module.exports = { + findFilesToUpload +}; - return requestWithDefaults(...args); - } - return Object.assign(withDecorations, requestWithDefaults); -} +/***/ }), +/* 820 */, +/* 821 */, +/* 822 */, +/* 823 */, +/* 824 */ +/***/ (function(module) { /** - * This plugin is a 1:1 copy of internal @octokit/rest plugins. The primary - * goal is to rebuild @octokit/rest on top of @octokit/core. Once that is - * done, we will remove the registerEndpoints methods and return the methods - * directly as with the other plugins. At that point we will also remove the - * legacy workarounds and deprecations. 
- * - * See the plan at - * https://github.com/octokit/plugin-rest-endpoint-methods.js/pull/1 + * it treats the url in case it contains + * @param {String} url a http(s)://whatever.domain/${GROUP}/${PROJECT_NAME}/${BRANCH}/whateverfile.txt format, where place olders are optional and can be placed anywhere on the string + * @param {Object} placeHolders the key/values to replace url's place holders */ - -function restEndpointMethods(octokit) { - return endpointsToMethods(octokit, Endpoints); +function treatUrl(url, placeHolders) { + let result = url; + Object.entries(placeHolders).forEach( + ([key, value]) => (result = result.replace(`$\{${key}}`, value)) + ); + return result; } -restEndpointMethods.VERSION = VERSION; -exports.restEndpointMethods = restEndpointMethods; -//# sourceMappingURL=index.js.map +module.exports = { treatUrl }; /***/ }), -/* 843 */, -/* 844 */ +/* 825 */, +/* 826 */, +/* 827 */, +/* 828 */, +/* 829 */, +/* 830 */, +/* 831 */, +/* 832 */, +/* 833 */, +/* 834 */, +/* 835 */ /***/ (function(module) { +module.exports = require("url"); + +/***/ }), +/* 836 */, +/* 837 */, +/* 838 */, +/* 839 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + "use strict"; -// YAML error class. http://stackoverflow.com/questions/8458984 + + +var esprima; + +// Browserified version does not have esprima +// +// 1. For node.js just require module as deps +// 2. For browser try to require mudule via external AMD system. +// If not found - try to fallback to window.esprima. If not +// found too - then fail to parse. // +try { + // workaround to exclude package from browserify list. + var _require = require; + esprima = _require('esprima'); +} catch (_) { + /* eslint-disable no-redeclare */ + /* global window */ + if (typeof window !== 'undefined') esprima = window.esprima; +} +var Type = __webpack_require__(945); -function YAMLException(reason, mark) { - // Super constructor - Error.call(this); +function resolveJavascriptFunction(data) { + if (data === null) return false; - this.name = 'YAMLException'; - this.reason = reason; - this.mark = mark; - this.message = (this.reason || '(unknown reason)') + (this.mark ? ' ' + this.mark.toString() : ''); + try { + var source = '(' + data + ')', + ast = esprima.parse(source, { range: true }); - // Include stack trace in error object - if (Error.captureStackTrace) { - // Chrome and NodeJS - Error.captureStackTrace(this, this.constructor); - } else { - // FF, IE 10+ and Safari 6+. 
Fallback for others
    this.stack = (new Error()).stack || '';
  }
}


// Inherit from Error
YAMLException.prototype = Object.create(Error.prototype);
YAMLException.prototype.constructor = YAMLException;


YAMLException.prototype.toString = function toString(compact) {
  var result = this.name + ': ';

  result += this.reason || '(unknown reason)';

  if (!compact && this.mark) {
    result += ' ' + this.mark.toString();
  }

  return result;
};


module.exports = YAMLException;


/***/ }),
/* 845 */,
/* 846 */
/***/ (function(module, __unusedexports, __webpack_require__) {

const fs = __webpack_require__(747);

const { getUrlContent } = __webpack_require__(741);
const { treatUrl } = __webpack_require__(830);
const {
  validateDefinition,
  validateDependencies
} = __webpack_require__(755);
const { read: readYaml } = __webpack_require__(84);

/**
 * It returns the definition file plus its dependencies as an object
 *
 * @param {string} file - The definition file. It can be a URL or a path in the filesystem.
 * @param {Object} urlPlaceHolders the url place holders to replace url. This is needed in case either the definition file or the dependencies file are loaded from a URL
 */
async function readDefinitionFile(file, urlPlaceHolders = {}) {
  return file.startsWith("http")
    ? 
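// (editor's note) a minimal usage sketch for readDefinitionFile, assuming a
// hypothetical definition file hosted on example.com; the ${GROUP},
// ${PROJECT_NAME} and ${BRANCH} place holders are the ones treatUrl replaces:
//
//   const definition = await readDefinitionFile(
//     "https://example.com/${GROUP}/${PROJECT_NAME}/${BRANCH}/definition.yaml",
//     { GROUP: "kiegroup", PROJECT_NAME: "build-chain", BRANCH: "master" }
//   );
//   // definition.dependencies has already been resolved to an Array here;
//   // see loadDependencies below.
//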
readDefinitionFileFromUrl(file, urlPlaceHolders)
    : readDefinitionFileFromFile(file, urlPlaceHolders);
}

/**
 * reads the definition from a file and returns the yaml object
 * @param {String} filePath the definition file path
 * @param {Object} urlPlaceHolders the url place holders to replace url
 */
async function readDefinitionFileFromFile(filePath, urlPlaceHolders) {
  const definitionFileContent = fs.readFileSync(filePath, "utf8");
  console.log("readDefinitionFileFromFile", filePath);

  return loadYaml(
    readYaml(definitionFileContent),
    filePath.substring(0, filePath.lastIndexOf("/")),
    definitionFileContent,
    urlPlaceHolders
  );
}

/**
 * reads the definition from a url and returns the yaml object
 * @param {String} url the url to the definition file
 * @param {Object} urlPlaceHolders the url place holders to replace url
 */
async function readDefinitionFileFromUrl(url, urlPlaceHolders) {
  const treatedUrl = treatUrl(url, urlPlaceHolders);
  console.log("readDefinitionFileFromUrl", url);
  return loadYaml(
    readYaml(await getUrlContent(treatedUrl)),
    "./",
    url,
    urlPlaceHolders
  );
}

/**
 * it loads the dependencies content from an external file/url and returns the yaml object
 * @param {Object} definitionYaml the definition yaml object
 * @param {String} definitionFileFolder the definition folder path
 * @param {String} containerPath the path or url where the container was loaded
 * @param {Object} urlPlaceHolders the url place holders to replace url
 */
async function loadYaml(
  definitionYaml,
  definitionFileFolder,
  containerPath,
  urlPlaceHolders
) {
  validateDefinition(definitionYaml);
  definitionYaml.dependencies = await loadDependencies(
    definitionYaml.dependencies,
    definitionFileFolder,
    containerPath,
    urlPlaceHolders
  );
  return definitionYaml;
}

/**
 * loads the dependencies, resolving the `extends` chain when they point to another file/URL
 * @param {Array|String} dependencies the dependencies/extends property of the file; it can be an Array or a file path/URL
 * @param {*} definitionFileFolder the definition folder path
 * @param {*} containerPath the path or url where the container was loaded
 * @param {*} urlPlaceHolders the url place holders to replace url
 */
async function loadDependencies(
  dependencies,
  definitionFileFolder,
  containerPath,
  urlPlaceHolders
) {
  if (dependencies) {
    if (
      containerPath.startsWith("http") &&
      !Array.isArray(dependencies) &&
      !dependencies.startsWith("http")
    ) {
      const treatedUrl = treatUrl(containerPath, urlPlaceHolders);
      const dependenciesContent = await getUrlContent(
        `${treatedUrl.substring(
          0,
          treatedUrl.lastIndexOf("/")
        )}/${dependencies}`
      );
      fs.writeFileSync(dependencies, dependenciesContent);
    }

    if (!Array.isArray(dependencies)) {
      const dependenciesFilePath = dependencies.startsWith("http")
        ? treatUrl(dependencies, urlPlaceHolders)
        : `${definitionFileFolder}/${dependencies}`;
      const dependenciesFileContent = dependencies.startsWith("http")
        ? 
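// (editor's note) a sketch of how the `extends` chain resolves, assuming two
// hypothetical files; the recursive call below loads the extended file first
// and concatenates the current file's dependencies after it:
//
//   project-dependencies.yaml -> extends: base-dependencies.yaml
//                                dependencies: [{ project: kiegroup/child }]
//   base-dependencies.yaml    -> dependencies: [{ project: kiegroup/base }]
//
//   resolved result: [{ project: kiegroup/base }, { project: kiegroup/child }]
//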
await getUrlContent(dependenciesFilePath) - : fs.readFileSync(dependenciesFilePath, "utf8"); - const dependenciesYaml = readYaml(dependenciesFileContent); - validateDependencies(dependenciesYaml); - // Once the dependencies are loaded, the `extends` proporty is concatenated to the current dependencies - return ( - await loadDependencies( - dependenciesYaml.extends, - dependenciesFilePath.substring( - 0, - dependenciesFilePath.lastIndexOf("/") - ), - dependencies, - urlPlaceHolders - ) - ).concat(dependenciesYaml.dependencies); - } else { - // There's no extension for embedded dependencies - return dependencies; + if (_toString.call(pair) !== '[object Object]') return false; + + for (pairKey in pair) { + if (_hasOwnProperty.call(pair, pairKey)) { + if (!pairHasKey) pairHasKey = true; + else return false; + } } - } else { - return []; + + if (!pairHasKey) return false; + + if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); + else return false; } + + return true; } -module.exports = { readDefinitionFile }; +function constructYamlOmap(data) { + return data !== null ? data : []; +} + +module.exports = new Type('tag:yaml.org,2002:omap', { + kind: 'sequence', + resolve: resolveYamlOmap, + construct: constructYamlOmap +}); + + +/***/ }), +/* 843 */, +/* 844 */ +/***/ (function(module) { + +function ReadYamlException(message) { + this.message = message; + this.name = "ReadYamlException"; +} + +module.exports = { ReadYamlException }; /***/ }), +/* 845 */, +/* 846 */, /* 847 */, /* 848 */, /* 849 */ @@ -25810,10 +25432,10 @@ module.exports = { readDefinitionFile }; "use strict"; -const u = __webpack_require__(0).fromCallback +const u = __webpack_require__(676).fromCallback const path = __webpack_require__(622) const fs = __webpack_require__(598) -const _mkdirs = __webpack_require__(884) +const _mkdirs = __webpack_require__(727) const mkdirs = _mkdirs.mkdirs const mkdirsSync = _mkdirs.mkdirsSync @@ -26135,7 +25757,7 @@ function ownProp (obj, field) { } var path = __webpack_require__(622) -var minimatch = __webpack_require__(93) +var minimatch = __webpack_require__(595) var isAbsolute = __webpack_require__(681) var Minimatch = minimatch.Minimatch @@ -26395,173 +26017,30 @@ exports.getUserAgent = getUserAgent; /***/ }), /* 863 */, /* 864 */, -/* 865 */ +/* 865 */, +/* 866 */ /***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; +var shebangRegex = __webpack_require__(816); -var Type = __webpack_require__(547); - -var _hasOwnProperty = Object.prototype.hasOwnProperty; -var _toString = Object.prototype.toString; - -function resolveYamlOmap(data) { - if (data === null) return true; - - var objectKeys = [], index, length, pair, pairKey, pairHasKey, - object = data; - - for (index = 0, length = object.length; index < length; index += 1) { - pair = object[index]; - pairHasKey = false; - - if (_toString.call(pair) !== '[object Object]') return false; - - for (pairKey in pair) { - if (_hasOwnProperty.call(pair, pairKey)) { - if (!pairHasKey) pairHasKey = true; - else return false; - } - } - - if (!pairHasKey) return false; - - if (objectKeys.indexOf(pairKey) === -1) objectKeys.push(pairKey); - else return false; - } - - return true; -} - -function constructYamlOmap(data) { - return data !== null ? 
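// (editor's note) the !!omap resolver above only accepts a sequence of
// single-key mappings with no repeated keys; an assumed input sketch, not
// part of this patch:
//
//   accepted: !!omap [ a: 1, b: 2 ]     -> [{ a: 1 }, { b: 2 }]
//   rejected: !!omap [ a: 1, a: 2 ]     (duplicate key "a")
//   rejected: !!omap [ { a: 1, b: 2 } ] (an entry with more than one key)
//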
data : []; -} - -module.exports = new Type('tag:yaml.org,2002:omap', { - kind: 'sequence', - resolve: resolveYamlOmap, - construct: constructYamlOmap -}); - - -/***/ }), -/* 866 */ -/***/ (function(module) { - -function getBuild(project, buildConfiguration) { - const build = buildConfiguration.build - ? overrideProperties( - buildConfiguration.default, - buildConfiguration.build.find( - projectBuild => project === projectBuild.project - ) - ) - : buildConfiguration.default; - delete build.project; - return manipulateArchiveArtifacts(manipulateProperties(build), project); -} - -/** - * Override properties from one object to another - * - * @param {Object} target - * @param {Object} source - */ -function overrideProperties(target, source) { - const targetClone = { ...target }; - const sourceClone = { ...source }; - Object.entries(targetClone) - .filter(([key]) => sourceClone[key]) - .forEach(([key, value]) => { - if (typeof value === "object") { - targetClone[key] = overrideProperties( - targetClone[key], - sourceClone[key] - ); - } else { - targetClone[key] = sourceClone[key]; - } - }); - - Object.keys(sourceClone) - .filter(key => !targetClone[key]) - .forEach(key => (targetClone[key] = sourceClone[key])); - return targetClone; -} - -/** - * manipulates certains properties from an object. It basically converts String lists to Arrays - * @param {Object} properties the properties to manipulate - */ -function manipulateProperties(properties) { - const propertiesClone = { ...properties }; - Object.entries(propertiesClone).forEach(([key, value]) => { - if (typeof value === "object" && !Array.isArray(value)) { - propertiesClone[key] = manipulateProperties(propertiesClone[key]); - } else { - propertiesClone[key] = - typeof value === "string" && value.includes("\n") - ? value.split("\n").filter(e => e) - : value; - } - }); - return propertiesClone; -} - -/** - * manipulates `archive-artifacts` properties - * @param {Object} build the node's build section - * @param {String} project the project name - */ -function manipulateArchiveArtifacts(build, project) { - const archiveArtifacts = build["archive-artifacts"]; - if (archiveArtifacts) { - archiveArtifacts["if-no-files-found"] = build["archive-artifacts"][ - "if-no-files-found" - ] - ? archiveArtifacts["if-no-files-found"] - : "warn"; - archiveArtifacts.dependencies = archiveArtifacts.dependencies - ? archiveArtifacts.dependencies - : "none"; - archiveArtifacts.name = archiveArtifacts.name - ? archiveArtifacts.name - : project.includes("/") - ? project.split("/")[1] - : project; - archiveArtifacts.paths = treatArchiveArtifactsPath(archiveArtifacts.path); - } - return build; -} - -function treatArchiveArtifactsPath(archiveArtifactsPath) { - return (Array.isArray(archiveArtifactsPath) - ? archiveArtifactsPath - : archiveArtifactsPath.split("\n") - ) - .filter(line => line) - .reduce((acc, pathExpression) => { - acc.push(convertPathExpressionToPath(pathExpression)); - return acc; - }, []); -} +module.exports = function (str) { + var match = str.match(shebangRegex); -function convertPathExpressionToPath(pathExpression) { - const match = pathExpression.match(/([^@]*)@?(always|success|failure)?/); - return match - ? { - path: match[1], - on: match[2] ? match[2] : "success" - } - : pathExpression; -} + if (!match) { + return null; + } -function treatProject(project, buildConfiguration) { - return { build: getBuild(project, buildConfiguration) }; -} + var arr = match[0].replace(/#! 
?/, '').split(' '); + var bin = arr[0].split('/').pop(); + var arg = arr[1]; -module.exports = { treatProject }; + return (bin === 'env' ? + arg : + bin + (arg ? ' ' + arg : '') + ); +}; /***/ }), @@ -26641,7 +26120,7 @@ module.exports = { // This is adapted from https://github.com/normalize/mz // Copyright (c) 2014-2016 Jonathan Ong me@jongleberry.com and Contributors -const u = __webpack_require__(0).fromCallback +const u = __webpack_require__(676).fromCallback const fs = __webpack_require__(598) const api = [ @@ -27174,28 +26653,8 @@ module.exports = { /***/ }), /* 882 */, -/* 883 */, -/* 884 */ -/***/ (function(module, __unusedexports, __webpack_require__) { - -"use strict"; - -const u = __webpack_require__(0).fromPromise -const { makeDir: _makeDir, makeDirSync } = __webpack_require__(54) -const makeDir = u(_makeDir) - -module.exports = { - mkdirs: makeDir, - mkdirsSync: makeDirSync, - // alias - mkdirp: makeDir, - mkdirpSync: makeDirSync, - ensureDir: makeDir, - ensureDirSync: makeDirSync -} - - -/***/ }), +/* 883 */, +/* 884 */, /* 885 */, /* 886 */, /* 887 */, @@ -27211,7 +26670,7 @@ Object.defineProperty(exports, '__esModule', { value: true }); var core = __webpack_require__(448); var pluginRequestLog = __webpack_require__(916); var pluginPaginateRest = __webpack_require__(299); -var pluginRestEndpointMethods = __webpack_require__(842); +var pluginRestEndpointMethods = __webpack_require__(250); const VERSION = "17.11.2"; @@ -27372,10 +26831,10 @@ exports.withCustomRequest = withCustomRequest; "use strict"; -const u = __webpack_require__(0).fromCallback +const u = __webpack_require__(676).fromCallback const path = __webpack_require__(622) const fs = __webpack_require__(598) -const mkdir = __webpack_require__(884) +const mkdir = __webpack_require__(727) const pathExists = __webpack_require__(322).pathExists function createLink (srcpath, dstpath, callback) { @@ -27443,10 +26902,185 @@ module.exports = { /* 907 */, /* 908 */, /* 909 */, -/* 910 */, +/* 910 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; +// JS-YAML's default schema for `load` function. +// It is not described in the YAML specification. +// +// This schema is based on JS-YAML's default safe schema and includes +// JavaScript-specific types: !!js/undefined, !!js/regexp and !!js/function. +// +// Also this schema is used as default base schema at `Schema.create` function. + + + + + +var Schema = __webpack_require__(43); + + +module.exports = Schema.DEFAULT = new Schema({ + include: [ + __webpack_require__(723) + ], + explicit: [ + __webpack_require__(386), + __webpack_require__(629), + __webpack_require__(839) + ] +}); + + +/***/ }), /* 911 */, /* 912 */, -/* 913 */, +/* 913 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +/*eslint-disable no-bitwise*/ + +var NodeBuffer; + +try { + // A trick for browserified version, to not include `Buffer` shim + var _require = require; + NodeBuffer = _require('buffer').Buffer; +} catch (__) {} + +var Type = __webpack_require__(945); + + +// [ 64, 65, 66 ] -> [ padding, CR, LF ] +var BASE64_MAP = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/=\n\r'; + + +function resolveYamlBinary(data) { + if (data === null) return false; + + var code, idx, bitlen = 0, max = data.length, map = BASE64_MAP; + + // Convert one by one. 
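// (editor's note) worked example of the bit-length check in this loop,
// assuming the input "aGk=" (base64 for "hi"): all four characters, padding
// included, count 6 bits each -> 24 bits, and 24 % 8 === 0, so the string
// resolves as binary; a truncated "aGk" would yield 18 bits (18 % 8 === 2)
// and be rejected.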
+ for (idx = 0; idx < max; idx++) { + code = map.indexOf(data.charAt(idx)); + + // Skip CR/LF + if (code > 64) continue; + + // Fail on illegal characters + if (code < 0) return false; + + bitlen += 6; + } + + // If there are any bits left, source was corrupted + return (bitlen % 8) === 0; +} + +function constructYamlBinary(data) { + var idx, tailbits, + input = data.replace(/[\r\n=]/g, ''), // remove CR/LF & padding to simplify scan + max = input.length, + map = BASE64_MAP, + bits = 0, + result = []; + + // Collect by 6*4 bits (3 bytes) + + for (idx = 0; idx < max; idx++) { + if ((idx % 4 === 0) && idx) { + result.push((bits >> 16) & 0xFF); + result.push((bits >> 8) & 0xFF); + result.push(bits & 0xFF); + } + + bits = (bits << 6) | map.indexOf(input.charAt(idx)); + } + + // Dump tail + + tailbits = (max % 4) * 6; + + if (tailbits === 0) { + result.push((bits >> 16) & 0xFF); + result.push((bits >> 8) & 0xFF); + result.push(bits & 0xFF); + } else if (tailbits === 18) { + result.push((bits >> 10) & 0xFF); + result.push((bits >> 2) & 0xFF); + } else if (tailbits === 12) { + result.push((bits >> 4) & 0xFF); + } + + // Wrap into Buffer for NodeJS and leave Array for browser + if (NodeBuffer) { + // Support node 6.+ Buffer API when available + return NodeBuffer.from ? NodeBuffer.from(result) : new NodeBuffer(result); + } + + return result; +} + +function representYamlBinary(object /*, style*/) { + var result = '', bits = 0, idx, tail, + max = object.length, + map = BASE64_MAP; + + // Convert every three bytes to 4 ASCII characters. + + for (idx = 0; idx < max; idx++) { + if ((idx % 3 === 0) && idx) { + result += map[(bits >> 18) & 0x3F]; + result += map[(bits >> 12) & 0x3F]; + result += map[(bits >> 6) & 0x3F]; + result += map[bits & 0x3F]; + } + + bits = (bits << 8) + object[idx]; + } + + // Dump tail + + tail = max % 3; + + if (tail === 0) { + result += map[(bits >> 18) & 0x3F]; + result += map[(bits >> 12) & 0x3F]; + result += map[(bits >> 6) & 0x3F]; + result += map[bits & 0x3F]; + } else if (tail === 2) { + result += map[(bits >> 10) & 0x3F]; + result += map[(bits >> 4) & 0x3F]; + result += map[(bits << 2) & 0x3F]; + result += map[64]; + } else if (tail === 1) { + result += map[(bits >> 2) & 0x3F]; + result += map[(bits << 4) & 0x3F]; + result += map[64]; + result += map[64]; + } + + return result; +} + +function isBinary(object) { + return NodeBuffer && NodeBuffer.isBuffer(object); +} + +module.exports = new Type('tag:yaml.org,2002:binary', { + kind: 'scalar', + resolve: resolveYamlBinary, + construct: constructYamlBinary, + predicate: isBinary, + represent: representYamlBinary +}); + + +/***/ }), /* 914 */, /* 915 */, /* 916 */ @@ -27455,156 +27089,403 @@ module.exports = { "use strict"; -Object.defineProperty(exports, '__esModule', { value: true }); +Object.defineProperty(exports, '__esModule', { value: true }); + +const VERSION = "1.0.0"; + +/** + * @param octokit Octokit instance + * @param options Options passed to Octokit constructor + */ + +function requestLog(octokit) { + octokit.hook.wrap("request", (request, options) => { + octokit.log.debug("request", options); + const start = Date.now(); + const requestOptions = octokit.request.endpoint.parse(options); + const path = requestOptions.url.replace(options.baseUrl, ""); + return request(options).then(response => { + octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`); + return response; + }).catch(error => { + octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in 
${Date.now() - start}ms`); + throw error; + }); + }); +} +requestLog.VERSION = VERSION; + +exports.requestLog = requestLog; +//# sourceMappingURL=index.js.map + + +/***/ }), +/* 917 */, +/* 918 */, +/* 919 */, +/* 920 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +var constants = __webpack_require__(619) + +var origCwd = process.cwd +var cwd = null + +var platform = process.env.GRACEFUL_FS_PLATFORM || process.platform + +process.cwd = function() { + if (!cwd) + cwd = origCwd.call(process) + return cwd +} +try { + process.cwd() +} catch (er) {} + +var chdir = process.chdir +process.chdir = function(d) { + cwd = null + chdir.call(process, d) +} + +module.exports = patch + +function patch (fs) { + // (re-)implement some things that are known busted or missing. + + // lchmod, broken prior to 0.6.2 + // back-port the fix here. + if (constants.hasOwnProperty('O_SYMLINK') && + process.version.match(/^v0\.6\.[0-2]|^v0\.5\./)) { + patchLchmod(fs) + } + + // lutimes implementation, or no-op + if (!fs.lutimes) { + patchLutimes(fs) + } + + // https://github.com/isaacs/node-graceful-fs/issues/4 + // Chown should not fail on einval or eperm if non-root. + // It should not fail on enosys ever, as this just indicates + // that a fs doesn't support the intended operation. + + fs.chown = chownFix(fs.chown) + fs.fchown = chownFix(fs.fchown) + fs.lchown = chownFix(fs.lchown) + + fs.chmod = chmodFix(fs.chmod) + fs.fchmod = chmodFix(fs.fchmod) + fs.lchmod = chmodFix(fs.lchmod) + + fs.chownSync = chownFixSync(fs.chownSync) + fs.fchownSync = chownFixSync(fs.fchownSync) + fs.lchownSync = chownFixSync(fs.lchownSync) -const VERSION = "1.0.0"; + fs.chmodSync = chmodFixSync(fs.chmodSync) + fs.fchmodSync = chmodFixSync(fs.fchmodSync) + fs.lchmodSync = chmodFixSync(fs.lchmodSync) -/** - * @param octokit Octokit instance - * @param options Options passed to Octokit constructor - */ + fs.stat = statFix(fs.stat) + fs.fstat = statFix(fs.fstat) + fs.lstat = statFix(fs.lstat) -function requestLog(octokit) { - octokit.hook.wrap("request", (request, options) => { - octokit.log.debug("request", options); - const start = Date.now(); - const requestOptions = octokit.request.endpoint.parse(options); - const path = requestOptions.url.replace(options.baseUrl, ""); - return request(options).then(response => { - octokit.log.info(`${requestOptions.method} ${path} - ${response.status} in ${Date.now() - start}ms`); - return response; - }).catch(error => { - octokit.log.info(`${requestOptions.method} ${path} - ${error.status} in ${Date.now() - start}ms`); - throw error; - }); - }); -} -requestLog.VERSION = VERSION; + fs.statSync = statFixSync(fs.statSync) + fs.fstatSync = statFixSync(fs.fstatSync) + fs.lstatSync = statFixSync(fs.lstatSync) -exports.requestLog = requestLog; -//# sourceMappingURL=index.js.map + // if lchmod/lchown do not exist, then make them no-ops + if (!fs.lchmod) { + fs.lchmod = function (path, mode, cb) { + if (cb) process.nextTick(cb) + } + fs.lchmodSync = function () {} + } + if (!fs.lchown) { + fs.lchown = function (path, uid, gid, cb) { + if (cb) process.nextTick(cb) + } + fs.lchownSync = function () {} + } + // on Windows, A/V software can lock the directory, causing this + // to fail with an EACCES or EPERM if the directory contains newly + // created files. Try again on failure, for up to 60 seconds. 
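// (editor's note) summary of the retry loop below, as read from the code
// rather than from upstream documentation: the rename is retried only while
// the destination still stats as ENOENT, with a backoff that grows by 10ms
// per attempt, caps at 100ms, and gives up after 60 seconds total.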
-/***/ }), -/* 917 */ -/***/ (function(module, __unusedexports, __webpack_require__) { + // Set the timeout this long because some Windows Anti-Virus, such as Parity + // bit9, may lock files for up to a minute, causing npm package install + // failures. Also, take care to yield the scheduler. Windows scheduling gives + // CPU to a busy looping process, which can cause the program causing the lock + // contention to be starved of CPU by node, so the contention doesn't resolve. + if (platform === "win32") { + fs.rename = (function (fs$rename) { return function (from, to, cb) { + var start = Date.now() + var backoff = 0; + fs$rename(from, to, function CB (er) { + if (er + && (er.code === "EACCES" || er.code === "EPERM") + && Date.now() - start < 60000) { + setTimeout(function() { + fs.stat(to, function (stater, st) { + if (stater && stater.code === "ENOENT") + fs$rename(from, to, CB); + else + cb(er) + }) + }, backoff) + if (backoff < 100) + backoff += 10; + return; + } + if (cb) cb(er) + }) + }})(fs.rename) + } -"use strict"; + // if read() returns EAGAIN, then just try it again. + fs.read = (function (fs$read) { + function read (fd, buffer, offset, length, position, callback_) { + var callback + if (callback_ && typeof callback_ === 'function') { + var eagCounter = 0 + callback = function (er, _, __) { + if (er && er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + callback_.apply(this, arguments) + } + } + return fs$read.call(fs, fd, buffer, offset, length, position, callback) + } + // This ensures `util.promisify` works as it does for native `fs.read`. + read.__proto__ = fs$read + return read + })(fs.read) -/*eslint-disable max-len*/ + fs.readSync = (function (fs$readSync) { return function (fd, buffer, offset, length, position) { + var eagCounter = 0 + while (true) { + try { + return fs$readSync.call(fs, fd, buffer, offset, length, position) + } catch (er) { + if (er.code === 'EAGAIN' && eagCounter < 10) { + eagCounter ++ + continue + } + throw er + } + } + }})(fs.readSync) -var common = __webpack_require__(436); -var YAMLException = __webpack_require__(844); -var Type = __webpack_require__(547); + function patchLchmod (fs) { + fs.lchmod = function (path, mode, callback) { + fs.open( path + , constants.O_WRONLY | constants.O_SYMLINK + , mode + , function (err, fd) { + if (err) { + if (callback) callback(err) + return + } + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. + fs.fchmod(fd, mode, function (err) { + fs.close(fd, function(err2) { + if (callback) callback(err || err2) + }) + }) + }) + } + fs.lchmodSync = function (path, mode) { + var fd = fs.openSync(path, constants.O_WRONLY | constants.O_SYMLINK, mode) -function compileList(schema, name, result) { - var exclude = []; + // prefer to return the chmod error, if one occurs, + // but still try to close, and report closing errors if they occur. 
+ var threw = true + var ret + try { + ret = fs.fchmodSync(fd, mode) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret + } + } - schema.include.forEach(function (includedSchema) { - result = compileList(includedSchema, name, result); - }); + function patchLutimes (fs) { + if (constants.hasOwnProperty("O_SYMLINK")) { + fs.lutimes = function (path, at, mt, cb) { + fs.open(path, constants.O_SYMLINK, function (er, fd) { + if (er) { + if (cb) cb(er) + return + } + fs.futimes(fd, at, mt, function (er) { + fs.close(fd, function (er2) { + if (cb) cb(er || er2) + }) + }) + }) + } - schema[name].forEach(function (currentType) { - result.forEach(function (previousType, previousIndex) { - if (previousType.tag === currentType.tag && previousType.kind === currentType.kind) { - exclude.push(previousIndex); + fs.lutimesSync = function (path, at, mt) { + var fd = fs.openSync(path, constants.O_SYMLINK) + var ret + var threw = true + try { + ret = fs.futimesSync(fd, at, mt) + threw = false + } finally { + if (threw) { + try { + fs.closeSync(fd) + } catch (er) {} + } else { + fs.closeSync(fd) + } + } + return ret } - }); - result.push(currentType); - }); + } else { + fs.lutimes = function (_a, _b, _c, cb) { if (cb) process.nextTick(cb) } + fs.lutimesSync = function () {} + } + } - return result.filter(function (type, index) { - return exclude.indexOf(index) === -1; - }); -} + function chmodFix (orig) { + if (!orig) return orig + return function (target, mode, cb) { + return orig.call(fs, target, mode, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } + + function chmodFixSync (orig) { + if (!orig) return orig + return function (target, mode) { + try { + return orig.call(fs, target, mode) + } catch (er) { + if (!chownErOk(er)) throw er + } + } + } -function compileMap(/* lists... */) { - var result = { - scalar: {}, - sequence: {}, - mapping: {}, - fallback: {} - }, index, length; + function chownFix (orig) { + if (!orig) return orig + return function (target, uid, gid, cb) { + return orig.call(fs, target, uid, gid, function (er) { + if (chownErOk(er)) er = null + if (cb) cb.apply(this, arguments) + }) + } + } - function collectType(type) { - result[type.kind][type.tag] = result['fallback'][type.tag] = type; + function chownFixSync (orig) { + if (!orig) return orig + return function (target, uid, gid) { + try { + return orig.call(fs, target, uid, gid) + } catch (er) { + if (!chownErOk(er)) throw er + } + } } - for (index = 0, length = arguments.length; index < length; index += 1) { - arguments[index].forEach(collectType); + function statFix (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options, cb) { + if (typeof options === 'function') { + cb = options + options = null + } + function callback (er, stats) { + if (stats) { + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + } + if (cb) cb.apply(this, arguments) + } + return options ? 
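// (editor's note) worked example of the uid/gid fix-up in the callback
// above: a signed 32-bit uid of -2 becomes -2 + 0x100000000 = 4294967294
// (0xFFFFFFFE), i.e. the unsigned value the OS actually reported; values
// that were already non-negative are left untouched.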
orig.call(fs, target, options, callback) + : orig.call(fs, target, callback) + } } - return result; -} - - -function Schema(definition) { - this.include = definition.include || []; - this.implicit = definition.implicit || []; - this.explicit = definition.explicit || []; - this.implicit.forEach(function (type) { - if (type.loadKind && type.loadKind !== 'scalar') { - throw new YAMLException('There is a non-scalar type in the implicit list of a schema. Implicit resolving of such types is not supported.'); + function statFixSync (orig) { + if (!orig) return orig + // Older versions of Node erroneously returned signed integers for + // uid + gid. + return function (target, options) { + var stats = options ? orig.call(fs, target, options) + : orig.call(fs, target) + if (stats.uid < 0) stats.uid += 0x100000000 + if (stats.gid < 0) stats.gid += 0x100000000 + return stats; } - }); - - this.compiledImplicit = compileList(this, 'implicit', []); - this.compiledExplicit = compileList(this, 'explicit', []); - this.compiledTypeMap = compileMap(this.compiledImplicit, this.compiledExplicit); -} - - -Schema.DEFAULT = null; - + } -Schema.create = function createSchema() { - var schemas, types; + // ENOSYS means that the fs doesn't support the op. Just ignore + // that, because it doesn't matter. + // + // if there's no getuid, or if getuid() is something other + // than 0, and the error is EINVAL or EPERM, then just ignore + // it. + // + // This specific case is a silent failure in cp, install, tar, + // and most other unix tools that manage permissions. + // + // When running as root, or if other types of errors are + // encountered, then it's strict. + function chownErOk (er) { + if (!er) + return true - switch (arguments.length) { - case 1: - schemas = Schema.DEFAULT; - types = arguments[0]; - break; + if (er.code === "ENOSYS") + return true - case 2: - schemas = arguments[0]; - types = arguments[1]; - break; + var nonroot = !process.getuid || process.getuid() !== 0 + if (nonroot) { + if (er.code === "EINVAL" || er.code === "EPERM") + return true + } - default: - throw new YAMLException('Wrong number of arguments for Schema.create function'); + return false } +} - schemas = common.toArray(schemas); - types = common.toArray(types); - if (!schemas.every(function (schema) { return schema instanceof Schema; })) { - throw new YAMLException('Specified list of super schemas (or a single Schema object) contains a non-Schema object.'); - } +/***/ }), +/* 921 */ +/***/ (function(module, __unusedexports, __webpack_require__) { - if (!types.every(function (type) { return type instanceof Type; })) { - throw new YAMLException('Specified list of YAML types (or a single Type object) contains a non-Type object.'); - } +"use strict"; - return new Schema({ - include: schemas, - explicit: types - }); -}; +var Type = __webpack_require__(945); -module.exports = Schema; +module.exports = new Type('tag:yaml.org,2002:seq', { + kind: 'sequence', + construct: function (data) { return data !== null ? 
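// (editor's note) the ternary completed below maps a null (empty) YAML node
// to an empty array; assuming js-yaml's usual behavior, loading a bare
// "!!seq" document therefore yields [] instead of null.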
data : []; } +}); /***/ }), -/* 918 */, -/* 919 */, -/* 920 */, -/* 921 */, /* 922 */, /* 923 */ /***/ (function(__unusedmodule, exports, __webpack_require__) { @@ -27616,7 +27497,7 @@ const assert = __webpack_require__(357); const os = __webpack_require__(87); const path = __webpack_require__(622); const pathHelper = __webpack_require__(972); -const minimatch_1 = __webpack_require__(93); +const minimatch_1 = __webpack_require__(595); const internal_match_kind_1 = __webpack_require__(327); const internal_path_1 = __webpack_require__(383); const IS_WINDOWS = process.platform === 'win32'; @@ -28033,7 +27914,74 @@ function clone (obj) { /***/ }), /* 944 */, -/* 945 */, +/* 945 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var YAMLException = __webpack_require__(556); + +var TYPE_CONSTRUCTOR_OPTIONS = [ + 'kind', + 'resolve', + 'construct', + 'instanceOf', + 'predicate', + 'represent', + 'defaultStyle', + 'styleAliases' +]; + +var YAML_NODE_KINDS = [ + 'scalar', + 'sequence', + 'mapping' +]; + +function compileStyleAliases(map) { + var result = {}; + + if (map !== null) { + Object.keys(map).forEach(function (style) { + map[style].forEach(function (alias) { + result[String(alias)] = style; + }); + }); + } + + return result; +} + +function Type(tag, options) { + options = options || {}; + + Object.keys(options).forEach(function (name) { + if (TYPE_CONSTRUCTOR_OPTIONS.indexOf(name) === -1) { + throw new YAMLException('Unknown option "' + name + '" is met in definition of "' + tag + '" YAML type.'); + } + }); + + // TODO: Add tag format check. + this.tag = tag; + this.kind = options['kind'] || null; + this.resolve = options['resolve'] || function () { return true; }; + this.construct = options['construct'] || function (data) { return data; }; + this.instanceOf = options['instanceOf'] || null; + this.predicate = options['predicate'] || null; + this.represent = options['represent'] || null; + this.defaultStyle = options['defaultStyle'] || null; + this.styleAliases = compileStyleAliases(options['styleAliases'] || null); + + if (YAML_NODE_KINDS.indexOf(this.kind) === -1) { + throw new YAMLException('Unknown kind "' + this.kind + '" is specified for "' + tag + '" YAML type.'); + } +} + +module.exports = Type; + + +/***/ }), /* 946 */ /***/ (function(module, __unusedexports, __webpack_require__) { @@ -28067,7 +28015,66 @@ module.exports = { /***/ }), -/* 947 */, +/* 947 */ +/***/ (function(module, __unusedexports, __webpack_require__) { + +"use strict"; + + +var Type = __webpack_require__(945); + +var _toString = Object.prototype.toString; + +function resolveYamlPairs(data) { + if (data === null) return true; + + var index, length, pair, keys, result, + object = data; + + result = new Array(object.length); + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + + if (_toString.call(pair) !== '[object Object]') return false; + + keys = Object.keys(pair); + + if (keys.length !== 1) return false; + + result[index] = [ keys[0], pair[keys[0]] ]; + } + + return true; +} + +function constructYamlPairs(data) { + if (data === null) return []; + + var index, length, pair, keys, result, + object = data; + + result = new Array(object.length); + + for (index = 0, length = object.length; index < length; index += 1) { + pair = object[index]; + + keys = Object.keys(pair); + + result[index] = [ keys[0], pair[keys[0]] ]; + } + + return result; +} + +module.exports = new Type('tag:yaml.org,2002:pairs', { + kind: 'sequence', + resolve: 
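// (editor's note) unlike the !!omap resolver earlier in this bundle, the
// resolveYamlPairs function referenced below allows duplicate keys; an
// assumed input sketch:
//
//   !!pairs [ a: 1, a: 2 ] -> [["a", 1], ["a", 2]]
//
// each entry must still be a mapping with exactly one key.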
resolveYamlPairs, + construct: constructYamlPairs +}); + + +/***/ }), /* 948 */ /***/ (function(module) { @@ -28872,7 +28879,7 @@ var __importStar = (this && this.__importStar) || function (mod) { return result; }; Object.defineProperty(exports, "__esModule", { value: true }); -const tr = __importStar(__webpack_require__(9)); +const tr = __importStar(__webpack_require__(686)); /** * Exec a command. * Output will be streamed to the live console. @@ -28901,36 +28908,17 @@ exports.exec = exec; /***/ }), /* 987 */, -/* 988 */, -/* 989 */, -/* 990 */, -/* 991 */, -/* 992 */, -/* 993 */, -/* 994 */, -/* 995 */, -/* 996 */, -/* 997 */ +/* 988 */ /***/ (function(module, __unusedexports, __webpack_require__) { "use strict"; -// Standard YAML's Core schema. -// http://www.yaml.org/spec/1.2/spec.html#id2804923 -// -// NOTE: JS-YAML does not support schema-specific tag resolution restrictions. -// So, Core schema has no distinctions from JSON schema is JS-YAML. - - +var Type = __webpack_require__(945); -var Schema = __webpack_require__(917); - - -module.exports = new Schema({ - include: [ - __webpack_require__(304) - ] +module.exports = new Type('tag:yaml.org,2002:map', { + kind: 'mapping', + construct: function (data) { return data !== null ? data : {}; } }); diff --git a/package-lock.json b/package-lock.json index 393066d9..912ea96a 100644 --- a/package-lock.json +++ b/package-lock.json @@ -987,9 +987,9 @@ } }, "@kie/build-chain-configuration-reader": { - "version": "2.0.1", - "resolved": "https://registry.npmjs.org/@kie/build-chain-configuration-reader/-/build-chain-configuration-reader-2.0.1.tgz", - "integrity": "sha512-XLZD+pcXY6C6looe7SlNnOsaFIPcxAM0k9nlDpuJiirzPKGxVXeObnOq2fz5mdxABMfgLlR3xmSVAOIK/upVDg==", + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/@kie/build-chain-configuration-reader/-/build-chain-configuration-reader-2.0.2.tgz", + "integrity": "sha512-pYjSA8QlON/VEuahzgUbDyeAey2XwUn/BYasJmjEdeDnQ6ltMiTTJK29v6HAuh/umgXOIbu172gzR/qIO4gAZA==", "requires": { "@actions/artifact": "^0.3.5", "@actions/core": "^1.1.3", diff --git a/package.json b/package.json index 879b4d4c..53a29860 100644 --- a/package.json +++ b/package.json @@ -1,6 +1,6 @@ { "name": "@kie/build-chain-action", - "version": "2.2.2", + "version": "2.2.3", "description": "GitHub action to define action chains", "main": "dist/build-chain-cli.js", "author": "Enrique Mingorance Cano ", @@ -28,7 +28,7 @@ "@actions/core": "^1.1.3", "@actions/exec": "^1.0.4", "@actions/glob": "^0.1.0", - "@kie/build-chain-configuration-reader": "^2.0.1", + "@kie/build-chain-configuration-reader": "^2.0.2", "@octokit/rest": "^17.6.0", "argparse": "^2.0.1", "fs-extra": "^9.0.0",