diff --git a/action.yml b/action.yml
index 8b5342b..650ba2d 100644
--- a/action.yml
+++ b/action.yml
@@ -20,6 +20,7 @@ inputs:
paths_filter:
description: 'A YAML-string with named paths_ignore/paths patterns'
required: false
+ default: '{}'
cancel_others:
description: 'If true, then workflow runs from outdated commits will be cancelled'
required: false
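For reference, `paths_filter` is consumed as a YAML string and, with the new default of `'{}'`, parses to an empty object. A minimal sketch of a non-empty value, using hypothetical filter names `docs` and `backend` (the keys `paths`, `paths_ignore` and `backtracking` follow the `PathsFilterType` schema added in `dist/index.js` below):

```js
// Sketch only: a paths_filter value as the action would receive and parse it.
const jsYaml = require('js-yaml');

const pathsFilterInput = `
docs:
  paths_ignore:
    - "docs/**"
backend:
  paths:
    - "src/**"
  backtracking: 5
`;

const filters = jsYaml.load(pathsFilterInput);
// => { docs: { paths_ignore: ['docs/**'] }, backend: { paths: ['src/**'], backtracking: 5 } }
const noFilters = jsYaml.load('{}'); // the new default resolves to {}
```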
diff --git a/dist/index.js b/dist/index.js
index 26dcf56..a202e7c 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -38,92 +38,209 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
+var __classPrivateFieldGet = (this && this.__classPrivateFieldGet) || function (receiver, state, kind, f) {
+ if (kind === "a" && !f) throw new TypeError("Private accessor was defined without a getter");
+ if (typeof state === "function" ? receiver !== state || !f : !state.has(receiver)) throw new TypeError("Cannot read private member from an object whose class did not declare it");
+ return kind === "m" ? f : kind === "a" ? f.call(receiver) : f ? f.value : state.get(receiver);
+};
var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
+var _PathsResult_pathsResult, _PathsResult_unknownFilters;
Object.defineProperty(exports, "__esModule", ({ value: true }));
const core = __importStar(__nccwpck_require__(2186));
const github = __importStar(__nccwpck_require__(5438));
+const artifact = __importStar(__nccwpck_require__(2605));
const micromatch_1 = __importDefault(__nccwpck_require__(6228));
+const parse_json_1 = __importDefault(__nccwpck_require__(4285));
const js_yaml_1 = __importDefault(__nccwpck_require__(1917));
-const workflowRunTriggerOptions = [
+const fs = __importStar(__nccwpck_require__(3977));
+const zod_1 = __nccwpck_require__(3301);
+const jszip_1 = __importDefault(__nccwpck_require__(3592));
+const compress_json_1 = __nccwpck_require__(558);
+const WorkflowRunTriggerType = zod_1.z.enum([
'pull_request',
'push',
'workflow_dispatch',
'schedule',
'release'
-];
-const concurrentSkippingOptions = [
+]);
+const WorkflowRunStatusType = zod_1.z.enum(['queued', 'in_progress', 'completed']);
+const WorkflowRunConclusionType = zod_1.z.enum([
+ 'success',
+ 'failure',
+ 'neutral',
+ 'cancelled',
+ 'skipped',
+ 'timed_out'
+]);
+const ChangedFilesType = zod_1.z.array(zod_1.z.array(zod_1.z.string()));
+const ArtifactPathsResultType = zod_1.z.object({
+ should_skip: zod_1.z.boolean().nullable(),
+ backtrack_count: zod_1.z.number(),
+ skipped_by: zod_1.z.optional(zod_1.z.number()),
+ matched_files: zod_1.z.optional(zod_1.z.array(zod_1.z.string()))
+});
+const ArtifactResultType = zod_1.z.object({
+ should_skip: zod_1.z.boolean(),
+ reason: zod_1.z.string(),
+ skipped_by: zod_1.z.optional(zod_1.z.number()),
+ paths_result: zod_1.z.optional(zod_1.z.record(zod_1.z.string(), ArtifactPathsResultType)),
+ changed_files: zod_1.z.optional(ChangedFilesType)
+});
+const ArtifactRunsType = zod_1.z.record(
+/** Run ID */
+zod_1.z.number(), zod_1.z.object({
+ /** Tree Hash */
+ t: zod_1.z.optional(zod_1.z.string()),
+ /** Result */
+ r: ArtifactResultType
+}));
+const ArtifactDataType = zod_1.z
+ .object({
+ /* Version */
+ v: zod_1.z.literal(1),
+ /* Workflow Runs */
+ r: ArtifactRunsType
+})
+ .strict();
+const PathsFilterType = zod_1.z.record(zod_1.z.string(), zod_1.z.object({
+ paths_ignore: zod_1.z.array(zod_1.z.string()).default([]),
+ paths: zod_1.z.array(zod_1.z.string()).default([]),
+ backtracking: zod_1.z.union([zod_1.z.boolean(), zod_1.z.number()]).default(true)
+}));
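To make the compressed artifact layout above easier to follow, here is an illustrative payload that matches the `ArtifactDataType` shape (the run id, tree hash and result values are made up):

```js
// Illustrative only. The short keys mirror the inline comments above:
// "v" = schema version, "r" = runs keyed by run id,
// "t" = tree hash of that run, inner "r" = the stored result.
const exampleArtifactData = {
  v: 1,
  r: {
    4242: {
      t: 'a94a8fe5ccb19ba61c4c0873d391e987982fbbd3',
      r: {
        should_skip: true,
        reason: 'skip_after_successful_duplicate',
        skipped_by: 4100
      }
    }
  }
};
```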
+class PathsResult {
+ constructor() {
+ _PathsResult_pathsResult.set(this, {});
+ _PathsResult_unknownFilters.set(this, new Set());
+ }
+ addFilter(name) {
+ __classPrivateFieldGet(this, _PathsResult_pathsResult, "f")[name] = { should_skip: null, backtrack_count: 0 };
+ __classPrivateFieldGet(this, _PathsResult_unknownFilters, "f").add(name);
+ }
+ updateFilter(name, properties) {
+ Object.assign(__classPrivateFieldGet(this, _PathsResult_pathsResult, "f")[name], properties);
+ if (__classPrivateFieldGet(this, _PathsResult_pathsResult, "f")[name].should_skip !== null) {
+ __classPrivateFieldGet(this, _PathsResult_unknownFilters, "f").delete(name);
+ }
+ }
+ getFilter(name) {
+ return __classPrivateFieldGet(this, _PathsResult_pathsResult, "f")[name];
+ }
+ getUnknownFilters() {
+ return __classPrivateFieldGet(this, _PathsResult_unknownFilters, "f").values();
+ }
+ hasUnknownFilters() {
+ return __classPrivateFieldGet(this, _PathsResult_unknownFilters, "f").size > 0;
+ }
+ get output() {
+ return __classPrivateFieldGet(this, _PathsResult_pathsResult, "f");
+ }
+ get artifact() {
+ return Object.entries(__classPrivateFieldGet(this, _PathsResult_pathsResult, "f")).reduce((accumulator, [key, value]) => {
+ accumulator[key] = (value.skipped_by ? Object.assign(Object.assign({}, value), { skipped_by: value.skipped_by.id }) : value);
+ return accumulator;
+ }, {});
+ }
+}
+_PathsResult_pathsResult = new WeakMap(), _PathsResult_unknownFilters = new WeakMap();
+class Result {
+ constructor(result) {
+ this.result = result;
+ }
+ get output() {
+ return Object.assign(Object.assign({}, this.result), (this.result.paths_result && {
+ paths_result: this.result.paths_result.output
+ }));
+ }
+ get artifact() {
+ return Object.assign(Object.assign(Object.assign({}, this.result), (this.result.skipped_by && { skipped_by: this.result.skipped_by.id })), (this.result.paths_result && {
+ paths_result: this.result.paths_result.artifact
+ }));
+ }
+}
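As a rough illustration of how the two getters on `Result` differ (the run object below is hypothetical):

```js
// Hypothetical mapped workflow run, as produced by mapWorkflowRun() further down.
const run = {
  id: 4100,
  runNumber: 17,
  htmlUrl: 'https://github.com/octo-org/octo-repo/actions/runs/4100'
};

const result = new Result({
  should_skip: true,
  reason: 'concurrent_skipping',
  skipped_by: run
});

result.output;   // keeps the full run object in `skipped_by` (used for the action outputs)
result.artifact; // replaces `skipped_by` with the run id (4100) before the artifact upload
```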
+const ConcurrentSkippingType = zod_1.z.enum([
'always',
'same_content',
'same_content_newer',
'outdated_runs',
'never'
-];
+]);
+const InputsType = zod_1.z.object({
+ paths: zod_1.z.array(zod_1.z.string()),
+ paths_ignore: zod_1.z.array(zod_1.z.string()),
+ paths_filter: PathsFilterType,
+ do_not_skip: zod_1.z.array(WorkflowRunTriggerType),
+ concurrent_skipping: ConcurrentSkippingType,
+ cancel_others: zod_1.z.boolean(),
+ skip_after_successful_duplicates: zod_1.z.boolean()
+});
+const ARTIFACT_FILE_NAME = 'data.json';
+const getArtifactName = (workflowId) => `skip-duplicate-actions-${workflowId}-${github.context.job}-${github.context.action}`;
class SkipDuplicateActions {
constructor(inputs, context) {
+ this.inputs = inputs;
+ this.context = context;
this.globOptions = {
dot: true // Match dotfiles. Otherwise dotfiles are ignored unless a "." is explicitly defined in the pattern.
};
- this.inputs = inputs;
- this.context = context;
}
run() {
+ var _a;
return __awaiter(this, void 0, void 0, function* () {
// Cancel outdated runs.
- if (this.inputs.cancelOthers) {
+ if (this.inputs.cancel_others) {
yield this.cancelOutdatedRuns();
}
// Abort early if current run has been triggered by an event that should never be skipped.
- if (this.inputs.doNotSkip.includes(this.context.currentRun.event)) {
+ if (this.inputs.do_not_skip.includes(this.context.currentRun.event)) {
core.info(`Do not skip execution because the workflow was triggered with '${this.context.currentRun.event}'`);
- yield exitSuccess({
- shouldSkip: false,
+ yield this.exitSuccess({
+ should_skip: false,
reason: 'do_not_skip'
});
}
// Skip on successful duplicate run.
- if (this.inputs.skipAfterSuccessfulDuplicates) {
- const successfulDuplicateRun = this.findSuccessfulDuplicateRun(this.context.currentRun.treeHash);
+ if (this.inputs.skip_after_successful_duplicates) {
+ const successfulDuplicateRun = this.findSuccessfulDuplicateRun(this.context.currentRun.event === 'pull_request'
+ ? (_a = this.context.artifactRuns.get(this.context.currentRun)) === null || _a === void 0 ? void 0 : _a.t
+ : this.context.currentRun.treeHash);
if (successfulDuplicateRun) {
core.info(`Skip execution because the exact same files have been successfully checked in run ${successfulDuplicateRun.htmlUrl}`);
- yield exitSuccess({
- shouldSkip: true,
+ yield this.exitSuccess({
+ should_skip: true,
reason: 'skip_after_successful_duplicate',
- skippedBy: successfulDuplicateRun
+ skipped_by: successfulDuplicateRun
});
}
}
// Skip on concurrent runs.
- if (this.inputs.concurrentSkipping !== 'never') {
+ if (this.inputs.concurrent_skipping !== 'never') {
const concurrentRun = this.detectConcurrentRuns();
if (concurrentRun) {
- yield exitSuccess({
- shouldSkip: true,
+ yield this.exitSuccess({
+ should_skip: true,
reason: 'concurrent_skipping',
- skippedBy: concurrentRun
+ skipped_by: concurrentRun
});
}
}
// Skip on path matches.
if (this.inputs.paths.length >= 1 ||
- this.inputs.pathsIgnore.length >= 1 ||
- Object.keys(this.inputs.pathsFilter).length >= 1) {
- const { changedFiles, pathsResult } = yield this.backtracePathSkipping();
- yield exitSuccess({
- shouldSkip: pathsResult.global.should_skip === 'unknown'
+ this.inputs.paths_ignore.length >= 1 ||
+ Object.keys(this.inputs.paths_filter).length >= 1) {
+ const { pathsResult, changedFiles } = yield this.backtracePathSkipping();
+ const globalPathsResult = pathsResult.getFilter('global');
+ yield this.exitSuccess(Object.assign(Object.assign({ should_skip: globalPathsResult.should_skip === null
? false
- : pathsResult.global.should_skip,
- reason: 'paths',
- skippedBy: pathsResult.global.skipped_by,
- pathsResult,
- changedFiles
- });
+ : globalPathsResult.should_skip, reason: 'paths' }, (globalPathsResult.skipped_by && {
+ skipped_by: globalPathsResult.skipped_by
+ })), { paths_result: pathsResult, changed_files: changedFiles }));
}
// Do not skip otherwise.
- core.info('Do not skip execution because we did not find a transferable run');
- yield exitSuccess({
- shouldSkip: false,
+ core.info('Do not skip execution because no transferable run could be found');
+ yield this.exitSuccess({
+ should_skip: false,
reason: 'no_transferable_run'
});
});
@@ -151,18 +268,28 @@ class SkipDuplicateActions {
core.info(`Cancelled run ${victim.htmlUrl} with response code ${res.status}`);
}
catch (error) {
- if (error instanceof Error || typeof error === 'string') {
- core.warning(error);
- }
- core.warning(`Failed to cancel ${victim.htmlUrl}`);
+ core.warning(composeErrorMessage({
+ title: `Failed to cancel run ${victim.htmlUrl}`,
+ error
+ }));
}
}
});
}
findSuccessfulDuplicateRun(treeHash) {
- return this.context.olderRuns.find(run => run.treeHash === treeHash &&
- run.status === 'completed' &&
- run.conclusion === 'success');
+ if (!treeHash) {
+ return;
+ }
+    // For pull request runs, compare against the tree hash stored in the artifact data; runs without a stored hash are ignored.
+ return this.context.olderRuns.find(run => {
+ var _a;
+ const runTreeHash = run.event === 'pull_request'
+ ? (_a = this.context.artifactRuns.get(run)) === null || _a === void 0 ? void 0 : _a.t
+ : run.treeHash;
+ return (runTreeHash === treeHash &&
+ run.status === 'completed' &&
+ run.conclusion === 'success');
+ });
}
detectConcurrentRuns() {
const concurrentRuns = this.context.allRuns.filter(run => run.status !== 'completed');
@@ -170,11 +297,11 @@ class SkipDuplicateActions {
core.info('Did not find any concurrent workflow runs');
return;
}
- if (this.inputs.concurrentSkipping === 'always') {
+ if (this.inputs.concurrent_skipping === 'always') {
core.info(`Skip execution because another instance of the same workflow is already running in ${concurrentRuns[0].htmlUrl}`);
return concurrentRuns[0];
}
- else if (this.inputs.concurrentSkipping === 'outdated_runs') {
+ else if (this.inputs.concurrent_skipping === 'outdated_runs') {
const newerRun = concurrentRuns.find(run => new Date(run.createdAt).getTime() >
new Date(this.context.currentRun.createdAt).getTime());
if (newerRun) {
@@ -182,14 +309,14 @@ class SkipDuplicateActions {
return newerRun;
}
}
- else if (this.inputs.concurrentSkipping === 'same_content') {
+ else if (this.inputs.concurrent_skipping === 'same_content') {
const concurrentDuplicate = concurrentRuns.find(run => run.treeHash === this.context.currentRun.treeHash);
if (concurrentDuplicate) {
core.info(`Skip execution because the exact same files are concurrently checked in run ${concurrentDuplicate.htmlUrl}`);
return concurrentDuplicate;
}
}
- else if (this.inputs.concurrentSkipping === 'same_content_newer') {
+ else if (this.inputs.concurrent_skipping === 'same_content_newer') {
const concurrentIsOlder = concurrentRuns.find(run => run.treeHash === this.context.currentRun.treeHash &&
run.runNumber < this.context.currentRun.runNumber);
if (concurrentIsOlder) {
@@ -200,57 +327,55 @@ class SkipDuplicateActions {
core.info(`Did not find any concurrent workflow runs that justify skipping`);
}
backtracePathSkipping() {
- var _a, _b;
return __awaiter(this, void 0, void 0, function* () {
let commit;
let iterSha = this.context.currentRun.commitHash;
let distanceToHEAD = 0;
const allChangedFiles = [];
- const pathsFilter = Object.assign(Object.assign({}, this.inputs.pathsFilter), { global: {
+ // The global paths settings are added under a "global" filter.
+ const pathsFilter = Object.assign(Object.assign({}, this.inputs.paths_filter), { global: {
paths: this.inputs.paths,
- paths_ignore: this.inputs.pathsIgnore,
+ paths_ignore: this.inputs.paths_ignore,
backtracking: true
} });
- const pathsResult = {};
+ // Add all defined filters to the result.
+ const pathsResult = new PathsResult();
for (const name of Object.keys(pathsFilter)) {
- pathsResult[name] = { should_skip: 'unknown', backtrack_count: 0 };
+ pathsResult.addFilter(name);
}
do {
commit = yield this.fetchCommitDetails(iterSha);
if (!commit) {
break;
}
- iterSha = ((_a = commit.parents) === null || _a === void 0 ? void 0 : _a.length) ? (_b = commit.parents[0]) === null || _b === void 0 ? void 0 : _b.sha : null;
+ iterSha = commit.parents.length ? commit.parents[0].sha : undefined;
const changedFiles = commit.files
? commit.files
.map(file => file.filename)
.filter(file => typeof file === 'string')
: [];
- allChangedFiles.push({
- sha: commit.sha,
- htmlUrl: commit.html_url,
- changedFiles
- });
- const successfulRun = (distanceToHEAD >= 1 &&
+ allChangedFiles.push(changedFiles);
+ const successfulRun = (distanceToHEAD > 0 &&
this.findSuccessfulDuplicateRun(commit.commit.tree.sha)) ||
undefined;
- for (const [name, values] of Object.entries(pathsResult)) {
- // Only process paths where status has not yet been determined.
- if (values.should_skip !== 'unknown')
- continue;
+ for (const name of pathsResult.getUnknownFilters()) {
// Skip if paths were ignorable or skippable until now and there is a successful run for the current commit.
if (successfulRun) {
- pathsResult[name].should_skip = true;
- pathsResult[name].skipped_by = successfulRun;
- pathsResult[name].backtrack_count = distanceToHEAD;
+ pathsResult.updateFilter(name, {
+ should_skip: true,
+ skipped_by: successfulRun,
+ backtrack_count: distanceToHEAD
+ });
core.info(`Skip '${name}' because all changes since run ${successfulRun.htmlUrl} are in ignored or skipped paths`);
continue;
}
// Check if backtracking limit has been reached.
if ((pathsFilter[name].backtracking === false && distanceToHEAD === 1) ||
pathsFilter[name].backtracking === distanceToHEAD) {
- pathsResult[name].should_skip = false;
- pathsResult[name].backtrack_count = distanceToHEAD;
+ pathsResult.updateFilter(name, {
+ should_skip: false,
+ backtrack_count: distanceToHEAD
+ });
core.info(`Stop backtracking for '${name}' because the defined limit has been reached`);
continue;
}
@@ -267,20 +392,24 @@ class SkipDuplicateActions {
continue;
}
else {
- pathsResult[name].matched_files = matches;
+ pathsResult.updateFilter(name, {
+ matched_files: matches
+ });
}
}
// Not ignorable or skippable.
- pathsResult[name].should_skip = false;
- pathsResult[name].backtrack_count = distanceToHEAD;
+ pathsResult.updateFilter(name, {
+ should_skip: false,
+ backtrack_count: distanceToHEAD
+ });
core.info(`Stop backtracking for '${name}' at commit ${commit.html_url} because '${changedFiles}' are not skippable against paths '${pathsFilter[name].paths}' or paths_ignore '${pathsFilter[name].paths_ignore}'`);
}
- // Should be never reached in practice; we expect that this loop aborts after 1-3 iterations.
+            // Should never be reached in practice; this loop is expected to abort after 1-3 iterations.
if (distanceToHEAD++ >= 50) {
core.warning('Aborted commit-backtracing due to bad performance - Did you push an excessive number of ignored-path commits?');
break;
}
- } while (Object.keys(pathsResult).some(path => pathsResult[path].should_skip === 'unknown'));
+ } while (pathsResult.hasUnknownFilters());
return { pathsResult, changedFiles: allChangedFiles };
});
}
@@ -298,35 +427,112 @@ class SkipDuplicateActions {
fetchCommitDetails(sha) {
return __awaiter(this, void 0, void 0, function* () {
if (!sha) {
- return null;
+ return undefined;
}
try {
return (yield this.context.octokit.rest.repos.getCommit(Object.assign(Object.assign({}, this.context.repo), { ref: sha }))).data;
}
catch (error) {
- if (error instanceof Error || typeof error === 'string') {
- core.warning(error);
+ core.warning(composeErrorMessage({ title: `Failed to retrieve commit ${sha}`, error }));
+ return undefined;
+ }
+ });
+ }
+ /** Set all outputs and exit the action. */
+ exitSuccess(result) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const tasks = [];
+ // Generate job summary.
+ const { output: outputResult, artifact: artifactResult } = new Result(result);
+ const summary = [
+                '<table>',
+                '<tr><td>Should Skip</td>',
+                `<td>${outputResult.should_skip ? 'Yes' : 'No'}</td>`,
+                `<td>${outputResult.should_skip}</td>`,
+                '</tr>',
+                '<tr><td>Reason</td>',
+                `<td>${outputResult.reason}</td>`,
+                '</tr>'
+ ];
+ if (outputResult.skipped_by) {
+                summary.push('<tr><td>Skipped By</td></tr>');
+ }
+ if (outputResult.paths_result) {
+                summary.push('<tr><td>Paths Result</td>', `<td>${JSON.stringify(outputResult.paths_result, null, 2)}</td>`, '</tr>');
+ }
+ if (outputResult.changed_files) {
+                summary.push('<tr><td>Changed Files</td><td><table>', outputResult.changed_files
+                    .map((commit, index) => `<tr><td>${index}</td><td>${commit
+                    .map(file => `${file}<br>`)
+                    .join('')}</td></tr>`)
+                    .join(''), '</table></td></tr>');
+ }
+ tasks.push(core.summary.addRaw(summary.join('')).write());
+ // Prepare artifact data.
+ const artifactRuns = {};
+ this.context.artifactRuns.set(this.context.currentRun, Object.assign(Object.assign({}, this.context.artifactRuns.get(this.context.currentRun)), { r: artifactResult }));
+ for (const [key, value] of this.context.artifactRuns) {
+ artifactRuns[key.id] = value;
+ }
+ const artifactData = {
+ v: 1,
+ r: artifactRuns
+ };
+ try {
+ // Upload artifact.
+ yield fs.writeFile(ARTIFACT_FILE_NAME, JSON.stringify((0, compress_json_1.compress)(artifactData)));
+ const artifactClient = artifact.create();
+ yield core.group('Upload artifact data', () => __awaiter(this, void 0, void 0, function* () {
+ yield artifactClient.uploadArtifact(getArtifactName(this.context.currentRun.workflowId), [ARTIFACT_FILE_NAME], '.',
+ // Reduce retention days a bit.
+ { retentionDays: 60 });
+ }));
+                // Try to remove older artifacts.
+                // This only runs if the upload was successful; the last 4 artifacts are kept for potentially concurrent workflow runs.
+ for (const id of this.context.artifactIds.slice(4)) {
+ tasks.push(this.context.octokit.rest.actions.deleteArtifact(Object.assign(Object.assign({}, this.context.repo), { artifact_id: id })));
}
- core.warning(`Failed to retrieve commit ${sha}`);
- return null;
}
+ catch (error) {
+ core.warning(composeErrorMessage({ title: 'Failed to upload artifact data', error }));
+ }
+ // Set all outputs.
+ core.setOutput('should_skip', outputResult.should_skip);
+ core.setOutput('reason', outputResult.reason);
+ core.setOutput('skipped_by', outputResult.skipped_by || {});
+ core.setOutput('paths_result', outputResult.paths_result || {});
+ core.setOutput('changed_files', outputResult.changed_files || []);
+ // Wait for all tasks to be finished (ignore errors).
+ yield Promise.allSettled(tasks);
+ process.exit(0);
});
}
}
function main() {
- var _a;
+ var _a, _b, _c, _d;
return __awaiter(this, void 0, void 0, function* () {
// Get and validate inputs.
- const token = core.getInput('github_token', { required: true });
- const inputs = {
- paths: getStringArrayInput('paths'),
- pathsIgnore: getStringArrayInput('paths_ignore'),
- pathsFilter: getPathsFilterInput('paths_filter'),
- doNotSkip: getDoNotSkipInput('do_not_skip'),
- concurrentSkipping: getConcurrentSkippingInput('concurrent_skipping'),
- cancelOthers: core.getBooleanInput('cancel_others'),
- skipAfterSuccessfulDuplicates: core.getBooleanInput('skip_after_successful_duplicate')
- };
+ let token;
+ let inputs;
+ try {
+ token = core.getInput('github_token', { required: true });
+ inputs = InputsType.parse({
+ paths: getJsonInput('paths'),
+ paths_ignore: getJsonInput('paths_ignore'),
+ paths_filter: getYamlInput('paths_filter'),
+ do_not_skip: getJsonInput('do_not_skip'),
+ concurrent_skipping: core.getInput('concurrent_skipping'),
+ cancel_others: core.getBooleanInput('cancel_others'),
+ skip_after_successful_duplicates: core.getBooleanInput('skip_after_successful_duplicate')
+ });
+ }
+ catch (error) {
+ exitFail(composeErrorMessage({
+ title: 'Error with input values',
+ error: error instanceof zod_1.z.ZodError ? composeZodError(error) : error
+ }));
+ }
+ // Get repo and octokit instance.
const repo = github.context.repo;
const octokit = github.getOctokit(token);
// Get and parse the current workflow run.
@@ -340,18 +546,62 @@ function main() {
`);
}
const currentRun = mapWorkflowRun(apiCurrentRun, currentTreeHash);
+    // Map workflow runs to their stored artifact data (tree hash and result).
+ const artifactRuns = new Map();
+ // Add current run to artifact runs.
+ const currentArtifactRun = {};
+ // Get tree hash of the merge commit on pull request events.
+ if (apiCurrentRun.event === 'pull_request') {
+ const { data: commit } = yield octokit.rest.repos.getCommit(Object.assign(Object.assign({}, repo), { ref: github.context.sha }));
+ currentArtifactRun.t = commit.commit.tree.sha;
+ }
+ artifactRuns.set(currentRun, currentArtifactRun);
// Fetch list of runs for current workflow.
const { data: { workflow_runs: apiAllRuns } } = yield octokit.rest.actions.listWorkflowRuns(Object.assign(Object.assign({}, repo), { workflow_id: currentRun.workflowId, per_page: 100 }));
+ // Get and parse artifact data.
+ let artifactIds = [];
+ let artifactData = {};
+ try {
+ const { data: { artifacts: apiAllArtifacts } } = yield octokit.rest.actions.listArtifactsForRepo(Object.assign(Object.assign({}, repo), { per_page: 100 }));
+ const artifactName = getArtifactName(currentRun.workflowId);
+ const currentArtifacts = apiAllArtifacts.filter(item => item.name === artifactName);
+ artifactIds = currentArtifacts.map(item => item.id);
+ const latestArtifact = currentArtifacts[0];
+ if (latestArtifact) {
+ core.info(`Got artifact data from ${((_b = latestArtifact.workflow_run) === null || _b === void 0 ? void 0 : _b.id)
+ ? `run ${latestArtifact.workflow_run.id}`
+ : 'unknown run'}`);
+ const { data: latestArtifactData } = yield octokit.rest.actions.downloadArtifact(Object.assign(Object.assign({}, repo), { artifact_id: latestArtifact.id, archive_format: 'zip' }));
+ const zip = yield jszip_1.default.loadAsync(Buffer.from(latestArtifactData), {
+ base64: true
+ });
+ const file = yield ((_c = zip.file(ARTIFACT_FILE_NAME)) === null || _c === void 0 ? void 0 : _c.async('string'));
+ if (file) {
+ artifactData = ArtifactDataType.parse((0, compress_json_1.decompress)((0, parse_json_1.default)(file))).r;
+ }
+ }
+ }
+ catch (error) {
+ core.warning(composeErrorMessage({
+ title: 'Failed to get artifact data',
+ error: error instanceof zod_1.z.ZodError ? composeZodError(error, 1) : error
+ }));
+ }
// List with all workflow runs.
const allRuns = [];
// List with older workflow runs only (used to prevent some nasty race conditions and edge cases).
const olderRuns = [];
// Check and map all runs.
for (const run of apiAllRuns) {
- // Filter out current run and runs that lack 'head_commit' (most likely runs associated with a headless or removed commit).
+ // Filter out current run.
+ if (run.id === currentRun.id) {
+ continue;
+ }
+ // Filter out runs that lack 'head_commit' (most likely runs associated with a headless or removed commit).
// See https://github.com/fkirc/skip-duplicate-actions/pull/178.
- if (run.id !== currentRun.id && run.head_commit) {
- const mappedRun = mapWorkflowRun(run, run.head_commit.tree_id);
+ const treeHash = (_d = run.head_commit) === null || _d === void 0 ? void 0 : _d.tree_id;
+ if (treeHash) {
+ const mappedRun = mapWorkflowRun(run, treeHash);
// Add to list of all runs.
allRuns.push(mappedRun);
// Check if run can be added to list of older runs.
@@ -359,6 +609,9 @@ function main() {
new Date(currentRun.createdAt).getTime()) {
olderRuns.push(mappedRun);
}
+ if (run.id in artifactData) {
+ artifactRuns.set(mappedRun, artifactData[run.id]);
+ }
}
}
const skipDuplicateActions = new SkipDuplicateActions(inputs, {
@@ -366,11 +619,26 @@ function main() {
octokit,
currentRun,
allRuns,
- olderRuns
+ olderRuns,
+ artifactRuns,
+ artifactIds
});
yield skipDuplicateActions.run();
});
}
+/* Parse action input as JSON. */
+function getJsonInput(name) {
+ const input = core.getInput(name);
+ if (input) {
+ return (0, parse_json_1.default)(core.getInput(name), name);
+ }
+ return undefined;
+}
+/* Parse action input as YAML. */
+function getYamlInput(name) {
+ return js_yaml_1.default.load(core.getInput(name), { filename: name });
+}
+/* Pick selected data from workflow run. */
function mapWorkflowRun(run, treeHash) {
var _a, _b;
return {
@@ -383,257 +651,80 @@ function mapWorkflowRun(run, treeHash) {
conclusion: run.conclusion,
htmlUrl: run.html_url,
branch: run.head_branch,
- // Wrong type: 'head_repository' can be null (probably when repo has been removed)
+ // Wrong type: 'head_repository' can be null
+ // (see https://github.com/github/rest-api-description/issues/1586)
repo: (_b = (_a = run.head_repository) === null || _a === void 0 ? void 0 : _a.full_name) !== null && _b !== void 0 ? _b : null,
workflowId: run.workflow_id,
createdAt: run.created_at
};
}
-/** Set all outputs and exit the action. */
-function exitSuccess(args) {
- var _a;
- return __awaiter(this, void 0, void 0, function* () {
- const summary = [
-        '<table>',
-        '<tr>',
-        '<td>Should Skip</td>',
-        `<td>${args.shouldSkip ? 'Yes' : 'No'} (${args.shouldSkip})</td>`,
-        '</tr>',
-        '<tr>',
-        '<td>Reason</td>',
-        `<td>${args.reason}</td>`,
-        '</tr>'
- ];
- if (args.skippedBy) {
-        summary.push('<tr>', '<td>Skipped By</td>', `<td>${args.skippedBy.runNumber}</td>`, '</tr>');
- }
- if (args.pathsResult) {
-        summary.push('<tr>', '<td>Paths Result</td>', `<td>${JSON.stringify(args.pathsResult, null, 2)}</td>`, '</tr>');
- }
- if (args.changedFiles) {
- const changedFiles = args.changedFiles
- .map(commit => `${commit.sha.substring(0, 7)}:
- ${commit.changedFiles
-            .map(file => `- ${file}<br>`)
-            .join('')}<br>`)
- .join('');
-        summary.push('<tr>', '<td>Changed Files</td>', `<td>${changedFiles}</td>`, '</tr>');
- }
-    summary.push('</table>');
- yield core.summary.addRaw(summary.join('')).write();
- core.setOutput('should_skip', args.shouldSkip);
- core.setOutput('reason', args.reason);
- core.setOutput('skipped_by', args.skippedBy || {});
- core.setOutput('paths_result', args.pathsResult || {});
- core.setOutput('changed_files', ((_a = args.changedFiles) === null || _a === void 0 ? void 0 : _a.map(commit => commit.changedFiles)) || []);
- process.exit(0);
- });
-}
-/** Immediately terminate the action with failing exit code. */
-function exitFail(error) {
- if (error instanceof Error || typeof error == 'string') {
- core.error(error);
- }
- process.exit(1);
-}
-function getStringArrayInput(name) {
- const rawInput = core.getInput(name);
- if (!rawInput) {
- return [];
- }
- try {
- const array = JSON.parse(rawInput);
- if (!Array.isArray(array)) {
- exitFail(`Input '${rawInput}' is not a JSON-array`);
- }
- for (const element of array) {
- if (typeof element !== 'string') {
- exitFail(`Element '${element}' of input '${rawInput}' is not a string`);
- }
- }
- return array;
- }
- catch (error) {
- if (error instanceof Error || typeof error === 'string') {
- core.error(error);
- }
- exitFail(`Input '${rawInput}' is not a valid JSON`);
- }
-}
-function getDoNotSkipInput(name) {
- const rawInput = core.getInput(name);
- if (!rawInput) {
- return [];
- }
- try {
- const array = JSON.parse(rawInput);
- if (!Array.isArray(array)) {
- exitFail(`Input '${rawInput}' is not a JSON-array`);
- }
- for (const element of array) {
- if (!workflowRunTriggerOptions.includes(element)) {
- exitFail(`Elements in '${name}' must be one of ${workflowRunTriggerOptions
- .map(option => `"${option}"`)
- .join(', ')}`);
+/** Compose Zod errors. */
+function composeZodError(error, limit) {
+ const errors = [];
+ for (const issue of error.issues.slice(0, limit)) {
+ let keyPath = '';
+ for (const [index, key] of issue.path.entries()) {
+ switch (typeof key) {
+ case 'string':
+ keyPath += `${index > 0 ? '.' : ''}${key}`;
+ break;
+ case 'number':
+ keyPath += `[${key}]`;
+ break;
}
}
- return array;
- }
- catch (error) {
- if (error instanceof Error || typeof error === 'string') {
- core.error(error);
- }
- exitFail(`Input '${rawInput}' is not a valid JSON`);
+ errors.push(`${keyPath ? `${keyPath}: ` : ''}${issue.message}`);
}
+ return `${errors.join('\n').replace(/^/gm, errors.length > 1 ? '- ' : '')}`;
}
-function getConcurrentSkippingInput(name) {
- const rawInput = core.getInput(name, { required: true });
- if (rawInput.toLowerCase() === 'false') {
- return 'never'; // Backwards-compat
- }
- else if (rawInput.toLowerCase() === 'true') {
- return 'same_content'; // Backwards-compat
+/** Compose error message. */
+function composeErrorMessage({ title, error }) {
+ // Look for error message.
+ let message = 'Unknown error';
+ if (typeof error === 'string') {
+ message = error;
}
- if (concurrentSkippingOptions.includes(rawInput)) {
- return rawInput;
+ if (error instanceof Error && error.message) {
+ message = error.message;
}
- else {
- exitFail(`'${name}' must be one of ${concurrentSkippingOptions
- .map(option => `"${option}"`)
- .join(', ')}`);
+ // Prepend error message by the title.
+ if (title) {
+ return `${title}:\n${message.replace(/^/gm, '\t')}`;
}
+ return message;
}
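For illustration, `composeErrorMessage` prefixes the tab-indented error message with the title, so a call like the following hypothetical one yields a two-line warning:

```js
// Hypothetical error, for illustration only.
const message = composeErrorMessage({
  title: 'Failed to cancel run https://github.com/octo-org/octo-repo/actions/runs/4100',
  error: new Error('Resource not accessible by integration')
});
// message === 'Failed to cancel run https://github.com/octo-org/octo-repo/actions/runs/4100:\n\tResource not accessible by integration'
```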
-function getPathsFilterInput(name) {
- const rawInput = core.getInput(name);
- if (!rawInput) {
- return {};
- }
- try {
- const input = js_yaml_1.default.load(rawInput);
- // Assign default values to each entry
- const pathsFilter = {};
- for (const [key, value] of Object.entries(input)) {
- pathsFilter[key] = {
- paths: value.paths || [],
- paths_ignore: value.paths_ignore || [],
- backtracking: value.backtracking == null ? true : value.backtracking
- };
- }
- return pathsFilter;
- }
- catch (error) {
- if (error instanceof Error || typeof error === 'string') {
- core.error(error);
- }
- exitFail(`Input '${rawInput}' is invalid`);
+/** Immediately terminate the action with failing exit code. */
+function exitFail(error) {
+ if (error instanceof Error || typeof error == 'string') {
+ core.error(error);
}
+ process.exit(1);
}
main();
/***/ }),
-/***/ 7351:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+/***/ 2605:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.issue = exports.issueCommand = void 0;
-const os = __importStar(__nccwpck_require__(2037));
-const utils_1 = __nccwpck_require__(5278);
+exports.create = void 0;
+const artifact_client_1 = __nccwpck_require__(8802);
/**
- * Commands
- *
- * Command Format:
- * ::name key=value,key=value::message
- *
- * Examples:
- * ::warning::This is the message
- * ::set-env name=MY_VAR::some value
+ * Constructs an ArtifactClient
*/
-function issueCommand(command, properties, message) {
- const cmd = new Command(command, properties, message);
- process.stdout.write(cmd.toString() + os.EOL);
-}
-exports.issueCommand = issueCommand;
-function issue(name, message = '') {
- issueCommand(name, {}, message);
-}
-exports.issue = issue;
-const CMD_STRING = '::';
-class Command {
- constructor(command, properties, message) {
- if (!command) {
- command = 'missing.command';
- }
- this.command = command;
- this.properties = properties;
- this.message = message;
- }
- toString() {
- let cmdStr = CMD_STRING + this.command;
- if (this.properties && Object.keys(this.properties).length > 0) {
- cmdStr += ' ';
- let first = true;
- for (const key in this.properties) {
- if (this.properties.hasOwnProperty(key)) {
- const val = this.properties[key];
- if (val) {
- if (first) {
- first = false;
- }
- else {
- cmdStr += ',';
- }
- cmdStr += `${key}=${escapeProperty(val)}`;
- }
- }
- }
- }
- cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
- return cmdStr;
- }
-}
-function escapeData(s) {
- return utils_1.toCommandValue(s)
- .replace(/%/g, '%25')
- .replace(/\r/g, '%0D')
- .replace(/\n/g, '%0A');
-}
-function escapeProperty(s) {
- return utils_1.toCommandValue(s)
- .replace(/%/g, '%25')
- .replace(/\r/g, '%0D')
- .replace(/\n/g, '%0A')
- .replace(/:/g, '%3A')
- .replace(/,/g, '%2C');
+function create() {
+ return artifact_client_1.DefaultArtifactClient.create();
}
-//# sourceMappingURL=command.js.map
+exports.create = create;
+//# sourceMappingURL=artifact-client.js.map
/***/ }),
-/***/ 2186:
+/***/ 8802:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -667,321 +758,1037 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
-const command_1 = __nccwpck_require__(7351);
-const file_command_1 = __nccwpck_require__(717);
-const utils_1 = __nccwpck_require__(5278);
-const os = __importStar(__nccwpck_require__(2037));
-const path = __importStar(__nccwpck_require__(1017));
-const oidc_utils_1 = __nccwpck_require__(8041);
-/**
- * The code to exit an action
- */
-var ExitCode;
-(function (ExitCode) {
+exports.DefaultArtifactClient = void 0;
+const core = __importStar(__nccwpck_require__(2186));
+const upload_specification_1 = __nccwpck_require__(183);
+const upload_http_client_1 = __nccwpck_require__(4354);
+const utils_1 = __nccwpck_require__(6327);
+const path_and_artifact_name_validation_1 = __nccwpck_require__(7398);
+const download_http_client_1 = __nccwpck_require__(8538);
+const download_specification_1 = __nccwpck_require__(5686);
+const config_variables_1 = __nccwpck_require__(2222);
+const path_1 = __nccwpck_require__(1017);
+class DefaultArtifactClient {
/**
- * A code indicating that the action was successful
+ * Constructs a DefaultArtifactClient
*/
- ExitCode[ExitCode["Success"] = 0] = "Success";
+ static create() {
+ return new DefaultArtifactClient();
+ }
/**
- * A code indicating that the action was a failure
+ * Uploads an artifact
*/
- ExitCode[ExitCode["Failure"] = 1] = "Failure";
-})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
-//-----------------------------------------------------------------------
-// Variables
-//-----------------------------------------------------------------------
-/**
- * Sets env variable for this action and future actions in the job
- * @param name the name of the variable to set
- * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function exportVariable(name, val) {
- const convertedVal = utils_1.toCommandValue(val);
- process.env[name] = convertedVal;
- const filePath = process.env['GITHUB_ENV'] || '';
- if (filePath) {
- return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
+ uploadArtifact(name, files, rootDirectory, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ core.info(`Starting artifact upload
+For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`);
+ path_and_artifact_name_validation_1.checkArtifactName(name);
+ // Get specification for the files being uploaded
+ const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files);
+ const uploadResponse = {
+ artifactName: name,
+ artifactItems: [],
+ size: 0,
+ failedItems: []
+ };
+ const uploadHttpClient = new upload_http_client_1.UploadHttpClient();
+ if (uploadSpecification.length === 0) {
+ core.warning(`No files found that can be uploaded`);
+ }
+ else {
+ // Create an entry for the artifact in the file container
+ const response = yield uploadHttpClient.createArtifactInFileContainer(name, options);
+ if (!response.fileContainerResourceUrl) {
+ core.debug(response.toString());
+ throw new Error('No URL provided by the Artifact Service to upload an artifact to');
+ }
+ core.debug(`Upload Resource URL: ${response.fileContainerResourceUrl}`);
+ core.info(`Container for artifact "${name}" successfully created. Starting upload of file(s)`);
+ // Upload each of the files that were found concurrently
+ const uploadResult = yield uploadHttpClient.uploadArtifactToFileContainer(response.fileContainerResourceUrl, uploadSpecification, options);
+ // Update the size of the artifact to indicate we are done uploading
+ // The uncompressed size is used for display when downloading a zip of the artifact from the UI
+ core.info(`File upload process has finished. Finalizing the artifact upload`);
+ yield uploadHttpClient.patchArtifactSize(uploadResult.totalSize, name);
+ if (uploadResult.failedItems.length > 0) {
+ core.info(`Upload finished. There were ${uploadResult.failedItems.length} items that failed to upload`);
+ }
+ else {
+ core.info(`Artifact has been finalized. All files have been successfully uploaded!`);
+ }
+ core.info(`
+The raw size of all the files that were specified for upload is ${uploadResult.totalSize} bytes
+The size of all the files that were uploaded is ${uploadResult.uploadSize} bytes. This takes into account any gzip compression used to reduce the upload size, time and storage
+
+Note: The size of downloaded zips can differ significantly from the reported size. For more information see: https://github.com/actions/upload-artifact#zipped-artifact-downloads \r\n`);
+ uploadResponse.artifactItems = uploadSpecification.map(item => item.absoluteFilePath);
+ uploadResponse.size = uploadResult.uploadSize;
+ uploadResponse.failedItems = uploadResult.failedItems;
+ }
+ return uploadResponse;
+ });
+ }
+ downloadArtifact(name, path, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
+ const artifacts = yield downloadHttpClient.listArtifacts();
+ if (artifacts.count === 0) {
+ throw new Error(`Unable to find any artifacts for the associated workflow`);
+ }
+ const artifactToDownload = artifacts.value.find(artifact => {
+ return artifact.name === name;
+ });
+ if (!artifactToDownload) {
+ throw new Error(`Unable to find an artifact with the name: ${name}`);
+ }
+ const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl);
+ if (!path) {
+ path = config_variables_1.getWorkSpaceDirectory();
+ }
+ path = path_1.normalize(path);
+ path = path_1.resolve(path);
+ // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories
+ const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false);
+ if (downloadSpecification.filesToDownload.length === 0) {
+ core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`);
+ }
+ else {
+ // Create all necessary directories recursively before starting any download
+ yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
+ core.info('Directory structure has been setup for the artifact');
+ yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate);
+ yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
+ }
+ return {
+ artifactName: name,
+ downloadPath: downloadSpecification.rootDownloadLocation
+ };
+ });
+ }
+ downloadAllArtifacts(path) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const downloadHttpClient = new download_http_client_1.DownloadHttpClient();
+ const response = [];
+ const artifacts = yield downloadHttpClient.listArtifacts();
+ if (artifacts.count === 0) {
+ core.info('Unable to find any artifacts for the associated workflow');
+ return response;
+ }
+ if (!path) {
+ path = config_variables_1.getWorkSpaceDirectory();
+ }
+ path = path_1.normalize(path);
+ path = path_1.resolve(path);
+ let downloadedArtifacts = 0;
+ while (downloadedArtifacts < artifacts.count) {
+ const currentArtifactToDownload = artifacts.value[downloadedArtifacts];
+ downloadedArtifacts += 1;
+ core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`);
+ // Get container entries for the specific artifact
+ const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl);
+ const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true);
+ if (downloadSpecification.filesToDownload.length === 0) {
+ core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`);
+ }
+ else {
+ yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure);
+ yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate);
+ yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload);
+ }
+ response.push({
+ artifactName: currentArtifactToDownload.name,
+ downloadPath: downloadSpecification.rootDownloadLocation
+ });
+ }
+ return response;
+ });
}
- command_1.issueCommand('set-env', { name }, convertedVal);
-}
-exports.exportVariable = exportVariable;
-/**
- * Registers a secret which will get masked from logs
- * @param secret value of the secret
- */
-function setSecret(secret) {
- command_1.issueCommand('add-mask', {}, secret);
}
-exports.setSecret = setSecret;
+exports.DefaultArtifactClient = DefaultArtifactClient;
+//# sourceMappingURL=artifact-client.js.map
+
+/***/ }),
+
+/***/ 2222:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0;
+// The number of concurrent uploads that happens at the same time
+function getUploadFileConcurrency() {
+ return 2;
+}
+exports.getUploadFileConcurrency = getUploadFileConcurrency;
+// When uploading large files that can't be uploaded with a single http call, this controls
+// the chunk size that is used during upload
+function getUploadChunkSize() {
+ return 8 * 1024 * 1024; // 8 MB Chunks
+}
+exports.getUploadChunkSize = getUploadChunkSize;
+// The maximum number of retries that can be attempted before an upload or download fails
+function getRetryLimit() {
+ return 5;
+}
+exports.getRetryLimit = getRetryLimit;
+// With exponential backoff, the larger the retry count, the larger the wait time before another attempt
+// The retry multiplier controls by how much the backOff time increases depending on the number of retries
+function getRetryMultiplier() {
+ return 1.5;
+}
+exports.getRetryMultiplier = getRetryMultiplier;
+// The initial wait time if an upload or download fails and a retry is being attempted for the first time
+function getInitialRetryIntervalInMilliseconds() {
+ return 3000;
+}
+exports.getInitialRetryIntervalInMilliseconds = getInitialRetryIntervalInMilliseconds;
+// The number of concurrent downloads that happens at the same time
+function getDownloadFileConcurrency() {
+ return 2;
+}
+exports.getDownloadFileConcurrency = getDownloadFileConcurrency;
+function getRuntimeToken() {
+ const token = process.env['ACTIONS_RUNTIME_TOKEN'];
+ if (!token) {
+ throw new Error('Unable to get ACTIONS_RUNTIME_TOKEN env variable');
+ }
+ return token;
+}
+exports.getRuntimeToken = getRuntimeToken;
+function getRuntimeUrl() {
+ const runtimeUrl = process.env['ACTIONS_RUNTIME_URL'];
+ if (!runtimeUrl) {
+ throw new Error('Unable to get ACTIONS_RUNTIME_URL env variable');
+ }
+ return runtimeUrl;
+}
+exports.getRuntimeUrl = getRuntimeUrl;
+function getWorkFlowRunId() {
+ const workFlowRunId = process.env['GITHUB_RUN_ID'];
+ if (!workFlowRunId) {
+ throw new Error('Unable to get GITHUB_RUN_ID env variable');
+ }
+ return workFlowRunId;
+}
+exports.getWorkFlowRunId = getWorkFlowRunId;
+function getWorkSpaceDirectory() {
+ const workspaceDirectory = process.env['GITHUB_WORKSPACE'];
+ if (!workspaceDirectory) {
+ throw new Error('Unable to get GITHUB_WORKSPACE env variable');
+ }
+ return workspaceDirectory;
+}
+exports.getWorkSpaceDirectory = getWorkSpaceDirectory;
+function getRetentionDays() {
+ return process.env['GITHUB_RETENTION_DAYS'];
+}
+exports.getRetentionDays = getRetentionDays;
+//# sourceMappingURL=config-variables.js.map
+
+/***/ }),
+
+/***/ 3549:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
/**
- * Prepends inputPath to the PATH (for this action and future actions)
- * @param inputPath
+ * CRC64: cyclic redundancy check, 64-bits
+ *
+ * In order to validate that artifacts are not being corrupted over the wire, this redundancy check allows us to
+ * validate that there was no corruption during transmission. The implementation here is based on Go's hash/crc64 pkg,
+ * but without the slicing-by-8 optimization: https://cs.opensource.google/go/go/+/master:src/hash/crc64/crc64.go
+ *
+ * This implementation uses a pregenerated table based on 0x9A6C9329AC4BC9B5 as the polynomial, the same polynomial that
+ * is used for Azure Storage: https://github.com/Azure/azure-storage-net/blob/cbe605f9faa01bfc3003d75fc5a16b2eaccfe102/Lib/Common/Core/Util/Crc64.cs#L27
*/
-function addPath(inputPath) {
- const filePath = process.env['GITHUB_PATH'] || '';
- if (filePath) {
- file_command_1.issueFileCommand('PATH', inputPath);
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+// when transpile target is >= ES2020 (after dropping node 12) these can be changed to bigint literals - ts(2737)
+const PREGEN_POLY_TABLE = [
+ BigInt('0x0000000000000000'),
+ BigInt('0x7F6EF0C830358979'),
+ BigInt('0xFEDDE190606B12F2'),
+ BigInt('0x81B31158505E9B8B'),
+ BigInt('0xC962E5739841B68F'),
+ BigInt('0xB60C15BBA8743FF6'),
+ BigInt('0x37BF04E3F82AA47D'),
+ BigInt('0x48D1F42BC81F2D04'),
+ BigInt('0xA61CECB46814FE75'),
+ BigInt('0xD9721C7C5821770C'),
+ BigInt('0x58C10D24087FEC87'),
+ BigInt('0x27AFFDEC384A65FE'),
+ BigInt('0x6F7E09C7F05548FA'),
+ BigInt('0x1010F90FC060C183'),
+ BigInt('0x91A3E857903E5A08'),
+ BigInt('0xEECD189FA00BD371'),
+ BigInt('0x78E0FF3B88BE6F81'),
+ BigInt('0x078E0FF3B88BE6F8'),
+ BigInt('0x863D1EABE8D57D73'),
+ BigInt('0xF953EE63D8E0F40A'),
+ BigInt('0xB1821A4810FFD90E'),
+ BigInt('0xCEECEA8020CA5077'),
+ BigInt('0x4F5FFBD87094CBFC'),
+ BigInt('0x30310B1040A14285'),
+ BigInt('0xDEFC138FE0AA91F4'),
+ BigInt('0xA192E347D09F188D'),
+ BigInt('0x2021F21F80C18306'),
+ BigInt('0x5F4F02D7B0F40A7F'),
+ BigInt('0x179EF6FC78EB277B'),
+ BigInt('0x68F0063448DEAE02'),
+ BigInt('0xE943176C18803589'),
+ BigInt('0x962DE7A428B5BCF0'),
+ BigInt('0xF1C1FE77117CDF02'),
+ BigInt('0x8EAF0EBF2149567B'),
+ BigInt('0x0F1C1FE77117CDF0'),
+ BigInt('0x7072EF2F41224489'),
+ BigInt('0x38A31B04893D698D'),
+ BigInt('0x47CDEBCCB908E0F4'),
+ BigInt('0xC67EFA94E9567B7F'),
+ BigInt('0xB9100A5CD963F206'),
+ BigInt('0x57DD12C379682177'),
+ BigInt('0x28B3E20B495DA80E'),
+ BigInt('0xA900F35319033385'),
+ BigInt('0xD66E039B2936BAFC'),
+ BigInt('0x9EBFF7B0E12997F8'),
+ BigInt('0xE1D10778D11C1E81'),
+ BigInt('0x606216208142850A'),
+ BigInt('0x1F0CE6E8B1770C73'),
+ BigInt('0x8921014C99C2B083'),
+ BigInt('0xF64FF184A9F739FA'),
+ BigInt('0x77FCE0DCF9A9A271'),
+ BigInt('0x08921014C99C2B08'),
+ BigInt('0x4043E43F0183060C'),
+ BigInt('0x3F2D14F731B68F75'),
+ BigInt('0xBE9E05AF61E814FE'),
+ BigInt('0xC1F0F56751DD9D87'),
+ BigInt('0x2F3DEDF8F1D64EF6'),
+ BigInt('0x50531D30C1E3C78F'),
+ BigInt('0xD1E00C6891BD5C04'),
+ BigInt('0xAE8EFCA0A188D57D'),
+ BigInt('0xE65F088B6997F879'),
+ BigInt('0x9931F84359A27100'),
+ BigInt('0x1882E91B09FCEA8B'),
+ BigInt('0x67EC19D339C963F2'),
+ BigInt('0xD75ADABD7A6E2D6F'),
+ BigInt('0xA8342A754A5BA416'),
+ BigInt('0x29873B2D1A053F9D'),
+ BigInt('0x56E9CBE52A30B6E4'),
+ BigInt('0x1E383FCEE22F9BE0'),
+ BigInt('0x6156CF06D21A1299'),
+ BigInt('0xE0E5DE5E82448912'),
+ BigInt('0x9F8B2E96B271006B'),
+ BigInt('0x71463609127AD31A'),
+ BigInt('0x0E28C6C1224F5A63'),
+ BigInt('0x8F9BD7997211C1E8'),
+ BigInt('0xF0F5275142244891'),
+ BigInt('0xB824D37A8A3B6595'),
+ BigInt('0xC74A23B2BA0EECEC'),
+ BigInt('0x46F932EAEA507767'),
+ BigInt('0x3997C222DA65FE1E'),
+ BigInt('0xAFBA2586F2D042EE'),
+ BigInt('0xD0D4D54EC2E5CB97'),
+ BigInt('0x5167C41692BB501C'),
+ BigInt('0x2E0934DEA28ED965'),
+ BigInt('0x66D8C0F56A91F461'),
+ BigInt('0x19B6303D5AA47D18'),
+ BigInt('0x980521650AFAE693'),
+ BigInt('0xE76BD1AD3ACF6FEA'),
+ BigInt('0x09A6C9329AC4BC9B'),
+ BigInt('0x76C839FAAAF135E2'),
+ BigInt('0xF77B28A2FAAFAE69'),
+ BigInt('0x8815D86ACA9A2710'),
+ BigInt('0xC0C42C4102850A14'),
+ BigInt('0xBFAADC8932B0836D'),
+ BigInt('0x3E19CDD162EE18E6'),
+ BigInt('0x41773D1952DB919F'),
+ BigInt('0x269B24CA6B12F26D'),
+ BigInt('0x59F5D4025B277B14'),
+ BigInt('0xD846C55A0B79E09F'),
+ BigInt('0xA72835923B4C69E6'),
+ BigInt('0xEFF9C1B9F35344E2'),
+ BigInt('0x90973171C366CD9B'),
+ BigInt('0x1124202993385610'),
+ BigInt('0x6E4AD0E1A30DDF69'),
+ BigInt('0x8087C87E03060C18'),
+ BigInt('0xFFE938B633338561'),
+ BigInt('0x7E5A29EE636D1EEA'),
+ BigInt('0x0134D92653589793'),
+ BigInt('0x49E52D0D9B47BA97'),
+ BigInt('0x368BDDC5AB7233EE'),
+ BigInt('0xB738CC9DFB2CA865'),
+ BigInt('0xC8563C55CB19211C'),
+ BigInt('0x5E7BDBF1E3AC9DEC'),
+ BigInt('0x21152B39D3991495'),
+ BigInt('0xA0A63A6183C78F1E'),
+ BigInt('0xDFC8CAA9B3F20667'),
+ BigInt('0x97193E827BED2B63'),
+ BigInt('0xE877CE4A4BD8A21A'),
+ BigInt('0x69C4DF121B863991'),
+ BigInt('0x16AA2FDA2BB3B0E8'),
+ BigInt('0xF86737458BB86399'),
+ BigInt('0x8709C78DBB8DEAE0'),
+ BigInt('0x06BAD6D5EBD3716B'),
+ BigInt('0x79D4261DDBE6F812'),
+ BigInt('0x3105D23613F9D516'),
+ BigInt('0x4E6B22FE23CC5C6F'),
+ BigInt('0xCFD833A67392C7E4'),
+ BigInt('0xB0B6C36E43A74E9D'),
+ BigInt('0x9A6C9329AC4BC9B5'),
+ BigInt('0xE50263E19C7E40CC'),
+ BigInt('0x64B172B9CC20DB47'),
+ BigInt('0x1BDF8271FC15523E'),
+ BigInt('0x530E765A340A7F3A'),
+ BigInt('0x2C608692043FF643'),
+ BigInt('0xADD397CA54616DC8'),
+ BigInt('0xD2BD67026454E4B1'),
+ BigInt('0x3C707F9DC45F37C0'),
+ BigInt('0x431E8F55F46ABEB9'),
+ BigInt('0xC2AD9E0DA4342532'),
+ BigInt('0xBDC36EC59401AC4B'),
+ BigInt('0xF5129AEE5C1E814F'),
+ BigInt('0x8A7C6A266C2B0836'),
+ BigInt('0x0BCF7B7E3C7593BD'),
+ BigInt('0x74A18BB60C401AC4'),
+ BigInt('0xE28C6C1224F5A634'),
+ BigInt('0x9DE29CDA14C02F4D'),
+ BigInt('0x1C518D82449EB4C6'),
+ BigInt('0x633F7D4A74AB3DBF'),
+ BigInt('0x2BEE8961BCB410BB'),
+ BigInt('0x548079A98C8199C2'),
+ BigInt('0xD53368F1DCDF0249'),
+ BigInt('0xAA5D9839ECEA8B30'),
+ BigInt('0x449080A64CE15841'),
+ BigInt('0x3BFE706E7CD4D138'),
+ BigInt('0xBA4D61362C8A4AB3'),
+ BigInt('0xC52391FE1CBFC3CA'),
+ BigInt('0x8DF265D5D4A0EECE'),
+ BigInt('0xF29C951DE49567B7'),
+ BigInt('0x732F8445B4CBFC3C'),
+ BigInt('0x0C41748D84FE7545'),
+ BigInt('0x6BAD6D5EBD3716B7'),
+ BigInt('0x14C39D968D029FCE'),
+ BigInt('0x95708CCEDD5C0445'),
+ BigInt('0xEA1E7C06ED698D3C'),
+ BigInt('0xA2CF882D2576A038'),
+ BigInt('0xDDA178E515432941'),
+ BigInt('0x5C1269BD451DB2CA'),
+ BigInt('0x237C997575283BB3'),
+ BigInt('0xCDB181EAD523E8C2'),
+ BigInt('0xB2DF7122E51661BB'),
+ BigInt('0x336C607AB548FA30'),
+ BigInt('0x4C0290B2857D7349'),
+ BigInt('0x04D364994D625E4D'),
+ BigInt('0x7BBD94517D57D734'),
+ BigInt('0xFA0E85092D094CBF'),
+ BigInt('0x856075C11D3CC5C6'),
+ BigInt('0x134D926535897936'),
+ BigInt('0x6C2362AD05BCF04F'),
+ BigInt('0xED9073F555E26BC4'),
+ BigInt('0x92FE833D65D7E2BD'),
+ BigInt('0xDA2F7716ADC8CFB9'),
+ BigInt('0xA54187DE9DFD46C0'),
+ BigInt('0x24F29686CDA3DD4B'),
+ BigInt('0x5B9C664EFD965432'),
+ BigInt('0xB5517ED15D9D8743'),
+ BigInt('0xCA3F8E196DA80E3A'),
+ BigInt('0x4B8C9F413DF695B1'),
+ BigInt('0x34E26F890DC31CC8'),
+ BigInt('0x7C339BA2C5DC31CC'),
+ BigInt('0x035D6B6AF5E9B8B5'),
+ BigInt('0x82EE7A32A5B7233E'),
+ BigInt('0xFD808AFA9582AA47'),
+ BigInt('0x4D364994D625E4DA'),
+ BigInt('0x3258B95CE6106DA3'),
+ BigInt('0xB3EBA804B64EF628'),
+ BigInt('0xCC8558CC867B7F51'),
+ BigInt('0x8454ACE74E645255'),
+ BigInt('0xFB3A5C2F7E51DB2C'),
+ BigInt('0x7A894D772E0F40A7'),
+ BigInt('0x05E7BDBF1E3AC9DE'),
+ BigInt('0xEB2AA520BE311AAF'),
+ BigInt('0x944455E88E0493D6'),
+ BigInt('0x15F744B0DE5A085D'),
+ BigInt('0x6A99B478EE6F8124'),
+ BigInt('0x224840532670AC20'),
+ BigInt('0x5D26B09B16452559'),
+ BigInt('0xDC95A1C3461BBED2'),
+ BigInt('0xA3FB510B762E37AB'),
+ BigInt('0x35D6B6AF5E9B8B5B'),
+ BigInt('0x4AB846676EAE0222'),
+ BigInt('0xCB0B573F3EF099A9'),
+ BigInt('0xB465A7F70EC510D0'),
+ BigInt('0xFCB453DCC6DA3DD4'),
+ BigInt('0x83DAA314F6EFB4AD'),
+ BigInt('0x0269B24CA6B12F26'),
+ BigInt('0x7D0742849684A65F'),
+ BigInt('0x93CA5A1B368F752E'),
+ BigInt('0xECA4AAD306BAFC57'),
+ BigInt('0x6D17BB8B56E467DC'),
+ BigInt('0x12794B4366D1EEA5'),
+ BigInt('0x5AA8BF68AECEC3A1'),
+ BigInt('0x25C64FA09EFB4AD8'),
+ BigInt('0xA4755EF8CEA5D153'),
+ BigInt('0xDB1BAE30FE90582A'),
+ BigInt('0xBCF7B7E3C7593BD8'),
+ BigInt('0xC399472BF76CB2A1'),
+ BigInt('0x422A5673A732292A'),
+ BigInt('0x3D44A6BB9707A053'),
+ BigInt('0x759552905F188D57'),
+ BigInt('0x0AFBA2586F2D042E'),
+ BigInt('0x8B48B3003F739FA5'),
+ BigInt('0xF42643C80F4616DC'),
+ BigInt('0x1AEB5B57AF4DC5AD'),
+ BigInt('0x6585AB9F9F784CD4'),
+ BigInt('0xE436BAC7CF26D75F'),
+ BigInt('0x9B584A0FFF135E26'),
+ BigInt('0xD389BE24370C7322'),
+ BigInt('0xACE74EEC0739FA5B'),
+ BigInt('0x2D545FB4576761D0'),
+ BigInt('0x523AAF7C6752E8A9'),
+ BigInt('0xC41748D84FE75459'),
+ BigInt('0xBB79B8107FD2DD20'),
+ BigInt('0x3ACAA9482F8C46AB'),
+ BigInt('0x45A459801FB9CFD2'),
+ BigInt('0x0D75ADABD7A6E2D6'),
+ BigInt('0x721B5D63E7936BAF'),
+ BigInt('0xF3A84C3BB7CDF024'),
+ BigInt('0x8CC6BCF387F8795D'),
+ BigInt('0x620BA46C27F3AA2C'),
+ BigInt('0x1D6554A417C62355'),
+ BigInt('0x9CD645FC4798B8DE'),
+ BigInt('0xE3B8B53477AD31A7'),
+ BigInt('0xAB69411FBFB21CA3'),
+ BigInt('0xD407B1D78F8795DA'),
+ BigInt('0x55B4A08FDFD90E51'),
+ BigInt('0x2ADA5047EFEC8728')
+];
+class CRC64 {
+ constructor() {
+ this._crc = BigInt(0);
+ }
+ update(data) {
+ const buffer = typeof data === 'string' ? Buffer.from(data) : data;
+ let crc = CRC64.flip64Bits(this._crc);
+ for (const dataByte of buffer) {
+ const crcByte = Number(crc & BigInt(0xff));
+ crc = PREGEN_POLY_TABLE[crcByte ^ dataByte] ^ (crc >> BigInt(8));
+ }
+ this._crc = CRC64.flip64Bits(crc);
+ }
+ digest(encoding) {
+ switch (encoding) {
+ case 'hex':
+ return this._crc.toString(16).toUpperCase();
+ case 'base64':
+ return this.toBuffer().toString('base64');
+ default:
+ return this.toBuffer();
+ }
}
- else {
- command_1.issueCommand('add-path', {}, inputPath);
+ toBuffer() {
+ return Buffer.from([0, 8, 16, 24, 32, 40, 48, 56].map(s => Number((this._crc >> BigInt(s)) & BigInt(0xff))));
+ }
+ static flip64Bits(n) {
+ return (BigInt(1) << BigInt(64)) - BigInt(1) - n;
}
- process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
}
-exports.addPath = addPath;
+exports["default"] = CRC64;
+//# sourceMappingURL=crc64.js.map
+
+/***/ }),
+
+/***/ 8538:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.DownloadHttpClient = void 0;
+const fs = __importStar(__nccwpck_require__(7147));
+const core = __importStar(__nccwpck_require__(2186));
+const zlib = __importStar(__nccwpck_require__(9796));
+const utils_1 = __nccwpck_require__(6327);
+const url_1 = __nccwpck_require__(7310);
+const status_reporter_1 = __nccwpck_require__(9081);
+const perf_hooks_1 = __nccwpck_require__(4074);
+const http_manager_1 = __nccwpck_require__(6527);
+const config_variables_1 = __nccwpck_require__(2222);
+const requestUtils_1 = __nccwpck_require__(755);
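+// DownloadHttpClient lists artifacts, enumerates container items and downloads files
+// concurrently, transparently gunzipping gzip-encoded responses and retrying retryable or
+// throttled status codes with exponential (or Retry-After based) backoff.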
+class DownloadHttpClient {
+ constructor() {
+ this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download');
+ // downloads are usually significantly faster than uploads so display status information every second
+ this.statusReporter = new status_reporter_1.StatusReporter(1000);
+ }
+ /**
+ * Gets a list of all artifacts that are in a specific container
+ */
+ listArtifacts() {
+ return __awaiter(this, void 0, void 0, function* () {
+ const artifactUrl = utils_1.getArtifactUrl();
+ // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
+ const client = this.downloadHttpManager.getClient(0);
+ const headers = utils_1.getDownloadHeaders('application/json');
+ const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); }));
+ const body = yield response.readBody();
+ return JSON.parse(body);
+ });
+ }
+ /**
+ * Fetches a set of container items that describe the contents of an artifact
+ * @param artifactName the name of the artifact
+ * @param containerUrl the artifact container URL for the run
+ */
+ getContainerItems(artifactName, containerUrl) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // the itemPath search parameter controls which containers will be returned
+ const resourceUrl = new url_1.URL(containerUrl);
+ resourceUrl.searchParams.append('itemPath', artifactName);
+ // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
+ const client = this.downloadHttpManager.getClient(0);
+ const headers = utils_1.getDownloadHeaders('application/json');
+ const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); }));
+ const body = yield response.readBody();
+ return JSON.parse(body);
+ });
+ }
+ /**
+ * Concurrently downloads all the files that are part of an artifact
+ * @param downloadItems information about what items to download and where to save them
+ */
+ downloadSingleArtifact(downloadItems) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency();
+ // limit the number of files downloaded at a single time
+ core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`);
+ const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()];
+ let currentFile = 0;
+ let downloadedFiles = 0;
+ core.info(`Total number of files that will be downloaded: ${downloadItems.length}`);
+ this.statusReporter.setTotalNumberOfFilesToProcess(downloadItems.length);
+ this.statusReporter.start();
+ yield Promise.all(parallelDownloads.map((index) => __awaiter(this, void 0, void 0, function* () {
+ while (currentFile < downloadItems.length) {
+ const currentFileToDownload = downloadItems[currentFile];
+ currentFile += 1;
+ const startTime = perf_hooks_1.performance.now();
+ yield this.downloadIndividualFile(index, currentFileToDownload.sourceLocation, currentFileToDownload.targetPath);
+ if (core.isDebug()) {
+ core.debug(`File: ${++downloadedFiles}/${downloadItems.length}. ${currentFileToDownload.targetPath} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish downloading`);
+ }
+ this.statusReporter.incrementProcessedCount();
+ }
+ })))
+ .catch(error => {
+ throw new Error(`Unable to download the artifact: ${error}`);
+ })
+ .finally(() => {
+ this.statusReporter.stop();
+ // safety dispose all connections
+ this.downloadHttpManager.disposeAndReplaceAllClients();
+ });
+ });
+ }
+ /**
+ * Downloads an individual file
+ * @param httpClientIndex the index of the http client that is used to make all of the calls
+ * @param artifactLocation origin location where a file will be downloaded from
+ * @param downloadPath destination location for the file being downloaded
+ */
+ downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let retryCount = 0;
+ const retryLimit = config_variables_1.getRetryLimit();
+ let destinationStream = fs.createWriteStream(downloadPath);
+ const headers = utils_1.getDownloadHeaders('application/json', true, true);
+ // a single GET request is used to download a file
+ const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () {
+ const client = this.downloadHttpManager.getClient(httpClientIndex);
+ return yield client.get(artifactLocation, headers);
+ });
+ // check the response headers to determine if the file was compressed using gzip
+ const isGzip = (incomingHeaders) => {
+ return ('content-encoding' in incomingHeaders &&
+ incomingHeaders['content-encoding'] === 'gzip');
+ };
+ // Increments the current retry count and then checks if the retry limit has been reached
+            // If there have been too many retries, fail so the download stops. If a retryAfterValue is provided,
+            // it will be used as the wait time instead of the exponential backoff
+ const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () {
+ retryCount++;
+ if (retryCount > retryLimit) {
+ return Promise.reject(new Error(`Retry limit has been reached. Unable to download ${artifactLocation}`));
+ }
+ else {
+ this.downloadHttpManager.disposeAndReplaceClient(httpClientIndex);
+ if (retryAfterValue) {
+ // Back off by waiting the specified time denoted by the retry-after header
+ core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`);
+ yield utils_1.sleep(retryAfterValue);
+ }
+ else {
+ // Back off using an exponential value that depends on the retry count
+ const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount);
+ core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the download`);
+ yield utils_1.sleep(backoffTime);
+ }
+ core.info(`Finished backoff for retry #${retryCount}, continuing with download`);
+ }
+ });
+ const isAllBytesReceived = (expected, received) => {
+ // be lenient, if any input is missing, assume success, i.e. not truncated
+ if (!expected ||
+ !received ||
+ process.env['ACTIONS_ARTIFACT_SKIP_DOWNLOAD_VALIDATION']) {
+ core.info('Skipping download validation.');
+ return true;
+ }
+ return parseInt(expected) === received;
+ };
+ const resetDestinationStream = (fileDownloadPath) => __awaiter(this, void 0, void 0, function* () {
+ destinationStream.close();
+ yield utils_1.rmFile(fileDownloadPath);
+ destinationStream = fs.createWriteStream(fileDownloadPath);
+ });
+ // keep trying to download a file until a retry limit has been reached
+ while (retryCount <= retryLimit) {
+ let response;
+ try {
+ response = yield makeDownloadRequest();
+ }
+ catch (error) {
+ // if an error is caught, it is usually indicative of a timeout so retry the download
+ core.info('An error occurred while attempting to download a file');
+ // eslint-disable-next-line no-console
+ console.log(error);
+ // increment the retryCount and use exponential backoff to wait before making the next request
+ yield backOff();
+ continue;
+ }
+ let forceRetry = false;
+ if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
+                // The body contains the contents of the file; however, calling response.readBody() causes all the content to be converted to a string
+ // which can cause some gzip encoded data to be lost
+ // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents
+ try {
+ const isGzipped = isGzip(response.message.headers);
+ yield this.pipeResponseToFile(response, destinationStream, isGzipped);
+ if (isGzipped ||
+ isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) {
+ return;
+ }
+ else {
+ forceRetry = true;
+ }
+ }
+ catch (error) {
+ // retry on error, most likely streams were corrupted
+ forceRetry = true;
+ }
+ }
+ if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) {
+ core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`);
+                yield resetDestinationStream(downloadPath);
+                // if a throttled status code is received, try to get the retryAfter header value, else defer to standard exponential backoff
+ utils_1.isThrottledStatusCode(response.message.statusCode)
+ ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers))
+ : yield backOff();
+ }
+ else {
+ // Some unexpected response code, fail immediately and stop the download
+ utils_1.displayHttpDiagnostics(response);
+ return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`));
+ }
+ }
+ });
+ }
+ /**
+ * Pipes the response from downloading an individual file to the appropriate destination stream while decoding gzip content if necessary
+ * @param response the http response received when downloading a file
+ * @param destinationStream the stream where the file should be written to
+ * @param isGzip a boolean denoting if the content is compressed using gzip and if we need to decode it
+ */
+ pipeResponseToFile(response, destinationStream, isGzip) {
+ return __awaiter(this, void 0, void 0, function* () {
+ yield new Promise((resolve, reject) => {
+ if (isGzip) {
+ const gunzip = zlib.createGunzip();
+ response.message
+ .on('error', error => {
+ core.error(`An error occurred while attempting to read the response stream`);
+ gunzip.close();
+ destinationStream.close();
+ reject(error);
+ })
+ .pipe(gunzip)
+ .on('error', error => {
+ core.error(`An error occurred while attempting to decompress the response stream`);
+ destinationStream.close();
+ reject(error);
+ })
+ .pipe(destinationStream)
+ .on('close', () => {
+ resolve();
+ })
+ .on('error', error => {
+ core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
+ reject(error);
+ });
+ }
+ else {
+ response.message
+ .on('error', error => {
+ core.error(`An error occurred while attempting to read the response stream`);
+ destinationStream.close();
+ reject(error);
+ })
+ .pipe(destinationStream)
+ .on('close', () => {
+ resolve();
+ })
+ .on('error', error => {
+ core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`);
+ reject(error);
+ });
+ }
+ });
+ return;
+ });
+ }
+}
+exports.DownloadHttpClient = DownloadHttpClient;
+//# sourceMappingURL=download-http-client.js.map
+
+/***/ }),
+
+/***/ 5686:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getDownloadSpecification = void 0;
+const path = __importStar(__nccwpck_require__(1017));
/**
- * Gets the value of an input.
- * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
- * Returns an empty string if the value is not defined.
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns string
+ * Creates a specification for a set of files that will be downloaded
+ * @param artifactName the name of the artifact
+ * @param artifactEntries a set of container entries that describe the files that make up an artifact
+ * @param downloadPath the path where the artifact will be downloaded to
+ * @param includeRootDirectory specifies if there should be an extra directory (denoted by the artifact name) where the artifact files should be downloaded to
*/
-function getInput(name, options) {
- const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
- if (options && options.required && !val) {
- throw new Error(`Input required and not supplied: ${name}`);
+function getDownloadSpecification(artifactName, artifactEntries, downloadPath, includeRootDirectory) {
+ // use a set for the directory paths so that there are no duplicates
+ const directories = new Set();
+ const specifications = {
+ rootDownloadLocation: includeRootDirectory
+ ? path.join(downloadPath, artifactName)
+ : downloadPath,
+ directoryStructure: [],
+ emptyFilesToCreate: [],
+ filesToDownload: []
+ };
+ for (const entry of artifactEntries) {
+ // Ignore artifacts in the container that don't begin with the same name
+ if (entry.path.startsWith(`${artifactName}/`) ||
+ entry.path.startsWith(`${artifactName}\\`)) {
+ // normalize all separators to the local OS
+ const normalizedPathEntry = path.normalize(entry.path);
+ // entry.path always starts with the artifact name, if includeRootDirectory is false, remove the name from the beginning of the path
+ const filePath = path.join(downloadPath, includeRootDirectory
+ ? normalizedPathEntry
+ : normalizedPathEntry.replace(artifactName, ''));
+            // A case-insensitive folder structure is maintained in the backend, but not every folder is created, so the 'folder'
+            // itemType cannot be relied upon. The file entries must be used to determine the directory structure
+ if (entry.itemType === 'file') {
+ // Get the directories that we need to create from the filePath for each individual file
+ directories.add(path.dirname(filePath));
+ if (entry.fileLength === 0) {
+ // An empty file was uploaded, create the empty files locally so that no extra http calls are made
+ specifications.emptyFilesToCreate.push(filePath);
+ }
+ else {
+ specifications.filesToDownload.push({
+ sourceLocation: entry.contentLocation,
+ targetPath: filePath
+ });
+ }
+ }
+ }
}
- if (options && options.trimWhitespace === false) {
- return val;
+ specifications.directoryStructure = Array.from(directories);
+ return specifications;
+}
+exports.getDownloadSpecification = getDownloadSpecification;
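+// The returned specification groups the container entries into the root download location,
+// directories to create, zero-byte files to create locally, and { sourceLocation, targetPath }
+// pairs that actually need to be downloaded.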
+//# sourceMappingURL=download-specification.js.map
+
+/***/ }),
+
+/***/ 6527:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.HttpManager = void 0;
+const utils_1 = __nccwpck_require__(6327);
+/**
+ * Used for managing http clients during either upload or download
+ */
+class HttpManager {
+ constructor(clientCount, userAgent) {
+ if (clientCount < 1) {
+ throw new Error('There must be at least one client');
+ }
+ this.userAgent = userAgent;
+ this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent));
+ }
+ getClient(index) {
+ return this.clients[index];
+ }
+ // client disposal is necessary if a keep-alive connection is used to properly close the connection
+ // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292
+ disposeAndReplaceClient(index) {
+ this.clients[index].dispose();
+ this.clients[index] = utils_1.createHttpClient(this.userAgent);
+ }
+ disposeAndReplaceAllClients() {
+ for (const [index] of this.clients.entries()) {
+ this.disposeAndReplaceClient(index);
+ }
}
- return val.trim();
}
-exports.getInput = getInput;
+exports.HttpManager = HttpManager;
+//# sourceMappingURL=http-manager.js.map
+
+/***/ }),
+
+/***/ 7398:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.checkArtifactFilePath = exports.checkArtifactName = void 0;
+const core_1 = __nccwpck_require__(2186);
/**
- * Gets the values of an multiline input. Each value is also trimmed.
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns string[]
+ * Invalid characters that cannot be in the artifact name or an uploaded file. Will be rejected
+ * from the server if attempted to be sent over. These characters are not allowed due to limitations with certain
+ * file systems such as NTFS. To maintain platform-agnostic behavior, any character that is not supported by an
+ * individual filesystem/platform is disallowed on all filesystems/platforms
*
+ * FilePaths can include characters such as \ and / which are not permitted in the artifact name alone
*/
-function getMultilineInput(name, options) {
- const inputs = getInput(name, options)
- .split('\n')
- .filter(x => x !== '');
- if (options && options.trimWhitespace === false) {
- return inputs;
+const invalidArtifactFilePathCharacters = new Map([
+ ['"', ' Double quote "'],
+ [':', ' Colon :'],
+ ['<', ' Less than <'],
+ ['>', ' Greater than >'],
+ ['|', ' Vertical bar |'],
+ ['*', ' Asterisk *'],
+ ['?', ' Question mark ?'],
+ ['\r', ' Carriage return \\r'],
+ ['\n', ' Line feed \\n']
+]);
+const invalidArtifactNameCharacters = new Map([
+ ...invalidArtifactFilePathCharacters,
+ ['\\', ' Backslash \\'],
+ ['/', ' Forward slash /']
+]);
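+// Hypothetical examples: an artifact named "build:linux" is rejected (colon), and "dist/output"
+// is rejected as an artifact name (forward slash) even though it is acceptable as a file path.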
+/**
+ * Scans the name of the artifact to make sure there are no illegal characters
+ */
+function checkArtifactName(name) {
+ if (!name) {
+ throw new Error(`Artifact name: ${name}, is incorrectly provided`);
+ }
+ for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactNameCharacters) {
+ if (name.includes(invalidCharacterKey)) {
+ throw new Error(`Artifact name is not valid: ${name}. Contains the following character: ${errorMessageForCharacter}
+
+Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()).toString()}
+
+These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`);
+ }
}
- return inputs.map(input => input.trim());
+ core_1.info(`Artifact name is valid!`);
}
-exports.getMultilineInput = getMultilineInput;
+exports.checkArtifactName = checkArtifactName;
/**
- * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
- * Support boolean input list: `true | True | TRUE | false | False | FALSE` .
- * The return value is also in boolean type.
- * ref: https://yaml.org/spec/1.2/spec.html#id2804923
- *
- * @param name name of the input to get
- * @param options optional. See InputOptions.
- * @returns boolean
- */
-function getBooleanInput(name, options) {
- const trueValue = ['true', 'True', 'TRUE'];
- const falseValue = ['false', 'False', 'FALSE'];
- const val = getInput(name, options);
- if (trueValue.includes(val))
- return true;
- if (falseValue.includes(val))
- return false;
- throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
- `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
-}
-exports.getBooleanInput = getBooleanInput;
-/**
- * Sets the value of an output.
- *
- * @param name name of the output to set
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function setOutput(name, value) {
- const filePath = process.env['GITHUB_OUTPUT'] || '';
- if (filePath) {
- return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
- }
- process.stdout.write(os.EOL);
- command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
-}
-exports.setOutput = setOutput;
-/**
- * Enables or disables the echoing of commands into stdout for the rest of the step.
- * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
- *
- */
-function setCommandEcho(enabled) {
- command_1.issue('echo', enabled ? 'on' : 'off');
-}
-exports.setCommandEcho = setCommandEcho;
-//-----------------------------------------------------------------------
-// Results
-//-----------------------------------------------------------------------
-/**
- * Sets the action status to failed.
- * When the action exits it will be with an exit code of 1
- * @param message add error issue message
- */
-function setFailed(message) {
- process.exitCode = ExitCode.Failure;
- error(message);
-}
-exports.setFailed = setFailed;
-//-----------------------------------------------------------------------
-// Logging Commands
-//-----------------------------------------------------------------------
-/**
- * Gets whether Actions Step Debug is on or not
- */
-function isDebug() {
- return process.env['RUNNER_DEBUG'] === '1';
-}
-exports.isDebug = isDebug;
-/**
- * Writes debug message to user log
- * @param message debug message
- */
-function debug(message) {
- command_1.issueCommand('debug', {}, message);
-}
-exports.debug = debug;
-/**
- * Adds an error issue
- * @param message error issue message. Errors will be converted to string via toString()
- * @param properties optional properties to add to the annotation.
- */
-function error(message, properties = {}) {
- command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
-}
-exports.error = error;
-/**
- * Adds a warning issue
- * @param message warning issue message. Errors will be converted to string via toString()
- * @param properties optional properties to add to the annotation.
- */
-function warning(message, properties = {}) {
- command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
-}
-exports.warning = warning;
-/**
- * Adds a notice issue
- * @param message notice issue message. Errors will be converted to string via toString()
- * @param properties optional properties to add to the annotation.
- */
-function notice(message, properties = {}) {
- command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
-}
-exports.notice = notice;
-/**
- * Writes info to log with console.log.
- * @param message info message
- */
-function info(message) {
- process.stdout.write(message + os.EOL);
-}
-exports.info = info;
-/**
- * Begin an output group.
- *
- * Output until the next `groupEnd` will be foldable in this group
- *
- * @param name The name of the output group
- */
-function startGroup(name) {
- command_1.issue('group', name);
-}
-exports.startGroup = startGroup;
-/**
- * End an output group.
- */
-function endGroup() {
- command_1.issue('endgroup');
-}
-exports.endGroup = endGroup;
-/**
- * Wrap an asynchronous function call in a group.
- *
- * Returns the same type as the function itself.
- *
- * @param name The name of the group
- * @param fn The function to wrap in the group
+ * Scans the name of the filePath used to make sure there are no illegal characters
*/
-function group(name, fn) {
- return __awaiter(this, void 0, void 0, function* () {
- startGroup(name);
- let result;
- try {
- result = yield fn();
- }
- finally {
- endGroup();
+function checkArtifactFilePath(path) {
+ if (!path) {
+ throw new Error(`Artifact path: ${path}, is incorrectly provided`);
+ }
+ for (const [invalidCharacterKey, errorMessageForCharacter] of invalidArtifactFilePathCharacters) {
+ if (path.includes(invalidCharacterKey)) {
+ throw new Error(`Artifact path is not valid: ${path}. Contains the following character: ${errorMessageForCharacter}
+
+Invalid characters include: ${Array.from(invalidArtifactFilePathCharacters.values()).toString()}
+
+The following characters are not allowed in files that are uploaded due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.
+ `);
}
- return result;
- });
-}
-exports.group = group;
-//-----------------------------------------------------------------------
-// Wrapper action state
-//-----------------------------------------------------------------------
-/**
- * Saves state for current action, the state can only be retrieved by this action's post job execution.
- *
- * @param name name of the state to store
- * @param value value to store. Non-string values will be converted to a string via JSON.stringify
- */
-// eslint-disable-next-line @typescript-eslint/no-explicit-any
-function saveState(name, value) {
- const filePath = process.env['GITHUB_STATE'] || '';
- if (filePath) {
- return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
}
- command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
-}
-exports.saveState = saveState;
-/**
- * Gets the value of an state set by this action's main execution.
- *
- * @param name name of the state to get
- * @returns string
- */
-function getState(name) {
- return process.env[`STATE_${name}`] || '';
-}
-exports.getState = getState;
-function getIDToken(aud) {
- return __awaiter(this, void 0, void 0, function* () {
- return yield oidc_utils_1.OidcClient.getIDToken(aud);
- });
}
-exports.getIDToken = getIDToken;
-/**
- * Summary exports
- */
-var summary_1 = __nccwpck_require__(1327);
-Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
-/**
- * @deprecated use core.summary
- */
-var summary_2 = __nccwpck_require__(1327);
-Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
-/**
- * Path exports
- */
-var path_utils_1 = __nccwpck_require__(2981);
-Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
-Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
-Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
-//# sourceMappingURL=core.js.map
+exports.checkArtifactFilePath = checkArtifactFilePath;
+//# sourceMappingURL=path-and-artifact-name-validation.js.map
/***/ }),
-/***/ 717:
+/***/ 755:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
-// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
@@ -1001,51 +1808,6 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
-// We use any as a valid input type
-/* eslint-disable @typescript-eslint/no-explicit-any */
-const fs = __importStar(__nccwpck_require__(7147));
-const os = __importStar(__nccwpck_require__(2037));
-const uuid_1 = __nccwpck_require__(5840);
-const utils_1 = __nccwpck_require__(5278);
-function issueFileCommand(command, message) {
- const filePath = process.env[`GITHUB_${command}`];
- if (!filePath) {
- throw new Error(`Unable to find environment variable for file command ${command}`);
- }
- if (!fs.existsSync(filePath)) {
- throw new Error(`Missing file at path: ${filePath}`);
- }
- fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
- encoding: 'utf8'
- });
-}
-exports.issueFileCommand = issueFileCommand;
-function prepareKeyValueMessage(key, value) {
- const delimiter = `ghadelimiter_${uuid_1.v4()}`;
- const convertedValue = utils_1.toCommandValue(value);
- // These should realistically never happen, but just in case someone finds a
- // way to exploit uuid generation let's not allow keys or values that contain
- // the delimiter.
- if (key.includes(delimiter)) {
- throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
- }
- if (convertedValue.includes(delimiter)) {
- throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
- }
- return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
-}
-exports.prepareKeyValueMessage = prepareKeyValueMessage;
-//# sourceMappingURL=file-command.js.map
-
-/***/ }),
-
-/***/ 8041:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
@@ -1056,76 +1818,127 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.OidcClient = void 0;
-const http_client_1 = __nccwpck_require__(6255);
-const auth_1 = __nccwpck_require__(5526);
-const core_1 = __nccwpck_require__(2186);
-class OidcClient {
- static createHttpClient(allowRetry = true, maxRetry = 10) {
- const requestOptions = {
- allowRetries: allowRetry,
- maxRetries: maxRetry
- };
- return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
- }
- static getRequestToken() {
- const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
- if (!token) {
- throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
- }
- return token;
- }
- static getIDTokenUrl() {
- const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
- if (!runtimeUrl) {
- throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
- }
- return runtimeUrl;
- }
- static getCall(id_token_url) {
- var _a;
- return __awaiter(this, void 0, void 0, function* () {
- const httpclient = OidcClient.createHttpClient();
- const res = yield httpclient
- .getJson(id_token_url)
- .catch(error => {
- throw new Error(`Failed to get ID Token. \n
- Error Code : ${error.statusCode}\n
- Error Message: ${error.result.message}`);
- });
- const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
- if (!id_token) {
- throw new Error('Response json body do not have ID Token field');
- }
- return id_token;
- });
- }
- static getIDToken(audience) {
- return __awaiter(this, void 0, void 0, function* () {
+exports.retryHttpClientRequest = exports.retry = void 0;
+const utils_1 = __nccwpck_require__(6327);
+const core = __importStar(__nccwpck_require__(2186));
+const config_variables_1 = __nccwpck_require__(2222);
+function retry(name, operation, customErrorMessages, maxAttempts) {
+ return __awaiter(this, void 0, void 0, function* () {
+ let response = undefined;
+ let statusCode = undefined;
+ let isRetryable = false;
+ let errorMessage = '';
+ let customErrorInformation = undefined;
+ let attempt = 1;
+ while (attempt <= maxAttempts) {
try {
- // New ID Token is requested from action service
- let id_token_url = OidcClient.getIDTokenUrl();
- if (audience) {
- const encodedAudience = encodeURIComponent(audience);
- id_token_url = `${id_token_url}&audience=${encodedAudience}`;
+ response = yield operation();
+ statusCode = response.message.statusCode;
+ if (utils_1.isSuccessStatusCode(statusCode)) {
+ return response;
}
- core_1.debug(`ID token url is ${id_token_url}`);
- const id_token = yield OidcClient.getCall(id_token_url);
- core_1.setSecret(id_token);
- return id_token;
+ // Extra error information that we want to display if a particular response code is hit
+ if (statusCode) {
+ customErrorInformation = customErrorMessages.get(statusCode);
+ }
+ isRetryable = utils_1.isRetryableStatusCode(statusCode);
+ errorMessage = `Artifact service responded with ${statusCode}`;
}
catch (error) {
- throw new Error(`Error message: ${error.message}`);
+ isRetryable = true;
+ errorMessage = error.message;
}
- });
+ if (!isRetryable) {
+ core.info(`${name} - Error is not retryable`);
+ if (response) {
+ utils_1.displayHttpDiagnostics(response);
+ }
+ break;
+ }
+ core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`);
+ yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt));
+ attempt++;
+ }
+ if (response) {
+ utils_1.displayHttpDiagnostics(response);
+ }
+ if (customErrorInformation) {
+ throw Error(`${name} failed: ${customErrorInformation}`);
+ }
+ throw Error(`${name} failed: ${errorMessage}`);
+ });
+}
+exports.retry = retry;
+function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield retry(name, method, customErrorMessages, maxAttempts);
+ });
+}
+exports.retryHttpClientRequest = retryHttpClientRequest;
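+// retryHttpClientRequest retries thrown errors and retryable status codes with exponential
+// backoff between attempts, up to getRetryLimit() attempts unless maxAttempts is overridden.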
+//# sourceMappingURL=requestUtils.js.map
+
+/***/ }),
+
+/***/ 9081:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.StatusReporter = void 0;
+const core_1 = __nccwpck_require__(2186);
+/**
+ * Status Reporter that displays information about the progress/status of an artifact that is being uploaded or downloaded
+ *
+ * Variable display time that can be adjusted using the displayFrequencyInMilliseconds variable
+ * The total status of the upload/download gets displayed according to this value
+ * If there is a large file that is being uploaded, extra information about the individual status can also be displayed using the updateLargeFileStatus function
+ */
+class StatusReporter {
+ constructor(displayFrequencyInMilliseconds) {
+ this.totalNumberOfFilesToProcess = 0;
+ this.processedCount = 0;
+ this.largeFiles = new Map();
+ this.totalFileStatus = undefined;
+ this.displayFrequencyInMilliseconds = displayFrequencyInMilliseconds;
+ }
+ setTotalNumberOfFilesToProcess(fileTotal) {
+ this.totalNumberOfFilesToProcess = fileTotal;
+ this.processedCount = 0;
+ }
+ start() {
+ // displays information about the total upload/download status
+ this.totalFileStatus = setInterval(() => {
+ // display 1 decimal place without any rounding
+ const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess);
+ core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`);
+ }, this.displayFrequencyInMilliseconds);
+ }
+ // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload
+ updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) {
+ // display 1 decimal place without any rounding
+ const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize);
+ core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`);
+ }
+ stop() {
+ if (this.totalFileStatus) {
+ clearInterval(this.totalFileStatus);
+ }
+ }
+ incrementProcessedCount() {
+ this.processedCount++;
+ }
+ formatPercentage(numerator, denominator) {
+ // toFixed() rounds, so use extra precision to display accurate information even though 4 decimal places are not displayed
+ return ((numerator / denominator) * 100).toFixed(4).toString();
}
}
-exports.OidcClient = OidcClient;
-//# sourceMappingURL=oidc-utils.js.map
+exports.StatusReporter = StatusReporter;
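+// In this bundle, DownloadHttpClient reports progress every 1000 ms and UploadHttpClient
+// every 10000 ms (see their respective constructors).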
+//# sourceMappingURL=status-reporter.js.map
/***/ }),
-/***/ 2981:
+/***/ 606:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1149,52 +1962,134 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __asyncValues = (this && this.__asyncValues) || function (o) {
+ if (!Symbol.asyncIterator) throw new TypeError("Symbol.asyncIterator is not defined.");
+ var m = o[Symbol.asyncIterator], i;
+ return m ? m.call(o) : (o = typeof __values === "function" ? __values(o) : o[Symbol.iterator](), i = {}, verb("next"), verb("throw"), verb("return"), i[Symbol.asyncIterator] = function () { return this; }, i);
+ function verb(n) { i[n] = o[n] && function (v) { return new Promise(function (resolve, reject) { v = o[n](v), settle(resolve, reject, v.done, v.value); }); }; }
+ function settle(resolve, reject, d, v) { Promise.resolve(v).then(function(v) { resolve({ value: v, done: d }); }, reject); }
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
-const path = __importStar(__nccwpck_require__(1017));
+exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0;
+const fs = __importStar(__nccwpck_require__(7147));
+const zlib = __importStar(__nccwpck_require__(9796));
+const util_1 = __nccwpck_require__(3837);
+const stat = util_1.promisify(fs.stat);
/**
- * toPosixPath converts the given path to the posix form. On Windows, \\ will be
- * replaced with /.
- *
- * @param pth. Path to transform.
- * @return string Posix path.
+ * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip
+ * files then will just waste a lot of time before ultimately being discarded (especially for very large files).
+ * If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is
*/
-function toPosixPath(pth) {
- return pth.replace(/[\\]/g, '/');
-}
-exports.toPosixPath = toPosixPath;
+const gzipExemptFileExtensions = [
+ '.gzip',
+ '.zip',
+ '.tar.lz',
+ '.tar.gz',
+ '.tar.bz2',
+ '.7z'
+];
/**
- * toWin32Path converts the given path to the win32 form. On Linux, / will be
- * replaced with \\.
- *
- * @param pth. Path to transform.
- * @return string Win32 path.
+ * Creates a Gzip compressed file of an original file at the provided temporary filepath location
+ * @param {string} originalFilePath filepath of whatever will be compressed. The original file will be unmodified
+ * @param {string} tempFilePath the location of where the Gzip file will be created
+ * @returns the size of gzip file that gets created
*/
-function toWin32Path(pth) {
- return pth.replace(/[/]/g, '\\');
+function createGZipFileOnDisk(originalFilePath, tempFilePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ for (const gzipExemptExtension of gzipExemptFileExtensions) {
+ if (originalFilePath.endsWith(gzipExemptExtension)) {
+ // return a really large number so that the original file gets uploaded
+ return Number.MAX_SAFE_INTEGER;
+ }
+ }
+ return new Promise((resolve, reject) => {
+ const inputStream = fs.createReadStream(originalFilePath);
+ const gzip = zlib.createGzip();
+ const outputStream = fs.createWriteStream(tempFilePath);
+ inputStream.pipe(gzip).pipe(outputStream);
+ outputStream.on('finish', () => __awaiter(this, void 0, void 0, function* () {
+ // wait for stream to finish before calculating the size which is needed as part of the Content-Length header when starting an upload
+ const size = (yield stat(tempFilePath)).size;
+ resolve(size);
+ }));
+ outputStream.on('error', error => {
+ // eslint-disable-next-line no-console
+ console.log(error);
+                reject(error);
+ });
+ });
+ });
}
-exports.toWin32Path = toWin32Path;
+exports.createGZipFileOnDisk = createGZipFileOnDisk;
/**
- * toPlatformPath converts the given path to a platform-specific path. It does
- * this by replacing instances of / and \ with the platform-specific path
- * separator.
- *
- * @param pth The path to platformize.
- * @return string The platform-specific path.
+ * Creates a GZip file in memory using a buffer. Should be used for smaller files to reduce disk I/O
+ * @param originalFilePath the path to the original file that is being GZipped
+ * @returns a buffer with the GZip file
*/
-function toPlatformPath(pth) {
- return pth.replace(/[/\\]/g, path.sep);
+function createGZipFileInBuffer(originalFilePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+ var e_1, _a;
+ const inputStream = fs.createReadStream(originalFilePath);
+ const gzip = zlib.createGzip();
+ inputStream.pipe(gzip);
+ // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043
+ const chunks = [];
+ try {
+ for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) {
+ const chunk = gzip_1_1.value;
+ chunks.push(chunk);
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1);
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ resolve(Buffer.concat(chunks));
+ }));
+ });
}
-exports.toPlatformPath = toPlatformPath;
-//# sourceMappingURL=path-utils.js.map
+exports.createGZipFileInBuffer = createGZipFileInBuffer;
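+// Callers compare the resulting gzip size against the original file size and fall back to
+// uploading the original file when compression does not reduce it (see uploadFileAsync below).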
+//# sourceMappingURL=upload-gzip.js.map
/***/ }),
-/***/ 1327:
+/***/ 4354:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
@@ -1205,390 +2100,796 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
-const os_1 = __nccwpck_require__(2037);
-const fs_1 = __nccwpck_require__(7147);
-const { access, appendFile, writeFile } = fs_1.promises;
-exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
-exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
-class Summary {
+exports.UploadHttpClient = void 0;
+const fs = __importStar(__nccwpck_require__(7147));
+const core = __importStar(__nccwpck_require__(2186));
+const tmp = __importStar(__nccwpck_require__(8065));
+const stream = __importStar(__nccwpck_require__(2781));
+const utils_1 = __nccwpck_require__(6327);
+const config_variables_1 = __nccwpck_require__(2222);
+const util_1 = __nccwpck_require__(3837);
+const url_1 = __nccwpck_require__(7310);
+const perf_hooks_1 = __nccwpck_require__(4074);
+const status_reporter_1 = __nccwpck_require__(9081);
+const http_client_1 = __nccwpck_require__(6255);
+const http_manager_1 = __nccwpck_require__(6527);
+const upload_gzip_1 = __nccwpck_require__(606);
+const requestUtils_1 = __nccwpck_require__(755);
+const stat = util_1.promisify(fs.stat);
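+// UploadHttpClient creates the artifact's file container, then uploads files concurrently,
+// gzipping each file (in memory below 64k, via a temporary file otherwise) and splitting
+// larger uploads into chunks of at most getUploadChunkSize() bytes.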
+class UploadHttpClient {
constructor() {
- this._buffer = '';
+ this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload');
+ this.statusReporter = new status_reporter_1.StatusReporter(10000);
}
/**
- * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
- * Also checks r/w permissions.
- *
- * @returns step summary file path
+ * Creates a file container for the new artifact in the remote blob storage/file service
+ * @param {string} artifactName Name of the artifact being created
+ * @returns The response from the Artifact Service if the file container was successfully created
*/
- filePath() {
+ createArtifactInFileContainer(artifactName, options) {
return __awaiter(this, void 0, void 0, function* () {
- if (this._filePath) {
- return this._filePath;
- }
- const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
- if (!pathFromEnv) {
- throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
- }
- try {
- yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
- }
- catch (_a) {
- throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
+ const parameters = {
+ Type: 'actions_storage',
+ Name: artifactName
+ };
+ // calculate retention period
+ if (options && options.retentionDays) {
+ const maxRetentionStr = config_variables_1.getRetentionDays();
+ parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr);
}
- this._filePath = pathFromEnv;
- return this._filePath;
+ const data = JSON.stringify(parameters, null, 2);
+ const artifactUrl = utils_1.getArtifactUrl();
+ // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
+ const client = this.uploadHttpManager.getClient(0);
+ const headers = utils_1.getUploadHeaders('application/json', false);
+ // Extra information to display when a particular HTTP code is returned
+ // If a 403 is returned when trying to create a file container, the customer has exceeded
+ // their storage quota so no new artifact containers can be created
+ const customErrorMessages = new Map([
+ [
+ http_client_1.HttpCodes.Forbidden,
+ 'Artifact storage quota has been hit. Unable to upload any new artifacts'
+ ],
+ [
+ http_client_1.HttpCodes.BadRequest,
+ `The artifact name ${artifactName} is not valid. Request URL ${artifactUrl}`
+ ]
+ ]);
+ const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages);
+ const body = yield response.readBody();
+ return JSON.parse(body);
});
}
/**
- * Wraps content in an HTML tag, adding any HTML attributes
- *
- * @param {string} tag HTML tag to wrap
- * @param {string | null} content content within the tag
- * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
- *
- * @returns {string} content wrapped in HTML element
- */
- wrap(tag, content, attrs = {}) {
- const htmlAttrs = Object.entries(attrs)
- .map(([key, value]) => ` ${key}="${value}"`)
- .join('');
- if (!content) {
- return `<${tag}${htmlAttrs}>`;
- }
-        return `<${tag}${htmlAttrs}>${content}</${tag}>`;
- }
- /**
- * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
- *
- * @param {SummaryWriteOptions} [options] (optional) options for write operation
- *
- * @returns {Promise} summary instance
+ * Concurrently upload all of the files in chunks
+ * @param {string} uploadUrl Base Url for the artifact that was created
+ * @param {SearchResult[]} filesToUpload A list of information about the files being uploaded
+ * @returns The size of all the files uploaded in bytes
*/
- write(options) {
+ uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) {
return __awaiter(this, void 0, void 0, function* () {
- const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
- const filePath = yield this.filePath();
- const writeFunc = overwrite ? writeFile : appendFile;
- yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
- return this.emptyBuffer();
+ const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency();
+ const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize();
+ core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`);
+ const parameters = [];
+ // by default, file uploads will continue if there is an error unless specified differently in the options
+ let continueOnError = true;
+ if (options) {
+ if (options.continueOnError === false) {
+ continueOnError = false;
+ }
+ }
+ // prepare the necessary parameters to upload all the files
+ for (const file of filesToUpload) {
+ const resourceUrl = new url_1.URL(uploadUrl);
+ resourceUrl.searchParams.append('itemPath', file.uploadFilePath);
+ parameters.push({
+ file: file.absoluteFilePath,
+ resourceUrl: resourceUrl.toString(),
+ maxChunkSize: MAX_CHUNK_SIZE,
+ continueOnError
+ });
+ }
+ const parallelUploads = [...new Array(FILE_CONCURRENCY).keys()];
+ const failedItemsToReport = [];
+ let currentFile = 0;
+ let completedFiles = 0;
+ let uploadFileSize = 0;
+ let totalFileSize = 0;
+ let abortPendingFileUploads = false;
+ this.statusReporter.setTotalNumberOfFilesToProcess(filesToUpload.length);
+ this.statusReporter.start();
+ // only allow a certain amount of files to be uploaded at once, this is done to reduce potential errors
+ yield Promise.all(parallelUploads.map((index) => __awaiter(this, void 0, void 0, function* () {
+ while (currentFile < filesToUpload.length) {
+ const currentFileParameters = parameters[currentFile];
+ currentFile += 1;
+ if (abortPendingFileUploads) {
+ failedItemsToReport.push(currentFileParameters.file);
+ continue;
+ }
+ const startTime = perf_hooks_1.performance.now();
+ const uploadFileResult = yield this.uploadFileAsync(index, currentFileParameters);
+ if (core.isDebug()) {
+ core.debug(`File: ${++completedFiles}/${filesToUpload.length}. ${currentFileParameters.file} took ${(perf_hooks_1.performance.now() - startTime).toFixed(3)} milliseconds to finish upload`);
+ }
+ uploadFileSize += uploadFileResult.successfulUploadSize;
+ totalFileSize += uploadFileResult.totalSize;
+ if (uploadFileResult.isSuccess === false) {
+ failedItemsToReport.push(currentFileParameters.file);
+ if (!continueOnError) {
+ // fail fast
+ core.error(`aborting artifact upload`);
+ abortPendingFileUploads = true;
+ }
+ }
+ this.statusReporter.incrementProcessedCount();
+ }
+ })));
+ this.statusReporter.stop();
+ // done uploading, safety dispose all connections
+ this.uploadHttpManager.disposeAndReplaceAllClients();
+ core.info(`Total size of all the files uploaded is ${uploadFileSize} bytes`);
+ return {
+ uploadSize: uploadFileSize,
+ totalSize: totalFileSize,
+ failedItems: failedItemsToReport
+ };
});
}
/**
- * Clears the summary buffer and wipes the summary file
- *
- * @returns {Summary} summary instance
+ * Asynchronously uploads a file. The file is compressed and uploaded using GZip if it is determined to save space.
+ * If the upload file is bigger than the max chunk size it will be uploaded via multiple calls
+ * @param {number} httpClientIndex The index of the httpClient that is being used to make all of the calls
+ * @param {UploadFileParameters} parameters Information about the file that needs to be uploaded
+ * @returns The size of the file that was uploaded in bytes along with any failed uploads
*/
- clear() {
+ uploadFileAsync(httpClientIndex, parameters) {
return __awaiter(this, void 0, void 0, function* () {
- return this.emptyBuffer().write({ overwrite: true });
+ const fileStat = yield stat(parameters.file);
+ const totalFileSize = fileStat.size;
+ const isFIFO = fileStat.isFIFO();
+ let offset = 0;
+ let isUploadSuccessful = true;
+ let failedChunkSizes = 0;
+ let uploadFileSize = 0;
+ let isGzip = true;
+            // the file that is being uploaded is less than 64k in size; to increase throughput and minimize disk I/O
+            // when creating a new GZip file, an in-memory buffer is used for compression
+            // with named pipes the file size is reported as zero; in that case don't read the file into memory
+ if (!isFIFO && totalFileSize < 65536) {
+ core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`);
+ const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file);
+ // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in,
+ // it will not properly get reset to the start of the stream if a chunk upload needs to be retried
+ let openUploadStream;
+ if (totalFileSize < buffer.byteLength) {
+ // compression did not help with reducing the size, use a readable stream from the original file for upload
+ core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
+ openUploadStream = () => fs.createReadStream(parameters.file);
+ isGzip = false;
+ uploadFileSize = totalFileSize;
+ }
+ else {
+ // create a readable stream using a PassThrough stream that is both readable and writable
+ core.debug(`A gzip file created for ${parameters.file} helped with reducing the size of the original file. The file will be uploaded using gzip.`);
+ openUploadStream = () => {
+ const passThrough = new stream.PassThrough();
+ passThrough.end(buffer);
+ return passThrough;
+ };
+ uploadFileSize = buffer.byteLength;
+ }
+ const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, openUploadStream, 0, uploadFileSize - 1, uploadFileSize, isGzip, totalFileSize);
+ if (!result) {
+ // chunk failed to upload
+ isUploadSuccessful = false;
+ failedChunkSizes += uploadFileSize;
+ core.warning(`Aborting upload for ${parameters.file} due to failure`);
+ }
+ return {
+ isSuccess: isUploadSuccessful,
+ successfulUploadSize: uploadFileSize - failedChunkSizes,
+ totalSize: totalFileSize
+ };
+ }
+ else {
+ // the file that is being uploaded is greater than 64k in size, a temporary file gets created on disk using the
+ // npm tmp-promise package and this file gets used to create a GZipped file
+ const tempFile = yield tmp.file();
+ core.debug(`${parameters.file} is greater than 64k in size. Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`);
+ // create a GZip file of the original file being uploaded, the original file should not be modified in any way
+ uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path);
+ let uploadFilePath = tempFile.path;
+ // compression did not help with size reduction, use the original file for upload and delete the temp GZip file
+ // for named pipes totalFileSize is zero, this assumes compression did help
+ if (!isFIFO && totalFileSize < uploadFileSize) {
+ core.debug(`The gzip file created for ${parameters.file} did not help with reducing the size of the file. The original file will be uploaded as-is`);
+ uploadFileSize = totalFileSize;
+ uploadFilePath = parameters.file;
+ isGzip = false;
+ }
+ else {
+ core.debug(`The gzip file created for ${parameters.file} is smaller than the original file. The file will be uploaded using gzip.`);
+ }
+ let abortFileUpload = false;
+ // upload only a single chunk at a time
+ while (offset < uploadFileSize) {
+ const chunkSize = Math.min(uploadFileSize - offset, parameters.maxChunkSize);
+ const startChunkIndex = offset;
+ const endChunkIndex = offset + chunkSize - 1;
+ offset += parameters.maxChunkSize;
+ if (abortFileUpload) {
+ // if we don't want to continue in the event of an error, any pending upload chunks will be marked as failed
+ failedChunkSizes += chunkSize;
+ continue;
+ }
+ const result = yield this.uploadChunk(httpClientIndex, parameters.resourceUrl, () => fs.createReadStream(uploadFilePath, {
+ start: startChunkIndex,
+ end: endChunkIndex,
+ autoClose: false
+ }), startChunkIndex, endChunkIndex, uploadFileSize, isGzip, totalFileSize);
+ if (!result) {
+ // Chunk failed to upload, report as failed and do not continue uploading any more chunks for the file. It is possible that part of a chunk was
+ // successfully uploaded so the server may report a different size for what was uploaded
+ isUploadSuccessful = false;
+ failedChunkSizes += chunkSize;
+ core.warning(`Aborting upload for ${parameters.file} due to failure`);
+ abortFileUpload = true;
+ }
+ else {
+ // if an individual file is greater than 8MB (1024*1024*8) in size, display extra information about the upload status
+ if (uploadFileSize > 8388608) {
+ this.statusReporter.updateLargeFileStatus(parameters.file, startChunkIndex, endChunkIndex, uploadFileSize);
+ }
+ }
+ }
+ // Delete the temporary file that was created as part of the upload. If the temp file does not get manually deleted by
+ // calling cleanup, it gets removed when the node process exits. For more info see: https://www.npmjs.com/package/tmp-promise#about
+ core.debug(`deleting temporary gzip file ${tempFile.path}`);
+ yield tempFile.cleanup();
+ return {
+ isSuccess: isUploadSuccessful,
+ successfulUploadSize: uploadFileSize - failedChunkSizes,
+ totalSize: totalFileSize
+ };
+ }
});
}
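+ /*
+  * Illustrative sketch (hypothetical sizes, not taken from the code above): how the
+  * chunking loop splits a 10 MiB upload when maxChunkSize is 4 MiB.
+  *
+  *   uploadFileSize = 10 * 1024 * 1024; // 10485760 bytes
+  *   maxChunkSize   =  4 * 1024 * 1024; // 4194304 bytes
+  *   // chunk 1: start 0,       end 4194303   -> Content-Range: bytes 0-4194303/10485760
+  *   // chunk 2: start 4194304, end 8388607   -> Content-Range: bytes 4194304-8388607/10485760
+  *   // chunk 3: start 8388608, end 10485759  -> Content-Range: bytes 8388608-10485759/10485760
+  */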
/**
- * Returns the current summary buffer as a string
- *
- * @returns {string} string of summary buffer
+ * Uploads a chunk of an individual file to the specified resourceUrl. If the upload fails and the status code
+ * indicates a retryable status, the chunk upload is retried until the retry limit is reached
+ * @param {number} httpClientIndex The index of the httpClient being used to make all the necessary calls
+ * @param {string} resourceUrl Url of the resource that the chunk will be uploaded to
+ * @param {NodeJS.ReadableStream} openStream Stream of the file that will be uploaded
+ * @param {number} start Starting byte index of file that the chunk belongs to
+ * @param {number} end Ending byte index of file that the chunk belongs to
+ * @param {number} uploadFileSize Total size of the file in bytes that is being uploaded
+ * @param {boolean} isGzip Denotes if we are uploading a Gzip compressed stream
+ * @param {number} totalFileSize Original total size of the file that is being uploaded
+ * @returns if the chunk was successfully uploaded
*/
- stringify() {
- return this._buffer;
- }
- /**
- * If the summary buffer is empty
- *
- * @returns {boolen} true if the buffer is empty
- */
- isEmptyBuffer() {
- return this._buffer.length === 0;
- }
- /**
- * Resets the summary buffer without writing to summary file
- *
- * @returns {Summary} summary instance
- */
- emptyBuffer() {
- this._buffer = '';
- return this;
- }
- /**
- * Adds raw text to the summary buffer
- *
- * @param {string} text content to add
- * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
- *
- * @returns {Summary} summary instance
- */
- addRaw(text, addEOL = false) {
- this._buffer += text;
- return addEOL ? this.addEOL() : this;
- }
- /**
- * Adds the operating system-specific end-of-line marker to the buffer
- *
- * @returns {Summary} summary instance
- */
- addEOL() {
- return this.addRaw(os_1.EOL);
- }
- /**
- * Adds an HTML codeblock to the summary buffer
- *
- * @param {string} code content to render within fenced code block
- * @param {string} lang (optional) language to syntax highlight code
- *
- * @returns {Summary} summary instance
- */
- addCodeBlock(code, lang) {
- const attrs = Object.assign({}, (lang && { lang }));
- const element = this.wrap('pre', this.wrap('code', code), attrs);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML list to the summary buffer
- *
- * @param {string[]} items list of items to render
- * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
- *
- * @returns {Summary} summary instance
- */
- addList(items, ordered = false) {
- const tag = ordered ? 'ol' : 'ul';
- const listItems = items.map(item => this.wrap('li', item)).join('');
- const element = this.wrap(tag, listItems);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML table to the summary buffer
- *
- * @param {SummaryTableCell[]} rows table rows
- *
- * @returns {Summary} summary instance
- */
- addTable(rows) {
- const tableBody = rows
- .map(row => {
- const cells = row
- .map(cell => {
- if (typeof cell === 'string') {
- return this.wrap('td', cell);
+ uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) {
+ return __awaiter(this, void 0, void 0, function* () {
+ // open a new stream and read it to compute the digest
+ const digest = yield utils_1.digestForStream(openStream());
+ // prepare all the necessary headers before making any http call
+ const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize), digest);
+ const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () {
+ const client = this.uploadHttpManager.getClient(httpClientIndex);
+ return yield client.sendStream('PUT', resourceUrl, openStream(), headers);
+ });
+ let retryCount = 0;
+ const retryLimit = config_variables_1.getRetryLimit();
+ // Increments the current retry count and then checks if the retry limit has been reached
+ // If there have been too many retries, fail so the upload stops
+ const incrementAndCheckRetryLimit = (response) => {
+ retryCount++;
+ if (retryCount > retryLimit) {
+ if (response) {
+ utils_1.displayHttpDiagnostics(response);
+ }
+ core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`);
+ return true;
}
- const { header, data, colspan, rowspan } = cell;
- const tag = header ? 'th' : 'td';
- const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
- return this.wrap(tag, data, attrs);
- })
- .join('');
- return this.wrap('tr', cells);
- })
- .join('');
- const element = this.wrap('table', tableBody);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds a collapsable HTML details element to the summary buffer
- *
- * @param {string} label text for the closed state
- * @param {string} content collapsable content
- *
- * @returns {Summary} summary instance
- */
- addDetails(label, content) {
- const element = this.wrap('details', this.wrap('summary', label) + content);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML image tag to the summary buffer
- *
- * @param {string} src path to the image you to embed
- * @param {string} alt text description of the image
- * @param {SummaryImageOptions} options (optional) addition image attributes
- *
- * @returns {Summary} summary instance
- */
- addImage(src, alt, options) {
- const { width, height } = options || {};
- const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
- const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML section heading element
- *
- * @param {string} text heading text
- * @param {number | string} [level=1] (optional) the heading level, default: 1
- *
- * @returns {Summary} summary instance
- */
- addHeading(text, level) {
- const tag = `h${level}`;
- const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
- ? tag
- : 'h1';
- const element = this.wrap(allowedTag, text);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML thematic break (<hr>) to the summary buffer
- *
- * @returns {Summary} summary instance
- */
- addSeparator() {
- const element = this.wrap('hr', null);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML line break (<br>) to the summary buffer
- *
- * @returns {Summary} summary instance
- */
- addBreak() {
- const element = this.wrap('br', null);
- return this.addRaw(element).addEOL();
- }
- /**
- * Adds an HTML blockquote to the summary buffer
- *
- * @param {string} text quote text
- * @param {string} cite (optional) citation url
- *
- * @returns {Summary} summary instance
- */
- addQuote(text, cite) {
- const attrs = Object.assign({}, (cite && { cite }));
- const element = this.wrap('blockquote', text, attrs);
- return this.addRaw(element).addEOL();
+ return false;
+ };
+ const backOff = (retryAfterValue) => __awaiter(this, void 0, void 0, function* () {
+ this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex);
+ if (retryAfterValue) {
+ core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`);
+ yield utils_1.sleep(retryAfterValue);
+ }
+ else {
+ const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount);
+ core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`);
+ yield utils_1.sleep(backoffTime);
+ }
+ core.info(`Finished backoff for retry #${retryCount}, continuing with upload`);
+ return;
+ });
+ // allow for failed chunks to be retried multiple times
+ while (retryCount <= retryLimit) {
+ let response;
+ try {
+ response = yield uploadChunkRequest();
+ }
+ catch (error) {
+ // if an error is caught, it is usually indicative of a timeout so retry the upload
+ core.info(`An error has been caught http-client index ${httpClientIndex}, retrying the upload`);
+ // eslint-disable-next-line no-console
+ console.log(error);
+ if (incrementAndCheckRetryLimit()) {
+ return false;
+ }
+ yield backOff();
+ continue;
+ }
+ // Always read the body of the response. There is potential for a resource leak if the body is not read which will
+ // result in the connection remaining open along with unintended consequences when trying to dispose of the client
+ yield response.readBody();
+ if (utils_1.isSuccessStatusCode(response.message.statusCode)) {
+ return true;
+ }
+ else if (utils_1.isRetryableStatusCode(response.message.statusCode)) {
+ core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`);
+ if (incrementAndCheckRetryLimit(response)) {
+ return false;
+ }
+ utils_1.isThrottledStatusCode(response.message.statusCode)
+ ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers))
+ : yield backOff();
+ }
+ else {
+ core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`);
+ utils_1.displayHttpDiagnostics(response);
+ return false;
+ }
+ }
+ return false;
+ });
}
/**
- * Adds an HTML anchor tag to the summary buffer
- *
- * @param {string} text link text/content
- * @param {string} href hyperlink
- *
- * @returns {Summary} summary instance
+ * Updates the size of the artifact from -1 which was initially set when the container was first created for the artifact.
+ * Updating the size indicates that we are done uploading all the contents of the artifact
*/
- addLink(text, href) {
- const element = this.wrap('a', text, { href });
- return this.addRaw(element).addEOL();
+ patchArtifactSize(size, artifactName) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const resourceUrl = new url_1.URL(utils_1.getArtifactUrl());
+ resourceUrl.searchParams.append('artifactName', artifactName);
+ const parameters = { Size: size };
+ const data = JSON.stringify(parameters, null, 2);
+ core.debug(`URL is ${resourceUrl.toString()}`);
+ // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately
+ const client = this.uploadHttpManager.getClient(0);
+ const headers = utils_1.getUploadHeaders('application/json', false);
+ // Extra information to display when a particular HTTP code is returned
+ const customErrorMessages = new Map([
+ [
+ http_client_1.HttpCodes.NotFound,
+ `An Artifact with the name ${artifactName} was not found`
+ ]
+ ]);
+ // TODO retry for all possible response codes, the artifact upload is pretty much complete so we should try to finish this at all costs
+ const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages);
+ yield response.readBody();
+ core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`);
+ });
}
}
-const _summary = new Summary();
-/**
- * @deprecated use `core.summary`
- */
-exports.markdownSummary = _summary;
-exports.summary = _summary;
-//# sourceMappingURL=summary.js.map
+exports.UploadHttpClient = UploadHttpClient;
+//# sourceMappingURL=upload-http-client.js.map
/***/ }),
-/***/ 5278:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 183:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
-// We use any as a valid input type
-/* eslint-disable @typescript-eslint/no-explicit-any */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.toCommandProperties = exports.toCommandValue = void 0;
+exports.getUploadSpecification = void 0;
+const fs = __importStar(__nccwpck_require__(7147));
+const core_1 = __nccwpck_require__(2186);
+const path_1 = __nccwpck_require__(1017);
+const path_and_artifact_name_validation_1 = __nccwpck_require__(7398);
/**
- * Sanitizes an input into a string so it can be passed into issueCommand safely
- * @param input input to sanitize into a string
+ * Creates a specification that describes how each file that is part of the artifact will be uploaded
+ * @param artifactName the name of the artifact being uploaded. Used during upload to denote where the artifact is stored on the server
+ * @param rootDirectory an absolute file path that denotes the path that should be removed from the beginning of each artifact file
+ * @param artifactFiles a list of absolute file paths that denote what should be uploaded as part of the artifact
*/
-function toCommandValue(input) {
- if (input === null || input === undefined) {
- return '';
- }
- else if (typeof input === 'string' || input instanceof String) {
- return input;
+function getUploadSpecification(artifactName, rootDirectory, artifactFiles) {
+ // artifact name was checked earlier on, no need to check again
+ const specifications = [];
+ if (!fs.existsSync(rootDirectory)) {
+ throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`);
+ }
+ if (!fs.lstatSync(rootDirectory).isDirectory()) {
+ throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`);
+ }
+ // Normalize and resolve, this allows for either absolute or relative paths to be used
+ rootDirectory = path_1.normalize(rootDirectory);
+ rootDirectory = path_1.resolve(rootDirectory);
+ /*
+ Example to demonstrate behavior
+
+ Input:
+ artifactName: my-artifact
+ rootDirectory: '/home/user/files/plz-upload'
+ artifactFiles: [
+ '/home/user/files/plz-upload/file1.txt',
+ '/home/user/files/plz-upload/file2.txt',
+ '/home/user/files/plz-upload/dir/file3.txt'
+ ]
+
+ Output:
+ specifications: [
+ ['/home/user/files/plz-upload/file1.txt', 'my-artifact/file1.txt'],
+ ['/home/user/files/plz-upload/file2.txt', 'my-artifact/file2.txt'],
+ ['/home/user/files/plz-upload/dir/file3.txt', 'my-artifact/dir/file3.txt']
+ ]
+ */
+ for (let file of artifactFiles) {
+ if (!fs.existsSync(file)) {
+ throw new Error(`File ${file} does not exist`);
+ }
+ if (!fs.lstatSync(file).isDirectory()) {
+ // Normalize and resolve, this allows for either absolute or relative paths to be used
+ file = path_1.normalize(file);
+ file = path_1.resolve(file);
+ if (!file.startsWith(rootDirectory)) {
+ throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`);
+ }
+ // Check for forbidden characters in file paths that will be rejected during upload
+ const uploadPath = file.replace(rootDirectory, '');
+ path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath);
+ /*
+ uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all
+ be saved in the same container. The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts
+
+ path.join handles all the following cases and would return 'artifact-name/file-to-upload.txt'
+ join('artifact-name/', 'file-to-upload.txt')
+ join('artifact-name/', '/file-to-upload.txt')
+ join('artifact-name', 'file-to-upload.txt')
+ join('artifact-name', '/file-to-upload.txt')
+ */
+ specifications.push({
+ absoluteFilePath: file,
+ uploadFilePath: path_1.join(artifactName, uploadPath)
+ });
+ }
+ else {
+ // Directories are rejected by the server during upload
+ core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`);
+ }
}
- return JSON.stringify(input);
+ return specifications;
}
-exports.toCommandValue = toCommandValue;
-/**
- *
- * @param annotationProperties
- * @returns The command properties to send with the actual annotation command
- * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
- */
-function toCommandProperties(annotationProperties) {
- if (!Object.keys(annotationProperties).length) {
- return {};
- }
- return {
- title: annotationProperties.title,
- file: annotationProperties.file,
- line: annotationProperties.startLine,
- endLine: annotationProperties.endLine,
- col: annotationProperties.startColumn,
- endColumn: annotationProperties.endColumn
- };
-}
-exports.toCommandProperties = toCommandProperties;
-//# sourceMappingURL=utils.js.map
+exports.getUploadSpecification = getUploadSpecification;
+//# sourceMappingURL=upload-specification.js.map
/***/ }),
-/***/ 4087:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 6327:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.Context = void 0;
+exports.digestForStream = exports.sleep = exports.getProperRetention = exports.rmFile = exports.getFileSize = exports.createEmptyFilesForArtifact = exports.createDirectoriesForArtifact = exports.displayHttpDiagnostics = exports.getArtifactUrl = exports.createHttpClient = exports.getUploadHeaders = exports.getDownloadHeaders = exports.getContentRange = exports.tryGetRetryAfterValueTimeInMilliseconds = exports.isThrottledStatusCode = exports.isRetryableStatusCode = exports.isForbiddenStatusCode = exports.isSuccessStatusCode = exports.getApiVersion = exports.parseEnvNumber = exports.getExponentialRetryTimeInMilliseconds = void 0;
+const crypto_1 = __importDefault(__nccwpck_require__(6113));
const fs_1 = __nccwpck_require__(7147);
-const os_1 = __nccwpck_require__(2037);
-class Context {
- /**
- * Hydrate the context from the environment
- */
- constructor() {
- var _a, _b, _c;
- this.payload = {};
- if (process.env.GITHUB_EVENT_PATH) {
- if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
- this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
- }
- else {
- const path = process.env.GITHUB_EVENT_PATH;
- process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
- }
+const core_1 = __nccwpck_require__(2186);
+const http_client_1 = __nccwpck_require__(6255);
+const auth_1 = __nccwpck_require__(5526);
+const config_variables_1 = __nccwpck_require__(2222);
+const crc64_1 = __importDefault(__nccwpck_require__(3549));
+/**
+ * Returns a retry time in milliseconds that exponentially gets larger
+ * depending on the amount of retries that have been attempted
+ */
+function getExponentialRetryTimeInMilliseconds(retryCount) {
+ if (retryCount < 0) {
+ throw new Error('RetryCount should not be negative');
+ }
+ else if (retryCount === 0) {
+ return config_variables_1.getInitialRetryIntervalInMilliseconds();
+ }
+ const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount;
+ const maxTime = minTime * config_variables_1.getRetryMultiplier();
+ // returns a random number between the minTime (inclusive) and the maxTime (exclusive)
+ return Math.trunc(Math.random() * (maxTime - minTime) + minTime);
+}
+exports.getExponentialRetryTimeInMilliseconds = getExponentialRetryTimeInMilliseconds;
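+/*
+ * Illustrative sketch, assuming (for the example only) an initial retry interval of
+ * 3000 ms and a retry multiplier of 1.5 from config_variables:
+ *
+ *   getExponentialRetryTimeInMilliseconds(0); // => 3000
+ *   getExponentialRetryTimeInMilliseconds(1); // => random value in [4500, 6750)
+ *   getExponentialRetryTimeInMilliseconds(2); // => random value in [9000, 13500)
+ */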
+/**
+ * Parses a env variable that is a number
+ */
+function parseEnvNumber(key) {
+ const value = Number(process.env[key]);
+ if (Number.isNaN(value) || value < 0) {
+ return undefined;
+ }
+ return value;
+}
+exports.parseEnvNumber = parseEnvNumber;
+/**
+ * Various utility functions to help with the necessary API calls
+ */
+function getApiVersion() {
+ return '6.0-preview';
+}
+exports.getApiVersion = getApiVersion;
+function isSuccessStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ return statusCode >= 200 && statusCode < 300;
+}
+exports.isSuccessStatusCode = isSuccessStatusCode;
+function isForbiddenStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ return statusCode === http_client_1.HttpCodes.Forbidden;
+}
+exports.isForbiddenStatusCode = isForbiddenStatusCode;
+function isRetryableStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ const retryableStatusCodes = [
+ http_client_1.HttpCodes.BadGateway,
+ http_client_1.HttpCodes.GatewayTimeout,
+ http_client_1.HttpCodes.InternalServerError,
+ http_client_1.HttpCodes.ServiceUnavailable,
+ http_client_1.HttpCodes.TooManyRequests,
+ 413 // Payload Too Large
+ ];
+ return retryableStatusCodes.includes(statusCode);
+}
+exports.isRetryableStatusCode = isRetryableStatusCode;
+function isThrottledStatusCode(statusCode) {
+ if (!statusCode) {
+ return false;
+ }
+ return statusCode === http_client_1.HttpCodes.TooManyRequests;
+}
+exports.isThrottledStatusCode = isThrottledStatusCode;
+/**
+ * Attempts to get the retry-after value from a set of http headers. The retry time
+ * is originally denoted in seconds, so if present, it is converted to milliseconds
+ * @param headers all the headers received when making an http call
+ */
+function tryGetRetryAfterValueTimeInMilliseconds(headers) {
+ if (headers['retry-after']) {
+ const retryTime = Number(headers['retry-after']);
+ if (!isNaN(retryTime)) {
+ core_1.info(`Retry-After header is present with a value of ${retryTime}`);
+ return retryTime * 1000;
}
- this.eventName = process.env.GITHUB_EVENT_NAME;
- this.sha = process.env.GITHUB_SHA;
- this.ref = process.env.GITHUB_REF;
- this.workflow = process.env.GITHUB_WORKFLOW;
- this.action = process.env.GITHUB_ACTION;
- this.actor = process.env.GITHUB_ACTOR;
- this.job = process.env.GITHUB_JOB;
- this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
- this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
- this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
- this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
- this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
+ core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`);
+ return undefined;
}
- get issue() {
- const payload = this.payload;
- return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
+ core_1.info(`No retry-after header was found. Dumping all headers for diagnostic purposes`);
+ // eslint-disable-next-line no-console
+ console.log(headers);
+ return undefined;
+}
+exports.tryGetRetryAfterValueTimeInMilliseconds = tryGetRetryAfterValueTimeInMilliseconds;
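+/*
+ * Illustrative sketch (example headers only):
+ *
+ *   tryGetRetryAfterValueTimeInMilliseconds({ 'retry-after': '30' }); // => 30000
+ *   tryGetRetryAfterValueTimeInMilliseconds({}); // => undefined, headers are dumped for diagnostics
+ */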
+function getContentRange(start, end, total) {
+ // Format: `bytes start-end/fileSize`
+ // start and end are inclusive
+ // For a 200 byte chunk starting at byte 0:
+ // Content-Range: bytes 0-199/200
+ return `bytes ${start}-${end}/${total}`;
+}
+exports.getContentRange = getContentRange;
+/**
+ * Sets all the necessary headers when downloading an artifact
+ * @param {string} contentType the type of content being uploaded
+ * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
+ * @param {boolean} acceptGzip can we accept a gzip encoded response
+ * @param {string} acceptType the type of content that we can accept
+ * @returns appropriate headers to make a specific http call during artifact download
+ */
+function getDownloadHeaders(contentType, isKeepAlive, acceptGzip) {
+ const requestOptions = {};
+ if (contentType) {
+ requestOptions['Content-Type'] = contentType;
}
- get repo() {
- if (process.env.GITHUB_REPOSITORY) {
- const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
- return { owner, repo };
+ if (isKeepAlive) {
+ requestOptions['Connection'] = 'Keep-Alive';
+ // keep alive for at least 10 seconds before closing the connection
+ requestOptions['Keep-Alive'] = '10';
+ }
+ if (acceptGzip) {
+ // if we are expecting a response with gzip encoding, it should be using an octet-stream in the accept header
+ requestOptions['Accept-Encoding'] = 'gzip';
+ requestOptions['Accept'] = `application/octet-stream;api-version=${getApiVersion()}`;
+ }
+ else {
+ // default to application/json if we are not working with gzip content
+ requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`;
+ }
+ return requestOptions;
+}
+exports.getDownloadHeaders = getDownloadHeaders;
+/**
+ * Sets all the necessary headers when uploading an artifact
+ * @param {string} contentType the type of content being uploaded
+ * @param {boolean} isKeepAlive is the same connection being used to make multiple calls
+ * @param {boolean} isGzip is the connection being used to upload GZip compressed content
+ * @param {number} uncompressedLength the original size of the content if something is being uploaded that has been compressed
+ * @param {number} contentLength the length of the content that is being uploaded
+ * @param {string} contentRange the range of the content that is being uploaded
+ * @returns appropriate headers to make a specific http call during artifact upload
+ */
+function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, contentLength, contentRange, digest) {
+ const requestOptions = {};
+ requestOptions['Accept'] = `application/json;api-version=${getApiVersion()}`;
+ if (contentType) {
+ requestOptions['Content-Type'] = contentType;
+ }
+ if (isKeepAlive) {
+ requestOptions['Connection'] = 'Keep-Alive';
+ // keep alive for at least 10 seconds before closing the connection
+ requestOptions['Keep-Alive'] = '10';
+ }
+ if (isGzip) {
+ requestOptions['Content-Encoding'] = 'gzip';
+ requestOptions['x-tfs-filelength'] = uncompressedLength;
+ }
+ if (contentLength) {
+ requestOptions['Content-Length'] = contentLength;
+ }
+ if (contentRange) {
+ requestOptions['Content-Range'] = contentRange;
+ }
+ if (digest) {
+ requestOptions['x-actions-results-crc64'] = digest.crc64;
+ requestOptions['x-actions-results-md5'] = digest.md5;
+ }
+ return requestOptions;
+}
+exports.getUploadHeaders = getUploadHeaders;
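+/*
+ * Illustrative sketch (hypothetical sizes and digests): headers produced for a fully
+ * gzipped 512-byte chunk of a 1024-byte file, uploaded over a keep-alive connection.
+ *
+ *   getUploadHeaders('application/octet-stream', true, true, 1024, 512, 'bytes 0-511/512', digest);
+ *   // => {
+ *   //   'Accept': 'application/json;api-version=6.0-preview',
+ *   //   'Content-Type': 'application/octet-stream',
+ *   //   'Connection': 'Keep-Alive',
+ *   //   'Keep-Alive': '10',
+ *   //   'Content-Encoding': 'gzip',
+ *   //   'x-tfs-filelength': 1024,
+ *   //   'Content-Length': 512,
+ *   //   'Content-Range': 'bytes 0-511/512',
+ *   //   'x-actions-results-crc64': digest.crc64,
+ *   //   'x-actions-results-md5': digest.md5
+ *   // }
+ */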
+function createHttpClient(userAgent) {
+ return new http_client_1.HttpClient(userAgent, [
+ new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken())
+ ]);
+}
+exports.createHttpClient = createHttpClient;
+function getArtifactUrl() {
+ const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`;
+ core_1.debug(`Artifact Url: ${artifactUrl}`);
+ return artifactUrl;
+}
+exports.getArtifactUrl = getArtifactUrl;
+/**
+ * Uh oh! Something might have gone wrong during either upload or download. The IHttpClientResponse object contains information
+ * about the http call that was made by the actions http client. This information might be useful to display for diagnostic purposes, but
+ * this entire object is really big and most of the information is not really useful. This function takes the response object and displays only
+ * the information that we want.
+ *
+ * Certain information such as the TLSSocket and the Readable state are not really useful for diagnostic purposes so they can be avoided.
+ * Other information such as the headers, the response code and message might be useful, so this is displayed.
+ */
+function displayHttpDiagnostics(response) {
+ core_1.info(`##### Begin Diagnostic HTTP information #####
+Status Code: ${response.message.statusCode}
+Status Message: ${response.message.statusMessage}
+Header Information: ${JSON.stringify(response.message.headers, undefined, 2)}
+###### End Diagnostic HTTP information ######`);
+}
+exports.displayHttpDiagnostics = displayHttpDiagnostics;
+function createDirectoriesForArtifact(directories) {
+ return __awaiter(this, void 0, void 0, function* () {
+ for (const directory of directories) {
+ yield fs_1.promises.mkdir(directory, {
+ recursive: true
+ });
}
- if (this.payload.repository) {
- return {
- owner: this.payload.repository.owner.login,
- repo: this.payload.repository.name
- };
+ });
+}
+exports.createDirectoriesForArtifact = createDirectoriesForArtifact;
+function createEmptyFilesForArtifact(emptyFilesToCreate) {
+ return __awaiter(this, void 0, void 0, function* () {
+ for (const filePath of emptyFilesToCreate) {
+ yield (yield fs_1.promises.open(filePath, 'w')).close();
+ }
+ });
+}
+exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact;
+function getFileSize(filePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const stats = yield fs_1.promises.stat(filePath);
+ core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`);
+ return stats.size;
+ });
+}
+exports.getFileSize = getFileSize;
+function rmFile(filePath) {
+ return __awaiter(this, void 0, void 0, function* () {
+ yield fs_1.promises.unlink(filePath);
+ });
+}
+exports.rmFile = rmFile;
+function getProperRetention(retentionInput, retentionSetting) {
+ if (retentionInput < 0) {
+ throw new Error('Invalid retention, minimum value is 1.');
+ }
+ let retention = retentionInput;
+ if (retentionSetting) {
+ const maxRetention = parseInt(retentionSetting);
+ if (!isNaN(maxRetention) && maxRetention < retention) {
+ core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`);
+ retention = maxRetention;
}
- throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
}
+ return retention;
}
-exports.Context = Context;
-//# sourceMappingURL=context.js.map
+exports.getProperRetention = getProperRetention;
+function sleep(milliseconds) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise(resolve => setTimeout(resolve, milliseconds));
+ });
+}
+exports.sleep = sleep;
+function digestForStream(stream) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve, reject) => {
+ const crc64 = new crc64_1.default();
+ const md5 = crypto_1.default.createHash('md5');
+ stream
+ .on('data', data => {
+ crc64.update(data);
+ md5.update(data);
+ })
+ .on('end', () => resolve({
+ crc64: crc64.digest('base64'),
+ md5: md5.digest('base64')
+ }))
+ .on('error', reject);
+ });
+ });
+}
+exports.digestForStream = digestForStream;
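+/*
+ * Illustrative usage sketch (hypothetical file path), inside an async function:
+ *
+ *   const digest = await digestForStream(fs_1.createReadStream('/tmp/example.bin', { start: 0, end: 511 }));
+ *   // digest.crc64 and digest.md5 are base64-encoded strings, matching the
+ *   // x-actions-results-crc64 / x-actions-results-md5 upload headers above
+ */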
+//# sourceMappingURL=utils.js.map
/***/ }),
-/***/ 5438:
+/***/ 7351:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1613,26 +2914,81 @@ var __importStar = (this && this.__importStar) || function (mod) {
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getOctokit = exports.context = void 0;
-const Context = __importStar(__nccwpck_require__(4087));
-const utils_1 = __nccwpck_require__(3030);
-exports.context = new Context.Context();
+exports.issue = exports.issueCommand = void 0;
+const os = __importStar(__nccwpck_require__(2037));
+const utils_1 = __nccwpck_require__(5278);
/**
- * Returns a hydrated octokit ready to use for GitHub Actions
+ * Commands
*
- * @param token the repo PAT or GITHUB_TOKEN
- * @param options other options to set
+ * Command Format:
+ * ::name key=value,key=value::message
+ *
+ * Examples:
+ * ::warning::This is the message
+ * ::set-env name=MY_VAR::some value
*/
-function getOctokit(token, options, ...additionalPlugins) {
- const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
- return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));
+function issueCommand(command, properties, message) {
+ const cmd = new Command(command, properties, message);
+ process.stdout.write(cmd.toString() + os.EOL);
}
-exports.getOctokit = getOctokit;
-//# sourceMappingURL=github.js.map
+exports.issueCommand = issueCommand;
+function issue(name, message = '') {
+ issueCommand(name, {}, message);
+}
+exports.issue = issue;
+const CMD_STRING = '::';
+class Command {
+ constructor(command, properties, message) {
+ if (!command) {
+ command = 'missing.command';
+ }
+ this.command = command;
+ this.properties = properties;
+ this.message = message;
+ }
+ toString() {
+ let cmdStr = CMD_STRING + this.command;
+ if (this.properties && Object.keys(this.properties).length > 0) {
+ cmdStr += ' ';
+ let first = true;
+ for (const key in this.properties) {
+ if (this.properties.hasOwnProperty(key)) {
+ const val = this.properties[key];
+ if (val) {
+ if (first) {
+ first = false;
+ }
+ else {
+ cmdStr += ',';
+ }
+ cmdStr += `${key}=${escapeProperty(val)}`;
+ }
+ }
+ }
+ }
+ cmdStr += `${CMD_STRING}${escapeData(this.message)}`;
+ return cmdStr;
+ }
+}
+function escapeData(s) {
+ return utils_1.toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A');
+}
+function escapeProperty(s) {
+ return utils_1.toCommandValue(s)
+ .replace(/%/g, '%25')
+ .replace(/\r/g, '%0D')
+ .replace(/\n/g, '%0A')
+ .replace(/:/g, '%3A')
+ .replace(/,/g, '%2C');
+}
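+/*
+ * Illustrative sketch (hypothetical properties and message): how a workflow command
+ * is rendered and escaped before being written to stdout.
+ *
+ *   issueCommand('warning', { file: 'app.js', line: '10' }, 'first line\nsecond line');
+ *   // writes: ::warning file=app.js,line=10::first line%0Asecond line
+ *   // escapeData handles '%', '\r' and '\n' in the message; escapeProperty
+ *   // additionally escapes ':' and ',' in property values
+ */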
+//# sourceMappingURL=command.js.map
/***/ }),
-/***/ 7914:
+/***/ 2186:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1656,187 +3012,331 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
-const httpClient = __importStar(__nccwpck_require__(6255));
-function getAuthString(token, options) {
- if (!token && !options.auth) {
- throw new Error('Parameter token or opts.auth is required');
- }
- else if (token && options.auth) {
- throw new Error('Parameters token and opts.auth may not both be specified');
- }
- return typeof options.auth === 'string' ? options.auth : `token ${token}`;
-}
-exports.getAuthString = getAuthString;
-function getProxyAgent(destinationUrl) {
- const hc = new httpClient.HttpClient();
- return hc.getAgent(destinationUrl);
-}
-exports.getProxyAgent = getProxyAgent;
-function getApiBaseUrl() {
- return process.env['GITHUB_API_URL'] || 'https://api.github.com';
-}
-exports.getApiBaseUrl = getApiBaseUrl;
-//# sourceMappingURL=utils.js.map
-
-/***/ }),
-
-/***/ 3030:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-
-"use strict";
-
-var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
-}) : (function(o, m, k, k2) {
- if (k2 === undefined) k2 = k;
- o[k2] = m[k];
-}));
-var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
- Object.defineProperty(o, "default", { enumerable: true, value: v });
-}) : function(o, v) {
- o["default"] = v;
-});
-var __importStar = (this && this.__importStar) || function (mod) {
- if (mod && mod.__esModule) return mod;
- var result = {};
- if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
- __setModuleDefault(result, mod);
- return result;
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;
-const Context = __importStar(__nccwpck_require__(4087));
-const Utils = __importStar(__nccwpck_require__(7914));
-// octokit + plugins
-const core_1 = __nccwpck_require__(6762);
-const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044);
-const plugin_paginate_rest_1 = __nccwpck_require__(4193);
-exports.context = new Context.Context();
-const baseUrl = Utils.getApiBaseUrl();
-exports.defaults = {
- baseUrl,
- request: {
- agent: Utils.getProxyAgent(baseUrl)
- }
-};
-exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);
+exports.getIDToken = exports.getState = exports.saveState = exports.group = exports.endGroup = exports.startGroup = exports.info = exports.notice = exports.warning = exports.error = exports.debug = exports.isDebug = exports.setFailed = exports.setCommandEcho = exports.setOutput = exports.getBooleanInput = exports.getMultilineInput = exports.getInput = exports.addPath = exports.setSecret = exports.exportVariable = exports.ExitCode = void 0;
+const command_1 = __nccwpck_require__(7351);
+const file_command_1 = __nccwpck_require__(717);
+const utils_1 = __nccwpck_require__(5278);
+const os = __importStar(__nccwpck_require__(2037));
+const path = __importStar(__nccwpck_require__(1017));
+const oidc_utils_1 = __nccwpck_require__(8041);
/**
- * Convience function to correctly format Octokit Options to pass into the constructor.
- *
- * @param token the repo PAT or GITHUB_TOKEN
- * @param options other options to set
+ * The code to exit an action
*/
-function getOctokitOptions(token, options) {
- const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller
- // Auth
- const auth = Utils.getAuthString(token, opts);
- if (auth) {
- opts.auth = auth;
+var ExitCode;
+(function (ExitCode) {
+ /**
+ * A code indicating that the action was successful
+ */
+ ExitCode[ExitCode["Success"] = 0] = "Success";
+ /**
+ * A code indicating that the action was a failure
+ */
+ ExitCode[ExitCode["Failure"] = 1] = "Failure";
+})(ExitCode = exports.ExitCode || (exports.ExitCode = {}));
+//-----------------------------------------------------------------------
+// Variables
+//-----------------------------------------------------------------------
+/**
+ * Sets env variable for this action and future actions in the job
+ * @param name the name of the variable to set
+ * @param val the value of the variable. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function exportVariable(name, val) {
+ const convertedVal = utils_1.toCommandValue(val);
+ process.env[name] = convertedVal;
+ const filePath = process.env['GITHUB_ENV'] || '';
+ if (filePath) {
+ return file_command_1.issueFileCommand('ENV', file_command_1.prepareKeyValueMessage(name, val));
}
- return opts;
+ command_1.issueCommand('set-env', { name }, convertedVal);
}
-exports.getOctokitOptions = getOctokitOptions;
-//# sourceMappingURL=utils.js.map
-
-/***/ }),
-
-/***/ 5526:
-/***/ (function(__unused_webpack_module, exports) {
-
-"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
-class BasicCredentialHandler {
- constructor(username, password) {
- this.username = username;
- this.password = password;
- }
- prepareRequest(options) {
- if (!options.headers) {
- throw Error('The request has no headers');
- }
- options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
- }
- // This handler cannot handle 401
- canHandleAuthentication() {
- return false;
- }
- handleAuthentication() {
- return __awaiter(this, void 0, void 0, function* () {
- throw new Error('not implemented');
- });
- }
+exports.exportVariable = exportVariable;
+/**
+ * Registers a secret which will get masked from logs
+ * @param secret value of the secret
+ */
+function setSecret(secret) {
+ command_1.issueCommand('add-mask', {}, secret);
}
-exports.BasicCredentialHandler = BasicCredentialHandler;
-class BearerCredentialHandler {
- constructor(token) {
- this.token = token;
+exports.setSecret = setSecret;
+/**
+ * Prepends inputPath to the PATH (for this action and future actions)
+ * @param inputPath
+ */
+function addPath(inputPath) {
+ const filePath = process.env['GITHUB_PATH'] || '';
+ if (filePath) {
+ file_command_1.issueFileCommand('PATH', inputPath);
}
- // currently implements pre-authorization
- // TODO: support preAuth = false where it hooks on 401
- prepareRequest(options) {
- if (!options.headers) {
- throw Error('The request has no headers');
- }
- options.headers['Authorization'] = `Bearer ${this.token}`;
+ else {
+ command_1.issueCommand('add-path', {}, inputPath);
}
- // This handler cannot handle 401
- canHandleAuthentication() {
- return false;
+ process.env['PATH'] = `${inputPath}${path.delimiter}${process.env['PATH']}`;
+}
+exports.addPath = addPath;
+/**
+ * Gets the value of an input.
+ * Unless trimWhitespace is set to false in InputOptions, the value is also trimmed.
+ * Returns an empty string if the value is not defined.
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string
+ */
+function getInput(name, options) {
+ const val = process.env[`INPUT_${name.replace(/ /g, '_').toUpperCase()}`] || '';
+ if (options && options.required && !val) {
+ throw new Error(`Input required and not supplied: ${name}`);
}
- handleAuthentication() {
- return __awaiter(this, void 0, void 0, function* () {
- throw new Error('not implemented');
- });
+ if (options && options.trimWhitespace === false) {
+ return val;
}
+ return val.trim();
}
-exports.BearerCredentialHandler = BearerCredentialHandler;
-class PersonalAccessTokenCredentialHandler {
- constructor(token) {
- this.token = token;
- }
- // currently implements pre-authorization
- // TODO: support preAuth = false where it hooks on 401
- prepareRequest(options) {
- if (!options.headers) {
- throw Error('The request has no headers');
- }
- options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
+exports.getInput = getInput;
+/**
+ * Gets the values of a multiline input. Each value is also trimmed.
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns string[]
+ *
+ */
+function getMultilineInput(name, options) {
+ const inputs = getInput(name, options)
+ .split('\n')
+ .filter(x => x !== '');
+ if (options && options.trimWhitespace === false) {
+ return inputs;
}
- // This handler cannot handle 401
- canHandleAuthentication() {
+ return inputs.map(input => input.trim());
+}
+exports.getMultilineInput = getMultilineInput;
+/**
+ * Gets the input value of the boolean type in the YAML 1.2 "core schema" specification.
+ * Supported boolean input list: `true | True | TRUE | false | False | FALSE`.
+ * The return value is also in boolean type.
+ * ref: https://yaml.org/spec/1.2/spec.html#id2804923
+ *
+ * @param name name of the input to get
+ * @param options optional. See InputOptions.
+ * @returns boolean
+ */
+function getBooleanInput(name, options) {
+ const trueValue = ['true', 'True', 'TRUE'];
+ const falseValue = ['false', 'False', 'FALSE'];
+ const val = getInput(name, options);
+ if (trueValue.includes(val))
+ return true;
+ if (falseValue.includes(val))
return false;
+ throw new TypeError(`Input does not meet YAML 1.2 "Core Schema" specification: ${name}\n` +
+ `Support boolean input list: \`true | True | TRUE | false | False | FALSE\``);
+}
+exports.getBooleanInput = getBooleanInput;
+/**
+ * Sets the value of an output.
+ *
+ * @param name name of the output to set
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function setOutput(name, value) {
+ const filePath = process.env['GITHUB_OUTPUT'] || '';
+ if (filePath) {
+ return file_command_1.issueFileCommand('OUTPUT', file_command_1.prepareKeyValueMessage(name, value));
}
- handleAuthentication() {
- return __awaiter(this, void 0, void 0, function* () {
- throw new Error('not implemented');
- });
- }
+ process.stdout.write(os.EOL);
+ command_1.issueCommand('set-output', { name }, utils_1.toCommandValue(value));
}
-exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
-//# sourceMappingURL=auth.js.map
+exports.setOutput = setOutput;
+/**
+ * Enables or disables the echoing of commands into stdout for the rest of the step.
+ * Echoing is disabled by default if ACTIONS_STEP_DEBUG is not set.
+ *
+ */
+function setCommandEcho(enabled) {
+ command_1.issue('echo', enabled ? 'on' : 'off');
+}
+exports.setCommandEcho = setCommandEcho;
+//-----------------------------------------------------------------------
+// Results
+//-----------------------------------------------------------------------
+/**
+ * Sets the action status to failed.
+ * When the action exits it will be with an exit code of 1
+ * @param message add error issue message
+ */
+function setFailed(message) {
+ process.exitCode = ExitCode.Failure;
+ error(message);
+}
+exports.setFailed = setFailed;
+//-----------------------------------------------------------------------
+// Logging Commands
+//-----------------------------------------------------------------------
+/**
+ * Gets whether Actions Step Debug is on or not
+ */
+function isDebug() {
+ return process.env['RUNNER_DEBUG'] === '1';
+}
+exports.isDebug = isDebug;
+/**
+ * Writes debug message to user log
+ * @param message debug message
+ */
+function debug(message) {
+ command_1.issueCommand('debug', {}, message);
+}
+exports.debug = debug;
+/**
+ * Adds an error issue
+ * @param message error issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function error(message, properties = {}) {
+ command_1.issueCommand('error', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+exports.error = error;
+/**
+ * Adds a warning issue
+ * @param message warning issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function warning(message, properties = {}) {
+ command_1.issueCommand('warning', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+exports.warning = warning;
+/**
+ * Adds a notice issue
+ * @param message notice issue message. Errors will be converted to string via toString()
+ * @param properties optional properties to add to the annotation.
+ */
+function notice(message, properties = {}) {
+ command_1.issueCommand('notice', utils_1.toCommandProperties(properties), message instanceof Error ? message.toString() : message);
+}
+exports.notice = notice;
+/**
+ * Writes info to log with console.log.
+ * @param message info message
+ */
+function info(message) {
+ process.stdout.write(message + os.EOL);
+}
+exports.info = info;
+/**
+ * Begin an output group.
+ *
+ * Output until the next `groupEnd` will be foldable in this group
+ *
+ * @param name The name of the output group
+ */
+function startGroup(name) {
+ command_1.issue('group', name);
+}
+exports.startGroup = startGroup;
+/**
+ * End an output group.
+ */
+function endGroup() {
+ command_1.issue('endgroup');
+}
+exports.endGroup = endGroup;
+/**
+ * Wrap an asynchronous function call in a group.
+ *
+ * Returns the same type as the function itself.
+ *
+ * @param name The name of the group
+ * @param fn The function to wrap in the group
+ */
+function group(name, fn) {
+ return __awaiter(this, void 0, void 0, function* () {
+ startGroup(name);
+ let result;
+ try {
+ result = yield fn();
+ }
+ finally {
+ endGroup();
+ }
+ return result;
+ });
+}
+exports.group = group;
+//-----------------------------------------------------------------------
+// Wrapper action state
+//-----------------------------------------------------------------------
+/**
+ * Saves state for current action, the state can only be retrieved by this action's post job execution.
+ *
+ * @param name name of the state to store
+ * @param value value to store. Non-string values will be converted to a string via JSON.stringify
+ */
+// eslint-disable-next-line @typescript-eslint/no-explicit-any
+function saveState(name, value) {
+ const filePath = process.env['GITHUB_STATE'] || '';
+ if (filePath) {
+ return file_command_1.issueFileCommand('STATE', file_command_1.prepareKeyValueMessage(name, value));
+ }
+ command_1.issueCommand('save-state', { name }, utils_1.toCommandValue(value));
+}
+exports.saveState = saveState;
+/**
+ * Gets the value of a state set by this action's main execution.
+ *
+ * @param name name of the state to get
+ * @returns string
+ */
+function getState(name) {
+ return process.env[`STATE_${name}`] || '';
+}
+exports.getState = getState;
+function getIDToken(aud) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return yield oidc_utils_1.OidcClient.getIDToken(aud);
+ });
+}
+exports.getIDToken = getIDToken;
+/**
+ * Summary exports
+ */
+var summary_1 = __nccwpck_require__(1327);
+Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
+/**
+ * @deprecated use core.summary
+ */
+var summary_2 = __nccwpck_require__(1327);
+Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
+/**
+ * Path exports
+ */
+var path_utils_1 = __nccwpck_require__(2981);
+Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
+Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
+Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
+//# sourceMappingURL=core.js.map
/***/ }),
-/***/ 6255:
+/***/ 717:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
-/* eslint-disable @typescript-eslint/no-explicit-any */
+// For internal use, subject to change.
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
@@ -1856,6 +3356,51 @@ var __importStar = (this && this.__importStar) || function (mod) {
__setModuleDefault(result, mod);
return result;
};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.prepareKeyValueMessage = exports.issueFileCommand = void 0;
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
+const fs = __importStar(__nccwpck_require__(7147));
+const os = __importStar(__nccwpck_require__(2037));
+const uuid_1 = __nccwpck_require__(5840);
+const utils_1 = __nccwpck_require__(5278);
+function issueFileCommand(command, message) {
+ const filePath = process.env[`GITHUB_${command}`];
+ if (!filePath) {
+ throw new Error(`Unable to find environment variable for file command ${command}`);
+ }
+ if (!fs.existsSync(filePath)) {
+ throw new Error(`Missing file at path: ${filePath}`);
+ }
+ fs.appendFileSync(filePath, `${utils_1.toCommandValue(message)}${os.EOL}`, {
+ encoding: 'utf8'
+ });
+}
+exports.issueFileCommand = issueFileCommand;
+function prepareKeyValueMessage(key, value) {
+ const delimiter = `ghadelimiter_${uuid_1.v4()}`;
+ const convertedValue = utils_1.toCommandValue(value);
+ // These should realistically never happen, but just in case someone finds a
+ // way to exploit uuid generation let's not allow keys or values that contain
+ // the delimiter.
+ if (key.includes(delimiter)) {
+ throw new Error(`Unexpected input: name should not contain the delimiter "${delimiter}"`);
+ }
+ if (convertedValue.includes(delimiter)) {
+ throw new Error(`Unexpected input: value should not contain the delimiter "${delimiter}"`);
+ }
+ return `${key}<<${delimiter}${os.EOL}${convertedValue}${os.EOL}${delimiter}`;
+}
+exports.prepareKeyValueMessage = prepareKeyValueMessage;
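+// Illustrative sketch (comment only): prepareKeyValueMessage() renders a value in
+// the heredoc-style block that issueFileCommand() appends to the runner's file
+// commands, e.g. issueFileCommand('STATE', ...) appends to $GITHUB_STATE:
+//   result<<ghadelimiter_<uuid>
+//   {"should_skip":true}
+//   ghadelimiter_<uuid>
+// The key name and JSON payload above are example values only.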
+//# sourceMappingURL=file-command.js.map
+
+/***/ }),
+
+/***/ 8041:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
@@ -1866,1706 +3411,4460 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
-const http = __importStar(__nccwpck_require__(3685));
-const https = __importStar(__nccwpck_require__(5687));
-const pm = __importStar(__nccwpck_require__(9835));
-const tunnel = __importStar(__nccwpck_require__(4294));
-var HttpCodes;
-(function (HttpCodes) {
- HttpCodes[HttpCodes["OK"] = 200] = "OK";
- HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
- HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
- HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
- HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
- HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
- HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
- HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
- HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
- HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
- HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
- HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
- HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
- HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
- HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
- HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
- HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
- HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
- HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
- HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
- HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
- HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
- HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
- HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
- HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
- HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
- HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
-})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
-var Headers;
-(function (Headers) {
- Headers["Accept"] = "accept";
- Headers["ContentType"] = "content-type";
-})(Headers = exports.Headers || (exports.Headers = {}));
-var MediaTypes;
-(function (MediaTypes) {
- MediaTypes["ApplicationJson"] = "application/json";
-})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
-/**
- * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
- */
-function getProxyUrl(serverUrl) {
- const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
- return proxyUrl ? proxyUrl.href : '';
-}
-exports.getProxyUrl = getProxyUrl;
-const HttpRedirectCodes = [
- HttpCodes.MovedPermanently,
- HttpCodes.ResourceMoved,
- HttpCodes.SeeOther,
- HttpCodes.TemporaryRedirect,
- HttpCodes.PermanentRedirect
-];
-const HttpResponseRetryCodes = [
- HttpCodes.BadGateway,
- HttpCodes.ServiceUnavailable,
- HttpCodes.GatewayTimeout
-];
-const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
-const ExponentialBackoffCeiling = 10;
-const ExponentialBackoffTimeSlice = 5;
-class HttpClientError extends Error {
- constructor(message, statusCode) {
- super(message);
- this.name = 'HttpClientError';
- this.statusCode = statusCode;
- Object.setPrototypeOf(this, HttpClientError.prototype);
- }
-}
-exports.HttpClientError = HttpClientError;
-class HttpClientResponse {
- constructor(message) {
- this.message = message;
+exports.OidcClient = void 0;
+const http_client_1 = __nccwpck_require__(6255);
+const auth_1 = __nccwpck_require__(5526);
+const core_1 = __nccwpck_require__(2186);
+class OidcClient {
+ static createHttpClient(allowRetry = true, maxRetry = 10) {
+ const requestOptions = {
+ allowRetries: allowRetry,
+ maxRetries: maxRetry
+ };
+ return new http_client_1.HttpClient('actions/oidc-client', [new auth_1.BearerCredentialHandler(OidcClient.getRequestToken())], requestOptions);
}
- readBody() {
- return __awaiter(this, void 0, void 0, function* () {
- return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
- let output = Buffer.alloc(0);
- this.message.on('data', (chunk) => {
- output = Buffer.concat([output, chunk]);
- });
- this.message.on('end', () => {
- resolve(output.toString());
- });
- }));
- });
+ static getRequestToken() {
+ const token = process.env['ACTIONS_ID_TOKEN_REQUEST_TOKEN'];
+ if (!token) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_TOKEN env variable');
+ }
+ return token;
}
-}
-exports.HttpClientResponse = HttpClientResponse;
-function isHttps(requestUrl) {
- const parsedUrl = new URL(requestUrl);
- return parsedUrl.protocol === 'https:';
-}
-exports.isHttps = isHttps;
-class HttpClient {
- constructor(userAgent, handlers, requestOptions) {
- this._ignoreSslError = false;
- this._allowRedirects = true;
- this._allowRedirectDowngrade = false;
- this._maxRedirects = 50;
- this._allowRetries = false;
- this._maxRetries = 1;
- this._keepAlive = false;
- this._disposed = false;
- this.userAgent = userAgent;
- this.handlers = handlers || [];
- this.requestOptions = requestOptions;
- if (requestOptions) {
- if (requestOptions.ignoreSslError != null) {
- this._ignoreSslError = requestOptions.ignoreSslError;
- }
- this._socketTimeout = requestOptions.socketTimeout;
- if (requestOptions.allowRedirects != null) {
- this._allowRedirects = requestOptions.allowRedirects;
- }
- if (requestOptions.allowRedirectDowngrade != null) {
- this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
- }
- if (requestOptions.maxRedirects != null) {
- this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
- }
- if (requestOptions.keepAlive != null) {
- this._keepAlive = requestOptions.keepAlive;
- }
- if (requestOptions.allowRetries != null) {
- this._allowRetries = requestOptions.allowRetries;
- }
- if (requestOptions.maxRetries != null) {
- this._maxRetries = requestOptions.maxRetries;
- }
+ static getIDTokenUrl() {
+ const runtimeUrl = process.env['ACTIONS_ID_TOKEN_REQUEST_URL'];
+ if (!runtimeUrl) {
+ throw new Error('Unable to get ACTIONS_ID_TOKEN_REQUEST_URL env variable');
}
+ return runtimeUrl;
}
- options(requestUrl, additionalHeaders) {
+ static getCall(id_token_url) {
+ var _a;
return __awaiter(this, void 0, void 0, function* () {
- return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+ const httpclient = OidcClient.createHttpClient();
+ const res = yield httpclient
+ .getJson(id_token_url)
+ .catch(error => {
+ throw new Error(`Failed to get ID Token. \n
+ Error Code : ${error.statusCode}\n
+ Error Message: ${error.result.message}`);
+ });
+ const id_token = (_a = res.result) === null || _a === void 0 ? void 0 : _a.value;
+ if (!id_token) {
+ throw new Error('Response json body do not have ID Token field');
+ }
+ return id_token;
});
}
- get(requestUrl, additionalHeaders) {
+ static getIDToken(audience) {
return __awaiter(this, void 0, void 0, function* () {
- return this.request('GET', requestUrl, null, additionalHeaders || {});
+ try {
+ // New ID Token is requested from action service
+ let id_token_url = OidcClient.getIDTokenUrl();
+ if (audience) {
+ const encodedAudience = encodeURIComponent(audience);
+ id_token_url = `${id_token_url}&audience=${encodedAudience}`;
+ }
+ core_1.debug(`ID token url is ${id_token_url}`);
+ const id_token = yield OidcClient.getCall(id_token_url);
+ core_1.setSecret(id_token);
+ return id_token;
+ }
+ catch (error) {
+ throw new Error(`Error message: ${error.message}`);
+ }
});
}
- del(requestUrl, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('DELETE', requestUrl, null, additionalHeaders || {});
- });
+}
+exports.OidcClient = OidcClient;
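+// Illustrative usage sketch (comment only): callers normally go through
+// core.getIDToken(), which delegates to OidcClient and masks the returned token.
+//   const idToken = await core.getIDToken('sigstore');  // audience is optional
+// Requires the job to grant `id-token: write` permissions; the audience value
+// shown is only an example.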
+//# sourceMappingURL=oidc-utils.js.map
+
+/***/ }),
+
+/***/ 2981:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
+const path = __importStar(__nccwpck_require__(1017));
+/**
+ * toPosixPath converts the given path to the posix form. On Windows, \\ will be
+ * replaced with /.
+ *
+ * @param pth. Path to transform.
+ * @return string Posix path.
+ */
+function toPosixPath(pth) {
+ return pth.replace(/[\\]/g, '/');
+}
+exports.toPosixPath = toPosixPath;
+/**
+ * toWin32Path converts the given path to the win32 form. On Linux, / will be
+ * replaced with \\.
+ *
+ * @param pth. Path to transform.
+ * @return string Win32 path.
+ */
+function toWin32Path(pth) {
+ return pth.replace(/[/]/g, '\\');
+}
+exports.toWin32Path = toWin32Path;
+/**
+ * toPlatformPath converts the given path to a platform-specific path. It does
+ * this by replacing instances of / and \ with the platform-specific path
+ * separator.
+ *
+ * @param pth The path to platformize.
+ * @return string The platform-specific path.
+ */
+function toPlatformPath(pth) {
+ return pth.replace(/[/\\]/g, path.sep);
+}
+exports.toPlatformPath = toPlatformPath;
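+// Illustrative sketch (comment only) of the three converters:
+//   toPosixPath('dir\\file.txt')    -> 'dir/file.txt'
+//   toWin32Path('dir/file.txt')     -> 'dir\\file.txt'
+//   toPlatformPath('dir/file.txt')  -> 'dir\\file.txt' on Windows, 'dir/file.txt' elsewhere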
+//# sourceMappingURL=path-utils.js.map
+
+/***/ }),
+
+/***/ 1327:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
+const os_1 = __nccwpck_require__(2037);
+const fs_1 = __nccwpck_require__(7147);
+const { access, appendFile, writeFile } = fs_1.promises;
+exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
+exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
+class Summary {
+ constructor() {
+ this._buffer = '';
}
- post(requestUrl, data, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('POST', requestUrl, data, additionalHeaders || {});
- });
- }
- patch(requestUrl, data, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('PATCH', requestUrl, data, additionalHeaders || {});
- });
- }
- put(requestUrl, data, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('PUT', requestUrl, data, additionalHeaders || {});
- });
- }
- head(requestUrl, additionalHeaders) {
- return __awaiter(this, void 0, void 0, function* () {
- return this.request('HEAD', requestUrl, null, additionalHeaders || {});
- });
- }
- sendStream(verb, requestUrl, stream, additionalHeaders) {
+ /**
+ * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
+ * Also checks r/w permissions.
+ *
+ * @returns step summary file path
+ */
+ filePath() {
return __awaiter(this, void 0, void 0, function* () {
- return this.request(verb, requestUrl, stream, additionalHeaders);
+ if (this._filePath) {
+ return this._filePath;
+ }
+ const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
+ if (!pathFromEnv) {
+ throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
+ }
+ try {
+ yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
+ }
+ catch (_a) {
+ throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
+ }
+ this._filePath = pathFromEnv;
+ return this._filePath;
});
}
/**
- * Gets a typed object from an endpoint
- * Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
+ * Wraps content in an HTML tag, adding any HTML attributes
+ *
+ * @param {string} tag HTML tag to wrap
+ * @param {string | null} content content within the tag
+ * @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
+ *
+ * @returns {string} content wrapped in HTML element
*/
- getJson(requestUrl, additionalHeaders = {}) {
- return __awaiter(this, void 0, void 0, function* () {
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- const res = yield this.get(requestUrl, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- });
+ wrap(tag, content, attrs = {}) {
+ const htmlAttrs = Object.entries(attrs)
+ .map(([key, value]) => ` ${key}="${value}"`)
+ .join('');
+ if (!content) {
+ return `<${tag}${htmlAttrs}>`;
+ }
+ return `<${tag}${htmlAttrs}>${content}</${tag}>`;
}
- postJson(requestUrl, obj, additionalHeaders = {}) {
+ /**
+ * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
+ *
+ * @param {SummaryWriteOptions} [options] (optional) options for write operation
+ *
+ * @returns {Promise<Summary>} summary instance
+ */
+ write(options) {
return __awaiter(this, void 0, void 0, function* () {
- const data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- const res = yield this.post(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
+ const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
+ const filePath = yield this.filePath();
+ const writeFunc = overwrite ? writeFile : appendFile;
+ yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
+ return this.emptyBuffer();
});
}
- putJson(requestUrl, obj, additionalHeaders = {}) {
+ /**
+ * Clears the summary buffer and wipes the summary file
+ *
+ * @returns {Summary} summary instance
+ */
+ clear() {
return __awaiter(this, void 0, void 0, function* () {
- const data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- const res = yield this.put(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
+ return this.emptyBuffer().write({ overwrite: true });
});
}
- patchJson(requestUrl, obj, additionalHeaders = {}) {
- return __awaiter(this, void 0, void 0, function* () {
- const data = JSON.stringify(obj, null, 2);
- additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
- additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
- const res = yield this.patch(requestUrl, data, additionalHeaders);
- return this._processResponse(res, this.requestOptions);
- });
+ /**
+ * Returns the current summary buffer as a string
+ *
+ * @returns {string} string of summary buffer
+ */
+ stringify() {
+ return this._buffer;
}
/**
- * Makes a raw http request.
- * All other methods such as get, post, patch, and request ultimately call this.
- * Prefer get, del, post and patch
+ * If the summary buffer is empty
+ *
+ * @returns {boolean} true if the buffer is empty
*/
- request(verb, requestUrl, data, headers) {
- return __awaiter(this, void 0, void 0, function* () {
- if (this._disposed) {
- throw new Error('Client has already been disposed.');
- }
- const parsedUrl = new URL(requestUrl);
- let info = this._prepareRequest(verb, parsedUrl, headers);
- // Only perform retries on reads since writes may not be idempotent.
- const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
- ? this._maxRetries + 1
- : 1;
- let numTries = 0;
- let response;
- do {
- response = yield this.requestRaw(info, data);
- // Check if it's an authentication challenge
- if (response &&
- response.message &&
- response.message.statusCode === HttpCodes.Unauthorized) {
- let authenticationHandler;
- for (const handler of this.handlers) {
- if (handler.canHandleAuthentication(response)) {
- authenticationHandler = handler;
- break;
- }
- }
- if (authenticationHandler) {
- return authenticationHandler.handleAuthentication(this, info, data);
- }
- else {
- // We have received an unauthorized response but have no handlers to handle it.
- // Let the response return to the caller.
- return response;
- }
- }
- let redirectsRemaining = this._maxRedirects;
- while (response.message.statusCode &&
- HttpRedirectCodes.includes(response.message.statusCode) &&
- this._allowRedirects &&
- redirectsRemaining > 0) {
- const redirectUrl = response.message.headers['location'];
- if (!redirectUrl) {
- // if there's no location to redirect to, we won't
- break;
- }
- const parsedRedirectUrl = new URL(redirectUrl);
- if (parsedUrl.protocol === 'https:' &&
- parsedUrl.protocol !== parsedRedirectUrl.protocol &&
- !this._allowRedirectDowngrade) {
- throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
- }
- // we need to finish reading the response before reassigning response
- // which will leak the open socket.
- yield response.readBody();
- // strip authorization header if redirected to a different hostname
- if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
- for (const header in headers) {
- // header names are case insensitive
- if (header.toLowerCase() === 'authorization') {
- delete headers[header];
- }
- }
- }
- // let's make the request with the new redirectUrl
- info = this._prepareRequest(verb, parsedRedirectUrl, headers);
- response = yield this.requestRaw(info, data);
- redirectsRemaining--;
- }
- if (!response.message.statusCode ||
- !HttpResponseRetryCodes.includes(response.message.statusCode)) {
- // If not a retry code, return immediately instead of retrying
- return response;
- }
- numTries += 1;
- if (numTries < maxTries) {
- yield response.readBody();
- yield this._performExponentialBackoff(numTries);
- }
- } while (numTries < maxTries);
- return response;
- });
+ isEmptyBuffer() {
+ return this._buffer.length === 0;
}
/**
- * Needs to be called if keepAlive is set to true in request options.
+ * Resets the summary buffer without writing to summary file
+ *
+ * @returns {Summary} summary instance
*/
- dispose() {
- if (this._agent) {
- this._agent.destroy();
- }
- this._disposed = true;
+ emptyBuffer() {
+ this._buffer = '';
+ return this;
}
/**
- * Raw request.
- * @param info
- * @param data
+ * Adds raw text to the summary buffer
+ *
+ * @param {string} text content to add
+ * @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
+ *
+ * @returns {Summary} summary instance
*/
- requestRaw(info, data) {
- return __awaiter(this, void 0, void 0, function* () {
- return new Promise((resolve, reject) => {
- function callbackForResult(err, res) {
- if (err) {
- reject(err);
- }
- else if (!res) {
- // If `err` is not passed, then `res` must be passed.
- reject(new Error('Unknown error'));
- }
- else {
- resolve(res);
- }
- }
- this.requestRawWithCallback(info, data, callbackForResult);
- });
- });
+ addRaw(text, addEOL = false) {
+ this._buffer += text;
+ return addEOL ? this.addEOL() : this;
}
/**
- * Raw request with callback.
- * @param info
- * @param data
- * @param onResult
+ * Adds the operating system-specific end-of-line marker to the buffer
+ *
+ * @returns {Summary} summary instance
*/
- requestRawWithCallback(info, data, onResult) {
- if (typeof data === 'string') {
- if (!info.options.headers) {
- info.options.headers = {};
- }
- info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
- }
- let callbackCalled = false;
- function handleResult(err, res) {
- if (!callbackCalled) {
- callbackCalled = true;
- onResult(err, res);
- }
- }
- const req = info.httpModule.request(info.options, (msg) => {
- const res = new HttpClientResponse(msg);
- handleResult(undefined, res);
- });
- let socket;
- req.on('socket', sock => {
- socket = sock;
- });
- // If we ever get disconnected, we want the socket to timeout eventually
- req.setTimeout(this._socketTimeout || 3 * 60000, () => {
- if (socket) {
- socket.end();
- }
- handleResult(new Error(`Request timeout: ${info.options.path}`));
- });
- req.on('error', function (err) {
- // err has statusCode property
- // res should have headers
- handleResult(err);
- });
- if (data && typeof data === 'string') {
- req.write(data, 'utf8');
- }
- if (data && typeof data !== 'string') {
- data.on('close', function () {
- req.end();
- });
- data.pipe(req);
- }
- else {
- req.end();
- }
+ addEOL() {
+ return this.addRaw(os_1.EOL);
}
/**
- * Gets an http agent. This function is useful when you need an http agent that handles
- * routing through a proxy server - depending upon the url and proxy environment variables.
- * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ * Adds an HTML codeblock to the summary buffer
+ *
+ * @param {string} code content to render within fenced code block
+ * @param {string} lang (optional) language to syntax highlight code
+ *
+ * @returns {Summary} summary instance
*/
- getAgent(serverUrl) {
- const parsedUrl = new URL(serverUrl);
- return this._getAgent(parsedUrl);
+ addCodeBlock(code, lang) {
+ const attrs = Object.assign({}, (lang && { lang }));
+ const element = this.wrap('pre', this.wrap('code', code), attrs);
+ return this.addRaw(element).addEOL();
}
- _prepareRequest(method, requestUrl, headers) {
- const info = {};
- info.parsedUrl = requestUrl;
- const usingSsl = info.parsedUrl.protocol === 'https:';
- info.httpModule = usingSsl ? https : http;
- const defaultPort = usingSsl ? 443 : 80;
- info.options = {};
- info.options.host = info.parsedUrl.hostname;
- info.options.port = info.parsedUrl.port
- ? parseInt(info.parsedUrl.port)
- : defaultPort;
- info.options.path =
- (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
- info.options.method = method;
- info.options.headers = this._mergeHeaders(headers);
- if (this.userAgent != null) {
- info.options.headers['user-agent'] = this.userAgent;
- }
- info.options.agent = this._getAgent(info.parsedUrl);
- // gives handlers an opportunity to participate
- if (this.handlers) {
- for (const handler of this.handlers) {
- handler.prepareRequest(info.options);
- }
- }
- return info;
+ /**
+ * Adds an HTML list to the summary buffer
+ *
+ * @param {string[]} items list of items to render
+ * @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
+ *
+ * @returns {Summary} summary instance
+ */
+ addList(items, ordered = false) {
+ const tag = ordered ? 'ol' : 'ul';
+ const listItems = items.map(item => this.wrap('li', item)).join('');
+ const element = this.wrap(tag, listItems);
+ return this.addRaw(element).addEOL();
}
- _mergeHeaders(headers) {
- if (this.requestOptions && this.requestOptions.headers) {
- return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
- }
- return lowercaseKeys(headers || {});
+ /**
+ * Adds an HTML table to the summary buffer
+ *
+ * @param {SummaryTableCell[]} rows table rows
+ *
+ * @returns {Summary} summary instance
+ */
+ addTable(rows) {
+ const tableBody = rows
+ .map(row => {
+ const cells = row
+ .map(cell => {
+ if (typeof cell === 'string') {
+ return this.wrap('td', cell);
+ }
+ const { header, data, colspan, rowspan } = cell;
+ const tag = header ? 'th' : 'td';
+ const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
+ return this.wrap(tag, data, attrs);
+ })
+ .join('');
+ return this.wrap('tr', cells);
+ })
+ .join('');
+ const element = this.wrap('table', tableBody);
+ return this.addRaw(element).addEOL();
}
- _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
- let clientHeader;
- if (this.requestOptions && this.requestOptions.headers) {
- clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
- }
- return additionalHeaders[header] || clientHeader || _default;
+ /**
+ * Adds a collapsible HTML details element to the summary buffer
+ *
+ * @param {string} label text for the closed state
+ * @param {string} content collapsible content
+ *
+ * @returns {Summary} summary instance
+ */
+ addDetails(label, content) {
+ const element = this.wrap('details', this.wrap('summary', label) + content);
+ return this.addRaw(element).addEOL();
}
- _getAgent(parsedUrl) {
- let agent;
- const proxyUrl = pm.getProxyUrl(parsedUrl);
- const useProxy = proxyUrl && proxyUrl.hostname;
- if (this._keepAlive && useProxy) {
- agent = this._proxyAgent;
- }
- if (this._keepAlive && !useProxy) {
- agent = this._agent;
- }
- // if agent is already assigned use that agent.
- if (agent) {
- return agent;
- }
- const usingSsl = parsedUrl.protocol === 'https:';
- let maxSockets = 100;
- if (this.requestOptions) {
- maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
- }
- // This is `useProxy` again, but we need to check `proxyURl` directly for TypeScripts's flow analysis.
- if (proxyUrl && proxyUrl.hostname) {
- const agentOptions = {
- maxSockets,
- keepAlive: this._keepAlive,
- proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
- proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
- })), { host: proxyUrl.hostname, port: proxyUrl.port })
- };
- let tunnelAgent;
- const overHttps = proxyUrl.protocol === 'https:';
- if (usingSsl) {
- tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
- }
- else {
- tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
- }
- agent = tunnelAgent(agentOptions);
- this._proxyAgent = agent;
- }
- // if reusing agent across request and tunneling agent isn't assigned create a new agent
- if (this._keepAlive && !agent) {
- const options = { keepAlive: this._keepAlive, maxSockets };
- agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
- this._agent = agent;
- }
- // if not using private agent and tunnel agent isn't setup then use global agent
- if (!agent) {
- agent = usingSsl ? https.globalAgent : http.globalAgent;
- }
- if (usingSsl && this._ignoreSslError) {
- // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
- // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
- // we have to cast it to any and change it directly
- agent.options = Object.assign(agent.options || {}, {
- rejectUnauthorized: false
- });
- }
- return agent;
+ /**
+ * Adds an HTML image tag to the summary buffer
+ *
+ * @param {string} src path to the image you want to embed
+ * @param {string} alt text description of the image
+ * @param {SummaryImageOptions} options (optional) additional image attributes
+ *
+ * @returns {Summary} summary instance
+ */
+ addImage(src, alt, options) {
+ const { width, height } = options || {};
+ const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
+ const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
+ return this.addRaw(element).addEOL();
}
- _performExponentialBackoff(retryNumber) {
- return __awaiter(this, void 0, void 0, function* () {
- retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
- const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
- return new Promise(resolve => setTimeout(() => resolve(), ms));
- });
+ /**
+ * Adds an HTML section heading element
+ *
+ * @param {string} text heading text
+ * @param {number | string} [level=1] (optional) the heading level, default: 1
+ *
+ * @returns {Summary} summary instance
+ */
+ addHeading(text, level) {
+ const tag = `h${level}`;
+ const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
+ ? tag
+ : 'h1';
+ const element = this.wrap(allowedTag, text);
+ return this.addRaw(element).addEOL();
}
- _processResponse(res, options) {
- return __awaiter(this, void 0, void 0, function* () {
- return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
- const statusCode = res.message.statusCode || 0;
- const response = {
- statusCode,
- result: null,
- headers: {}
- };
- // not found leads to null obj returned
- if (statusCode === HttpCodes.NotFound) {
- resolve(response);
- }
- // get the result from the body
- function dateTimeDeserializer(key, value) {
- if (typeof value === 'string') {
- const a = new Date(value);
- if (!isNaN(a.valueOf())) {
- return a;
- }
- }
- return value;
- }
- let obj;
- let contents;
- try {
- contents = yield res.readBody();
- if (contents && contents.length > 0) {
- if (options && options.deserializeDates) {
- obj = JSON.parse(contents, dateTimeDeserializer);
- }
- else {
- obj = JSON.parse(contents);
- }
- response.result = obj;
- }
- response.headers = res.message.headers;
- }
- catch (err) {
- // Invalid resource (contents not json); leaving result obj null
- }
- // note that 3xx redirects are handled by the http layer.
- if (statusCode > 299) {
- let msg;
- // if exception/error in body, attempt to get better error
- if (obj && obj.message) {
- msg = obj.message;
- }
- else if (contents && contents.length > 0) {
- // it may be the case that the exception is in the body message as string
- msg = contents;
- }
- else {
- msg = `Failed request: (${statusCode})`;
- }
- const err = new HttpClientError(msg, statusCode);
- err.result = response.result;
- reject(err);
- }
- else {
- resolve(response);
- }
- }));
- });
- }
-}
-exports.HttpClient = HttpClient;
-const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
-//# sourceMappingURL=index.js.map
-
-/***/ }),
-
-/***/ 9835:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.checkBypass = exports.getProxyUrl = void 0;
-function getProxyUrl(reqUrl) {
- const usingSsl = reqUrl.protocol === 'https:';
- if (checkBypass(reqUrl)) {
- return undefined;
- }
- const proxyVar = (() => {
- if (usingSsl) {
- return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
- }
- else {
- return process.env['http_proxy'] || process.env['HTTP_PROXY'];
- }
- })();
- if (proxyVar) {
- return new URL(proxyVar);
- }
- else {
- return undefined;
- }
-}
-exports.getProxyUrl = getProxyUrl;
-function checkBypass(reqUrl) {
- if (!reqUrl.hostname) {
- return false;
- }
- const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
- if (!noProxy) {
- return false;
- }
- // Determine the request port
- let reqPort;
- if (reqUrl.port) {
- reqPort = Number(reqUrl.port);
- }
- else if (reqUrl.protocol === 'http:') {
- reqPort = 80;
+ /**
+ * Adds an HTML thematic break (<hr>) to the summary buffer
+ *
+ * @returns {Summary} summary instance
+ */
+ addSeparator() {
+ const element = this.wrap('hr', null);
+ return this.addRaw(element).addEOL();
}
- else if (reqUrl.protocol === 'https:') {
- reqPort = 443;
+ /**
+ * Adds an HTML line break (<br>) to the summary buffer
+ *
+ * @returns {Summary} summary instance
+ */
+ addBreak() {
+ const element = this.wrap('br', null);
+ return this.addRaw(element).addEOL();
}
- // Format the request hostname and hostname with port
- const upperReqHosts = [reqUrl.hostname.toUpperCase()];
- if (typeof reqPort === 'number') {
- upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
+ /**
+ * Adds an HTML blockquote to the summary buffer
+ *
+ * @param {string} text quote text
+ * @param {string} cite (optional) citation url
+ *
+ * @returns {Summary} summary instance
+ */
+ addQuote(text, cite) {
+ const attrs = Object.assign({}, (cite && { cite }));
+ const element = this.wrap('blockquote', text, attrs);
+ return this.addRaw(element).addEOL();
}
- // Compare request host against noproxy
- for (const upperNoProxyItem of noProxy
- .split(',')
- .map(x => x.trim().toUpperCase())
- .filter(x => x)) {
- if (upperReqHosts.some(x => x === upperNoProxyItem)) {
- return true;
- }
+ /**
+ * Adds an HTML anchor tag to the summary buffer
+ *
+ * @param {string} text link text/content
+ * @param {string} href hyperlink
+ *
+ * @returns {Summary} summary instance
+ */
+ addLink(text, href) {
+ const element = this.wrap('a', text, { href });
+ return this.addRaw(element).addEOL();
}
- return false;
}
-exports.checkBypass = checkBypass;
-//# sourceMappingURL=proxy.js.map
+const _summary = new Summary();
+/**
+ * @deprecated use `core.summary`
+ */
+exports.markdownSummary = _summary;
+exports.summary = _summary;
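+// Illustrative usage sketch (comment only), assuming a runner that sets
+// GITHUB_STEP_SUMMARY:
+//   await core.summary
+//     .addHeading('Skip check')
+//     .addTable([[{data: 'Job', header: true}, {data: 'Result', header: true}],
+//                ['build', 'skipped']])
+//     .write();
+// Each add*() call only appends to the in-memory buffer; write() flushes it to the file.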
+//# sourceMappingURL=summary.js.map
/***/ }),
-/***/ 334:
+/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
-
+// We use any as a valid input type
+/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
-const REGEX_IS_INSTALLATION = /^ghs_/;
-const REGEX_IS_USER_TO_SERVER = /^ghu_/;
-async function auth(token) {
- const isApp = token.split(/\./).length === 3;
- const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
- const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
- const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
- return {
- type: "token",
- token: token,
- tokenType
- };
+exports.toCommandProperties = exports.toCommandValue = void 0;
+/**
+ * Sanitizes an input into a string so it can be passed into issueCommand safely
+ * @param input input to sanitize into a string
+ */
+function toCommandValue(input) {
+ if (input === null || input === undefined) {
+ return '';
+ }
+ else if (typeof input === 'string' || input instanceof String) {
+ return input;
+ }
+ return JSON.stringify(input);
}
-
+exports.toCommandValue = toCommandValue;
/**
- * Prefix token for usage in the Authorization header
*
- * @param token OAuth token or JSON Web Token
+ * @param annotationProperties
+ * @returns The command properties to send with the actual annotation command
+ * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
*/
-function withAuthorizationPrefix(token) {
- if (token.split(/\./).length === 3) {
- return `bearer ${token}`;
- }
-
- return `token ${token}`;
-}
-
-async function hook(token, request, route, parameters) {
- const endpoint = request.endpoint.merge(route, parameters);
- endpoint.headers.authorization = withAuthorizationPrefix(token);
- return request(endpoint);
+function toCommandProperties(annotationProperties) {
+ if (!Object.keys(annotationProperties).length) {
+ return {};
+ }
+ return {
+ title: annotationProperties.title,
+ file: annotationProperties.file,
+ line: annotationProperties.startLine,
+ endLine: annotationProperties.endLine,
+ col: annotationProperties.startColumn,
+ endColumn: annotationProperties.endColumn
+ };
}
-
-const createTokenAuth = function createTokenAuth(token) {
- if (!token) {
- throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
- }
-
- if (typeof token !== "string") {
- throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
- }
-
- token = token.replace(/^(token|bearer) +/i, "");
- return Object.assign(auth.bind(null, token), {
- hook: hook.bind(null, token)
- });
-};
-
-exports.createTokenAuth = createTokenAuth;
-//# sourceMappingURL=index.js.map
-
+exports.toCommandProperties = toCommandProperties;
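+// Illustrative sketch (comment only): toCommandProperties() just renames the
+// annotation fields to the keys the runner expects, e.g.
+//   toCommandProperties({title: 'Lint', file: 'src/a.ts', startLine: 3, endLine: 3})
+//   // -> {title: 'Lint', file: 'src/a.ts', line: 3, endLine: 3, col: undefined, endColumn: undefined}
+// The title and file path above are placeholder values.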
+//# sourceMappingURL=utils.js.map
/***/ }),
-/***/ 6762:
+/***/ 4087:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-
Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.Context = void 0;
+const fs_1 = __nccwpck_require__(7147);
+const os_1 = __nccwpck_require__(2037);
+class Context {
+ /**
+ * Hydrate the context from the environment
+ */
+ constructor() {
+ var _a, _b, _c;
+ this.payload = {};
+ if (process.env.GITHUB_EVENT_PATH) {
+ if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
+ this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
+ }
+ else {
+ const path = process.env.GITHUB_EVENT_PATH;
+ process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
+ }
+ }
+ this.eventName = process.env.GITHUB_EVENT_NAME;
+ this.sha = process.env.GITHUB_SHA;
+ this.ref = process.env.GITHUB_REF;
+ this.workflow = process.env.GITHUB_WORKFLOW;
+ this.action = process.env.GITHUB_ACTION;
+ this.actor = process.env.GITHUB_ACTOR;
+ this.job = process.env.GITHUB_JOB;
+ this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
+ this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
+ this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
+ this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
+ this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
+ }
+ get issue() {
+ const payload = this.payload;
+ return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
+ }
+ get repo() {
+ if (process.env.GITHUB_REPOSITORY) {
+ const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
+ return { owner, repo };
+ }
+ if (this.payload.repository) {
+ return {
+ owner: this.payload.repository.owner.login,
+ repo: this.payload.repository.name
+ };
+ }
+ throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'");
+ }
+}
+exports.Context = Context;
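+// Illustrative usage sketch (comment only): inside a job the hydrated context
+// exposes the trigger payload plus convenience getters, e.g.
+//   const {owner, repo} = github.context.repo;   // parsed from GITHUB_REPOSITORY
+//   const sha = github.context.sha;              // commit that triggered the run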
+//# sourceMappingURL=context.js.map
-var universalUserAgent = __nccwpck_require__(5030);
-var beforeAfterHook = __nccwpck_require__(3682);
-var request = __nccwpck_require__(6234);
-var graphql = __nccwpck_require__(8467);
-var authToken = __nccwpck_require__(334);
-
-function _objectWithoutPropertiesLoose(source, excluded) {
- if (source == null) return {};
- var target = {};
- var sourceKeys = Object.keys(source);
- var key, i;
-
- for (i = 0; i < sourceKeys.length; i++) {
- key = sourceKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- target[key] = source[key];
- }
+/***/ }),
- return target;
-}
+/***/ 5438:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-function _objectWithoutProperties(source, excluded) {
- if (source == null) return {};
+"use strict";
- var target = _objectWithoutPropertiesLoose(source, excluded);
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getOctokit = exports.context = void 0;
+const Context = __importStar(__nccwpck_require__(4087));
+const utils_1 = __nccwpck_require__(3030);
+exports.context = new Context.Context();
+/**
+ * Returns a hydrated octokit ready to use for GitHub Actions
+ *
+ * @param token the repo PAT or GITHUB_TOKEN
+ * @param options other options to set
+ */
+function getOctokit(token, options, ...additionalPlugins) {
+ const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins);
+ return new GitHubWithPlugins(utils_1.getOctokitOptions(token, options));
+}
+exports.getOctokit = getOctokit;
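+// Illustrative usage sketch (comment only), assuming a GITHUB_TOKEN is passed in
+// by the caller:
+//   const octokit = github.getOctokit(token);
+//   const runs = await octokit.rest.actions.listWorkflowRunsForRepo({owner, repo});
+// `token`, `owner` and `repo` are placeholders supplied by the calling code.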
+//# sourceMappingURL=github.js.map
- var key, i;
+/***/ }),
- if (Object.getOwnPropertySymbols) {
- var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
+/***/ 7914:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- for (i = 0; i < sourceSymbolKeys.length; i++) {
- key = sourceSymbolKeys[i];
- if (excluded.indexOf(key) >= 0) continue;
- if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
- target[key] = source[key];
- }
- }
+"use strict";
- return target;
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
+const httpClient = __importStar(__nccwpck_require__(6255));
+function getAuthString(token, options) {
+ if (!token && !options.auth) {
+ throw new Error('Parameter token or opts.auth is required');
+ }
+ else if (token && options.auth) {
+ throw new Error('Parameters token and opts.auth may not both be specified');
+ }
+ return typeof options.auth === 'string' ? options.auth : `token ${token}`;
+}
+exports.getAuthString = getAuthString;
+function getProxyAgent(destinationUrl) {
+ const hc = new httpClient.HttpClient();
+ return hc.getAgent(destinationUrl);
+}
+exports.getProxyAgent = getProxyAgent;
+function getApiBaseUrl() {
+ return process.env['GITHUB_API_URL'] || 'https://api.github.com';
}
+exports.getApiBaseUrl = getApiBaseUrl;
+//# sourceMappingURL=utils.js.map
-const VERSION = "3.6.0";
+/***/ }),
-const _excluded = ["authStrategy"];
-class Octokit {
- constructor(options = {}) {
- const hook = new beforeAfterHook.Collection();
- const requestDefaults = {
- baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
- headers: {},
- request: Object.assign({}, options.request, {
- // @ts-ignore internal usage only, no need to type
- hook: hook.bind(null, "request")
- }),
- mediaType: {
- previews: [],
- format: ""
- }
- }; // prepend default user agent with `options.userAgent` if set
+/***/ 3030:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
- requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
+"use strict";
- if (options.baseUrl) {
- requestDefaults.baseUrl = options.baseUrl;
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0;
+const Context = __importStar(__nccwpck_require__(4087));
+const Utils = __importStar(__nccwpck_require__(7914));
+// octokit + plugins
+const core_1 = __nccwpck_require__(6762);
+const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044);
+const plugin_paginate_rest_1 = __nccwpck_require__(4193);
+exports.context = new Context.Context();
+const baseUrl = Utils.getApiBaseUrl();
+exports.defaults = {
+ baseUrl,
+ request: {
+ agent: Utils.getProxyAgent(baseUrl)
}
-
- if (options.previews) {
- requestDefaults.mediaType.previews = options.previews;
+};
+exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults);
+/**
+ * Convenience function to correctly format Octokit Options to pass into the constructor.
+ *
+ * @param token the repo PAT or GITHUB_TOKEN
+ * @param options other options to set
+ */
+function getOctokitOptions(token, options) {
+ const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller
+ // Auth
+ const auth = Utils.getAuthString(token, opts);
+ if (auth) {
+ opts.auth = auth;
}
+ return opts;
+}
+exports.getOctokitOptions = getOctokitOptions;
+//# sourceMappingURL=utils.js.map
- if (options.timeZone) {
- requestDefaults.headers["time-zone"] = options.timeZone;
- }
+/***/ }),
- this.request = request.request.defaults(requestDefaults);
- this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
- this.log = Object.assign({
- debug: () => {},
- info: () => {},
- warn: console.warn.bind(console),
- error: console.error.bind(console)
- }, options.log);
- this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
- // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
- // (2) If only `options.auth` is set, use the default token authentication strategy.
- // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
- // TODO: type `options.auth` based on `options.authStrategy`.
-
- if (!options.authStrategy) {
- if (!options.auth) {
- // (1)
- this.auth = async () => ({
- type: "unauthenticated"
- });
- } else {
- // (2)
- const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
-
- hook.wrap("request", auth.hook);
- this.auth = auth;
- }
- } else {
- const {
- authStrategy
- } = options,
- otherOptions = _objectWithoutProperties(options, _excluded);
-
- const auth = authStrategy(Object.assign({
- request: this.request,
- log: this.log,
- // we pass the current octokit instance as well as its constructor options
- // to allow for authentication strategies that return a new octokit instance
- // that shares the same internal state as the current one. The original
- // requirement for this was the "event-octokit" authentication strategy
- // of https://github.com/probot/octokit-auth-probot.
- octokit: this,
- octokitOptions: otherOptions
- }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
-
- hook.wrap("request", auth.hook);
- this.auth = auth;
- } // apply plugins
- // https://stackoverflow.com/a/16345172
-
-
- const classConstructor = this.constructor;
- classConstructor.plugins.forEach(plugin => {
- Object.assign(this, plugin(this, options));
- });
- }
-
- static defaults(defaults) {
- const OctokitWithDefaults = class extends this {
- constructor(...args) {
- const options = args[0] || {};
-
- if (typeof defaults === "function") {
- super(defaults(options));
- return;
- }
-
- super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
- userAgent: `${options.userAgent} ${defaults.userAgent}`
- } : null));
- }
-
- };
- return OctokitWithDefaults;
- }
- /**
- * Attach a plugin (or many) to your Octokit instance.
- *
- * @example
- * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
- */
-
-
- static plugin(...newPlugins) {
- var _a;
-
- const currentPlugins = this.plugins;
- const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
- return NewOctokit;
- }
-
-}
-Octokit.VERSION = VERSION;
-Octokit.plugins = [];
-
-exports.Octokit = Octokit;
-//# sourceMappingURL=index.js.map
-
-
-/***/ }),
-
-/***/ 9440:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+/***/ 5526:
+/***/ (function(__unused_webpack_module, exports) {
"use strict";
-
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-var isPlainObject = __nccwpck_require__(3287);
-var universalUserAgent = __nccwpck_require__(5030);
-
-function lowercaseKeys(object) {
- if (!object) {
- return {};
- }
-
- return Object.keys(object).reduce((newObj, key) => {
- newObj[key.toLowerCase()] = object[key];
- return newObj;
- }, {});
-}
-
-function mergeDeep(defaults, options) {
- const result = Object.assign({}, defaults);
- Object.keys(options).forEach(key => {
- if (isPlainObject.isPlainObject(options[key])) {
- if (!(key in defaults)) Object.assign(result, {
- [key]: options[key]
- });else result[key] = mergeDeep(defaults[key], options[key]);
- } else {
- Object.assign(result, {
- [key]: options[key]
- });
+exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
+class BasicCredentialHandler {
+ constructor(username, password) {
+ this.username = username;
+ this.password = password;
}
- });
- return result;
-}
-
-function removeUndefinedProperties(obj) {
- for (const key in obj) {
- if (obj[key] === undefined) {
- delete obj[key];
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
+ }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
}
- }
-
- return obj;
}
-
-function merge(defaults, route, options) {
- if (typeof route === "string") {
- let [method, url] = route.split(" ");
- options = Object.assign(url ? {
- method,
- url
- } : {
- url: method
- }, options);
- } else {
- options = Object.assign({}, route);
- } // lowercase header names before merging with defaults to avoid duplicates
-
-
- options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
-
- removeUndefinedProperties(options);
- removeUndefinedProperties(options.headers);
- const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
-
- if (defaults && defaults.mediaType.previews.length) {
- mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
- }
-
- mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
- return mergedOptions;
+exports.BasicCredentialHandler = BasicCredentialHandler;
+class BearerCredentialHandler {
+ constructor(token) {
+ this.token = token;
+ }
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Bearer ${this.token}`;
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
+ }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
+ }
}
-
-function addQueryParameters(url, parameters) {
- const separator = /\?/.test(url) ? "&" : "?";
- const names = Object.keys(parameters);
-
- if (names.length === 0) {
- return url;
- }
-
- return url + separator + names.map(name => {
- if (name === "q") {
- return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
+exports.BearerCredentialHandler = BearerCredentialHandler;
+class PersonalAccessTokenCredentialHandler {
+ constructor(token) {
+ this.token = token;
+ }
+ // currently implements pre-authorization
+ // TODO: support preAuth = false where it hooks on 401
+ prepareRequest(options) {
+ if (!options.headers) {
+ throw Error('The request has no headers');
+ }
+ options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
+ }
+ // This handler cannot handle 401
+ canHandleAuthentication() {
+ return false;
+ }
+ handleAuthentication() {
+ return __awaiter(this, void 0, void 0, function* () {
+ throw new Error('not implemented');
+ });
}
-
- return `${name}=${encodeURIComponent(parameters[name])}`;
- }).join("&");
}
+exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
+//# sourceMappingURL=auth.js.map
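// Illustrative sketch only (not part of the bundled auth.js above): how the three
// credential handlers pre-authorize a request. Each prepareRequest call writes an
// Authorization header onto the outgoing options; none of the handlers respond to
// 401 challenges (canHandleAuthentication returns false). The `opts` object below
// is a hypothetical stand-in for the request options that HttpClient passes in.
const opts = { headers: {} };
new BasicCredentialHandler('user', 'secret').prepareRequest(opts);
// opts.headers['Authorization'] === 'Basic ' + Buffer.from('user:secret').toString('base64')
new BearerCredentialHandler('my-token').prepareRequest(opts);
// opts.headers['Authorization'] === 'Bearer my-token'
new PersonalAccessTokenCredentialHandler('my-token').prepareRequest(opts);
// opts.headers['Authorization'] === 'Basic ' + Buffer.from('PAT:my-token').toString('base64')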
-const urlVariableRegex = /\{[^}]+\}/g;
+/***/ }),
-function removeNonChars(variableName) {
- return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
-}
+/***/ 6255:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
-function extractUrlVariableNames(url) {
- const matches = url.match(urlVariableRegex);
+"use strict";
- if (!matches) {
- return [];
- }
-
- return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
-}
-
-function omit(object, keysToOmit) {
- return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
- obj[key] = object[key];
- return obj;
- }, {});
+/* eslint-disable @typescript-eslint/no-explicit-any */
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
+ Object.defineProperty(o, "default", { enumerable: true, value: v });
+}) : function(o, v) {
+ o["default"] = v;
+});
+var __importStar = (this && this.__importStar) || function (mod) {
+ if (mod && mod.__esModule) return mod;
+ var result = {};
+ if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
+ __setModuleDefault(result, mod);
+ return result;
+};
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
+const http = __importStar(__nccwpck_require__(3685));
+const https = __importStar(__nccwpck_require__(5687));
+const pm = __importStar(__nccwpck_require__(9835));
+const tunnel = __importStar(__nccwpck_require__(4294));
+var HttpCodes;
+(function (HttpCodes) {
+ HttpCodes[HttpCodes["OK"] = 200] = "OK";
+ HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
+ HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
+ HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
+ HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
+ HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
+ HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
+ HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
+ HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
+ HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
+ HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
+ HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
+ HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
+ HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
+ HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
+ HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
+ HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
+ HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
+ HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
+ HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
+ HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
+ HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
+ HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
+ HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
+ HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
+ HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
+ HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
+})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
+var Headers;
+(function (Headers) {
+ Headers["Accept"] = "accept";
+ Headers["ContentType"] = "content-type";
+})(Headers = exports.Headers || (exports.Headers = {}));
+var MediaTypes;
+(function (MediaTypes) {
+ MediaTypes["ApplicationJson"] = "application/json";
+})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
+/**
+ * Returns the proxy URL, depending upon the supplied url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+function getProxyUrl(serverUrl) {
+ const proxyUrl = pm.getProxyUrl(new URL(serverUrl));
+ return proxyUrl ? proxyUrl.href : '';
}
-
-// Based on https://github.com/bramstein/url-template, licensed under BSD
-// TODO: create separate package.
-//
-// Copyright (c) 2012-2014, Bram Stein
-// All rights reserved.
-// Redistribution and use in source and binary forms, with or without
-// modification, are permitted provided that the following conditions
-// are met:
-// 1. Redistributions of source code must retain the above copyright
-// notice, this list of conditions and the following disclaimer.
-// 2. Redistributions in binary form must reproduce the above copyright
-// notice, this list of conditions and the following disclaimer in the
-// documentation and/or other materials provided with the distribution.
-// 3. The name of the author may not be used to endorse or promote products
-// derived from this software without specific prior written permission.
-// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
-// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
-// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
-// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
-// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
-// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
-// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
-// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
-// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
-// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-
-/* istanbul ignore file */
-function encodeReserved(str) {
- return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
- if (!/%[0-9A-Fa-f]/.test(part)) {
- part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+exports.getProxyUrl = getProxyUrl;
+const HttpRedirectCodes = [
+ HttpCodes.MovedPermanently,
+ HttpCodes.ResourceMoved,
+ HttpCodes.SeeOther,
+ HttpCodes.TemporaryRedirect,
+ HttpCodes.PermanentRedirect
+];
+const HttpResponseRetryCodes = [
+ HttpCodes.BadGateway,
+ HttpCodes.ServiceUnavailable,
+ HttpCodes.GatewayTimeout
+];
+const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
+const ExponentialBackoffCeiling = 10;
+const ExponentialBackoffTimeSlice = 5;
+class HttpClientError extends Error {
+ constructor(message, statusCode) {
+ super(message);
+ this.name = 'HttpClientError';
+ this.statusCode = statusCode;
+ Object.setPrototypeOf(this, HttpClientError.prototype);
}
-
- return part;
- }).join("");
-}
-
-function encodeUnreserved(str) {
- return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
- return "%" + c.charCodeAt(0).toString(16).toUpperCase();
- });
-}
-
-function encodeValue(operator, value, key) {
- value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
-
- if (key) {
- return encodeUnreserved(key) + "=" + value;
- } else {
- return value;
- }
}
-
-function isDefined(value) {
- return value !== undefined && value !== null;
+exports.HttpClientError = HttpClientError;
+class HttpClientResponse {
+ constructor(message) {
+ this.message = message;
+ }
+ readBody() {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () {
+ let output = Buffer.alloc(0);
+ this.message.on('data', (chunk) => {
+ output = Buffer.concat([output, chunk]);
+ });
+ this.message.on('end', () => {
+ resolve(output.toString());
+ });
+ }));
+ });
+ }
}
-
-function isKeyOperator(operator) {
- return operator === ";" || operator === "&" || operator === "?";
+exports.HttpClientResponse = HttpClientResponse;
+function isHttps(requestUrl) {
+ const parsedUrl = new URL(requestUrl);
+ return parsedUrl.protocol === 'https:';
}
-
-function getValues(context, operator, key, modifier) {
- var value = context[key],
- result = [];
-
- if (isDefined(value) && value !== "") {
- if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
- value = value.toString();
-
- if (modifier && modifier !== "*") {
- value = value.substring(0, parseInt(modifier, 10));
- }
-
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
- } else {
- if (modifier === "*") {
- if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
- });
- } else {
- Object.keys(value).forEach(function (k) {
- if (isDefined(value[k])) {
- result.push(encodeValue(operator, value[k], k));
+exports.isHttps = isHttps;
+class HttpClient {
+ constructor(userAgent, handlers, requestOptions) {
+ this._ignoreSslError = false;
+ this._allowRedirects = true;
+ this._allowRedirectDowngrade = false;
+ this._maxRedirects = 50;
+ this._allowRetries = false;
+ this._maxRetries = 1;
+ this._keepAlive = false;
+ this._disposed = false;
+ this.userAgent = userAgent;
+ this.handlers = handlers || [];
+ this.requestOptions = requestOptions;
+ if (requestOptions) {
+ if (requestOptions.ignoreSslError != null) {
+ this._ignoreSslError = requestOptions.ignoreSslError;
}
- });
- }
- } else {
- const tmp = [];
-
- if (Array.isArray(value)) {
- value.filter(isDefined).forEach(function (value) {
- tmp.push(encodeValue(operator, value));
- });
- } else {
- Object.keys(value).forEach(function (k) {
- if (isDefined(value[k])) {
- tmp.push(encodeUnreserved(k));
- tmp.push(encodeValue(operator, value[k].toString()));
+ this._socketTimeout = requestOptions.socketTimeout;
+ if (requestOptions.allowRedirects != null) {
+ this._allowRedirects = requestOptions.allowRedirects;
+ }
+ if (requestOptions.allowRedirectDowngrade != null) {
+ this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
+ }
+ if (requestOptions.maxRedirects != null) {
+ this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
+ }
+ if (requestOptions.keepAlive != null) {
+ this._keepAlive = requestOptions.keepAlive;
+ }
+ if (requestOptions.allowRetries != null) {
+ this._allowRetries = requestOptions.allowRetries;
+ }
+ if (requestOptions.maxRetries != null) {
+ this._maxRetries = requestOptions.maxRetries;
}
- });
- }
-
- if (isKeyOperator(operator)) {
- result.push(encodeUnreserved(key) + "=" + tmp.join(","));
- } else if (tmp.length !== 0) {
- result.push(tmp.join(","));
}
- }
}
- } else {
- if (operator === ";") {
- if (isDefined(value)) {
- result.push(encodeUnreserved(key));
- }
- } else if (value === "" && (operator === "&" || operator === "?")) {
- result.push(encodeUnreserved(key) + "=");
- } else if (value === "") {
- result.push("");
+ options(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
+ });
}
- }
-
- return result;
-}
-
-function parseUrl(template) {
- return {
- expand: expand.bind(null, template)
- };
-}
-
-function expand(template, context) {
- var operators = ["+", "#", ".", "/", ";", "?", "&"];
- return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
- if (expression) {
- let operator = "";
- const values = [];
-
- if (operators.indexOf(expression.charAt(0)) !== -1) {
- operator = expression.charAt(0);
- expression = expression.substr(1);
- }
-
- expression.split(/,/g).forEach(function (variable) {
- var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
- values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
- });
-
- if (operator && operator !== "+") {
- var separator = ",";
-
- if (operator === "?") {
- separator = "&";
- } else if (operator !== "#") {
- separator = operator;
- }
-
- return (values.length !== 0 ? operator : "") + values.join(separator);
- } else {
- return values.join(",");
- }
- } else {
- return encodeReserved(literal);
+ get(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('GET', requestUrl, null, additionalHeaders || {});
+ });
}
- });
-}
-
-function parse(options) {
- // https://fetch.spec.whatwg.org/#methods
- let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
-
- let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
- let headers = Object.assign({}, options.headers);
- let body;
- let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
-
- const urlVariableNames = extractUrlVariableNames(url);
- url = parseUrl(url).expand(parameters);
-
- if (!/^http/.test(url)) {
- url = options.baseUrl + url;
- }
-
- const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
- const remainingParameters = omit(parameters, omittedParameters);
- const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
-
- if (!isBinaryRequest) {
- if (options.mediaType.format) {
- // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
- headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
+ del(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('DELETE', requestUrl, null, additionalHeaders || {});
+ });
}
-
- if (options.mediaType.previews.length) {
- const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
- headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
- const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
- return `application/vnd.github.${preview}-preview${format}`;
- }).join(",");
+ post(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('POST', requestUrl, data, additionalHeaders || {});
+ });
}
- } // for GET/HEAD requests, set URL query parameters from remaining parameters
- // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
-
-
- if (["GET", "HEAD"].includes(method)) {
- url = addQueryParameters(url, remainingParameters);
- } else {
- if ("data" in remainingParameters) {
- body = remainingParameters.data;
- } else {
- if (Object.keys(remainingParameters).length) {
- body = remainingParameters;
- } else {
- headers["content-length"] = 0;
- }
+ patch(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('PATCH', requestUrl, data, additionalHeaders || {});
+ });
}
- } // default content-type for JSON if body is set
-
-
- if (!headers["content-type"] && typeof body !== "undefined") {
- headers["content-type"] = "application/json; charset=utf-8";
- } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
- // fetch does not allow to set `content-length` header, but we can set body to an empty string
-
-
- if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
- body = "";
- } // Only return body/request keys if present
-
-
- return Object.assign({
- method,
- url,
- headers
- }, typeof body !== "undefined" ? {
- body
- } : null, options.request ? {
- request: options.request
- } : null);
-}
-
-function endpointWithDefaults(defaults, route, options) {
- return parse(merge(defaults, route, options));
-}
-
-function withDefaults(oldDefaults, newDefaults) {
- const DEFAULTS = merge(oldDefaults, newDefaults);
- const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
- return Object.assign(endpoint, {
- DEFAULTS,
- defaults: withDefaults.bind(null, DEFAULTS),
- merge: merge.bind(null, DEFAULTS),
- parse
- });
-}
-
-const VERSION = "6.0.12";
-
-const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
-// So we use RequestParameters and add method as additional required property.
-
-const DEFAULTS = {
- method: "GET",
- baseUrl: "https://api.github.com",
- headers: {
- accept: "application/vnd.github.v3+json",
- "user-agent": userAgent
- },
- mediaType: {
- format: "",
- previews: []
- }
-};
-
-const endpoint = withDefaults(null, DEFAULTS);
-
-exports.endpoint = endpoint;
-//# sourceMappingURL=index.js.map
-
-
-/***/ }),
-
-/***/ 8467:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
-
-"use strict";
-
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-var request = __nccwpck_require__(6234);
-var universalUserAgent = __nccwpck_require__(5030);
-
-const VERSION = "4.8.0";
-
-function _buildMessageForResponseErrors(data) {
- return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n");
-}
-
-class GraphqlResponseError extends Error {
- constructor(request, headers, response) {
- super(_buildMessageForResponseErrors(response));
- this.request = request;
- this.headers = headers;
- this.response = response;
- this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties.
-
- this.errors = response.errors;
- this.data = response.data; // Maintains proper stack trace (only available on V8)
-
- /* istanbul ignore next */
-
- if (Error.captureStackTrace) {
- Error.captureStackTrace(this, this.constructor);
+ put(requestUrl, data, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('PUT', requestUrl, data, additionalHeaders || {});
+ });
}
- }
-
-}
-
-const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
-const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
-const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
-function graphql(request, query, options) {
- if (options) {
- if (typeof query === "string" && "query" in options) {
- return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
+ head(requestUrl, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request('HEAD', requestUrl, null, additionalHeaders || {});
+ });
}
-
- for (const key in options) {
- if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;
- return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
+ sendStream(verb, requestUrl, stream, additionalHeaders) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return this.request(verb, requestUrl, stream, additionalHeaders);
+ });
}
- }
-
- const parsedOptions = typeof query === "string" ? Object.assign({
- query
- }, options) : query;
- const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
- if (NON_VARIABLE_OPTIONS.includes(key)) {
- result[key] = parsedOptions[key];
- return result;
+ /**
+ * Gets a typed object from an endpoint
+ * Be aware that a 404 response resolves with a null result. Other errors (4xx, 5xx) reject the promise
+ */
+ getJson(requestUrl, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ const res = yield this.get(requestUrl, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
}
-
- if (!result.variables) {
- result.variables = {};
+ postJson(requestUrl, obj, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ const res = yield this.post(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
}
-
- result.variables[key] = parsedOptions[key];
- return result;
- }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
- // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
-
- const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
-
- if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
- requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
- }
-
- return request(requestOptions).then(response => {
- if (response.data.errors) {
- const headers = {};
-
- for (const key of Object.keys(response.headers)) {
- headers[key] = response.headers[key];
- }
-
- throw new GraphqlResponseError(requestOptions, headers, response.data);
+ putJson(requestUrl, obj, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ const res = yield this.put(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
}
-
- return response.data.data;
- });
-}
-
-function withDefaults(request$1, newDefaults) {
- const newRequest = request$1.defaults(newDefaults);
-
- const newApi = (query, options) => {
- return graphql(newRequest, query, options);
- };
-
- return Object.assign(newApi, {
- defaults: withDefaults.bind(null, newRequest),
- endpoint: request.request.endpoint
- });
-}
-
-const graphql$1 = withDefaults(request.request, {
- headers: {
- "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
- },
- method: "POST",
- url: "/graphql"
-});
-function withCustomRequest(customRequest) {
- return withDefaults(customRequest, {
- method: "POST",
- url: "/graphql"
- });
-}
-
-exports.GraphqlResponseError = GraphqlResponseError;
-exports.graphql = graphql$1;
-exports.withCustomRequest = withCustomRequest;
-//# sourceMappingURL=index.js.map
-
-
-/***/ }),
-
-/***/ 4193:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-const VERSION = "2.21.3";
-
-function ownKeys(object, enumerableOnly) {
- var keys = Object.keys(object);
-
- if (Object.getOwnPropertySymbols) {
- var symbols = Object.getOwnPropertySymbols(object);
- enumerableOnly && (symbols = symbols.filter(function (sym) {
- return Object.getOwnPropertyDescriptor(object, sym).enumerable;
- })), keys.push.apply(keys, symbols);
- }
-
- return keys;
-}
-
-function _objectSpread2(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = null != arguments[i] ? arguments[i] : {};
- i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
- _defineProperty(target, key, source[key]);
- }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
- Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
- });
- }
-
- return target;
-}
-
-function _defineProperty(obj, key, value) {
- if (key in obj) {
- Object.defineProperty(obj, key, {
- value: value,
- enumerable: true,
- configurable: true,
- writable: true
- });
- } else {
- obj[key] = value;
- }
-
- return obj;
-}
-
-/**
- * Some “list” response that can be paginated have a different response structure
- *
- * They have a `total_count` key in the response (search also has `incomplete_results`,
- * /installation/repositories also has `repository_selection`), as well as a key with
- * the list of the items which name varies from endpoint to endpoint.
- *
- * Octokit normalizes these responses so that paginated results are always returned following
- * the same structure. One challenge is that if the list response has only one page, no Link
- * header is provided, so this header alone is not sufficient to check wether a response is
- * paginated or not.
- *
- * We check if a "total_count" key is present in the response data, but also make sure that
- * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
- * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
- */
-function normalizePaginatedListResponse(response) {
- // endpoints can respond with 204 if repository is empty
- if (!response.data) {
- return _objectSpread2(_objectSpread2({}, response), {}, {
- data: []
- });
- }
-
- const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
- if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
- // to retrieve the same information.
-
- const incompleteResults = response.data.incomplete_results;
- const repositorySelection = response.data.repository_selection;
- const totalCount = response.data.total_count;
- delete response.data.incomplete_results;
- delete response.data.repository_selection;
- delete response.data.total_count;
- const namespaceKey = Object.keys(response.data)[0];
- const data = response.data[namespaceKey];
- response.data = data;
-
- if (typeof incompleteResults !== "undefined") {
- response.data.incomplete_results = incompleteResults;
- }
-
- if (typeof repositorySelection !== "undefined") {
- response.data.repository_selection = repositorySelection;
- }
-
- response.data.total_count = totalCount;
- return response;
-}
-
-function iterator(octokit, route, parameters) {
- const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
- const requestMethod = typeof route === "function" ? route : octokit.request;
- const method = options.method;
- const headers = options.headers;
- let url = options.url;
- return {
- [Symbol.asyncIterator]: () => ({
- async next() {
- if (!url) return {
- done: true
- };
-
- try {
- const response = await requestMethod({
- method,
- url,
- headers
- });
- const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
- // '; rel="next", ; rel="last"'
- // sets `url` to undefined if "next" URL is not present or `link` header is not set
-
- url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
- return {
- value: normalizedResponse
- };
- } catch (error) {
- if (error.status !== 409) throw error;
- url = "";
- return {
- value: {
- status: 200,
- headers: {},
- data: []
- }
- };
- }
- }
-
- })
- };
-}
-
-function paginate(octokit, route, parameters, mapFn) {
- if (typeof parameters === "function") {
- mapFn = parameters;
- parameters = undefined;
- }
-
- return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
-}
-
-function gather(octokit, results, iterator, mapFn) {
- return iterator.next().then(result => {
- if (result.done) {
- return results;
- }
-
- let earlyExit = false;
-
- function done() {
- earlyExit = true;
+ patchJson(requestUrl, obj, additionalHeaders = {}) {
+ return __awaiter(this, void 0, void 0, function* () {
+ const data = JSON.stringify(obj, null, 2);
+ additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
+ additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
+ const res = yield this.patch(requestUrl, data, additionalHeaders);
+ return this._processResponse(res, this.requestOptions);
+ });
}
-
- results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
-
- if (earlyExit) {
- return results;
+ /**
+ * Makes a raw http request.
+ * All other methods such as get, post, patch, and request ultimately call this.
+ * Prefer get, del, post and patch
+ */
+ request(verb, requestUrl, data, headers) {
+ return __awaiter(this, void 0, void 0, function* () {
+ if (this._disposed) {
+ throw new Error('Client has already been disposed.');
+ }
+ const parsedUrl = new URL(requestUrl);
+ let info = this._prepareRequest(verb, parsedUrl, headers);
+ // Only perform retries on reads since writes may not be idempotent.
+ const maxTries = this._allowRetries && RetryableHttpVerbs.includes(verb)
+ ? this._maxRetries + 1
+ : 1;
+ let numTries = 0;
+ let response;
+ do {
+ response = yield this.requestRaw(info, data);
+ // Check if it's an authentication challenge
+ if (response &&
+ response.message &&
+ response.message.statusCode === HttpCodes.Unauthorized) {
+ let authenticationHandler;
+ for (const handler of this.handlers) {
+ if (handler.canHandleAuthentication(response)) {
+ authenticationHandler = handler;
+ break;
+ }
+ }
+ if (authenticationHandler) {
+ return authenticationHandler.handleAuthentication(this, info, data);
+ }
+ else {
+ // We have received an unauthorized response but have no handlers to handle it.
+ // Let the response return to the caller.
+ return response;
+ }
+ }
+ let redirectsRemaining = this._maxRedirects;
+ while (response.message.statusCode &&
+ HttpRedirectCodes.includes(response.message.statusCode) &&
+ this._allowRedirects &&
+ redirectsRemaining > 0) {
+ const redirectUrl = response.message.headers['location'];
+ if (!redirectUrl) {
+ // if there's no location to redirect to, stop following redirects
+ break;
+ }
+ const parsedRedirectUrl = new URL(redirectUrl);
+ if (parsedUrl.protocol === 'https:' &&
+ parsedUrl.protocol !== parsedRedirectUrl.protocol &&
+ !this._allowRedirectDowngrade) {
+ throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
+ }
+ // we need to finish reading the response before reassigning it,
+ // otherwise the open socket will leak.
+ yield response.readBody();
+ // strip authorization header if redirected to a different hostname
+ if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
+ for (const header in headers) {
+ // header names are case insensitive
+ if (header.toLowerCase() === 'authorization') {
+ delete headers[header];
+ }
+ }
+ }
+ // let's make the request with the new redirectUrl
+ info = this._prepareRequest(verb, parsedRedirectUrl, headers);
+ response = yield this.requestRaw(info, data);
+ redirectsRemaining--;
+ }
+ if (!response.message.statusCode ||
+ !HttpResponseRetryCodes.includes(response.message.statusCode)) {
+ // If not a retry code, return immediately instead of retrying
+ return response;
+ }
+ numTries += 1;
+ if (numTries < maxTries) {
+ yield response.readBody();
+ yield this._performExponentialBackoff(numTries);
+ }
+ } while (numTries < maxTries);
+ return response;
+ });
}
-
- return gather(octokit, results, iterator, mapFn);
- });
-}
-
-const composePaginateRest = Object.assign(paginate, {
- iterator
-});
-
-const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET 
/repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
-
-function isPaginatingEndpoint(arg) {
- if (typeof arg === "string") {
- return paginatingEndpoints.includes(arg);
- } else {
- return false;
- }
-}
-
-/**
- * @param octokit Octokit instance
- * @param options Options passed to Octokit constructor
- */
-
-function paginateRest(octokit) {
- return {
- paginate: Object.assign(paginate.bind(null, octokit), {
- iterator: iterator.bind(null, octokit)
- })
- };
-}
-paginateRest.VERSION = VERSION;
-
-exports.composePaginateRest = composePaginateRest;
-exports.isPaginatingEndpoint = isPaginatingEndpoint;
-exports.paginateRest = paginateRest;
-exports.paginatingEndpoints = paginatingEndpoints;
-//# sourceMappingURL=index.js.map
-
-
-/***/ }),
-
-/***/ 3044:
-/***/ ((__unused_webpack_module, exports) => {
-
-"use strict";
-
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-
-function ownKeys(object, enumerableOnly) {
- var keys = Object.keys(object);
-
- if (Object.getOwnPropertySymbols) {
- var symbols = Object.getOwnPropertySymbols(object);
-
- if (enumerableOnly) {
- symbols = symbols.filter(function (sym) {
- return Object.getOwnPropertyDescriptor(object, sym).enumerable;
- });
+ /**
+ * Needs to be called if keepAlive is set to true in request options.
+ */
+ dispose() {
+ if (this._agent) {
+ this._agent.destroy();
+ }
+ this._disposed = true;
}
-
- keys.push.apply(keys, symbols);
- }
-
- return keys;
-}
-
-function _objectSpread2(target) {
- for (var i = 1; i < arguments.length; i++) {
- var source = arguments[i] != null ? arguments[i] : {};
-
- if (i % 2) {
- ownKeys(Object(source), true).forEach(function (key) {
- _defineProperty(target, key, source[key]);
- });
- } else if (Object.getOwnPropertyDescriptors) {
- Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
- } else {
- ownKeys(Object(source)).forEach(function (key) {
- Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
- });
+ /**
+ * Raw request.
+ * @param info
+ * @param data
+ */
+ requestRaw(info, data) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve, reject) => {
+ function callbackForResult(err, res) {
+ if (err) {
+ reject(err);
+ }
+ else if (!res) {
+ // If `err` is not passed, then `res` must be passed.
+ reject(new Error('Unknown error'));
+ }
+ else {
+ resolve(res);
+ }
+ }
+ this.requestRawWithCallback(info, data, callbackForResult);
+ });
+ });
}
- }
-
- return target;
-}
-
-function _defineProperty(obj, key, value) {
- if (key in obj) {
- Object.defineProperty(obj, key, {
- value: value,
- enumerable: true,
- configurable: true,
- writable: true
- });
- } else {
- obj[key] = value;
- }
-
- return obj;
-}
-
-const Endpoints = {
- actions: {
- addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"],
- addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
- addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
- approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
- cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
- createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
- createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
- createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
+ /**
+ * Raw request with callback.
+ * @param info
+ * @param data
+ * @param onResult
+ */
+ requestRawWithCallback(info, data, onResult) {
+ if (typeof data === 'string') {
+ if (!info.options.headers) {
+ info.options.headers = {};
+ }
+ info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
+ }
+ let callbackCalled = false;
+ function handleResult(err, res) {
+ if (!callbackCalled) {
+ callbackCalled = true;
+ onResult(err, res);
+ }
+ }
+ const req = info.httpModule.request(info.options, (msg) => {
+ const res = new HttpClientResponse(msg);
+ handleResult(undefined, res);
+ });
+ let socket;
+ req.on('socket', sock => {
+ socket = sock;
+ });
+ // If we ever get disconnected, we want the socket to timeout eventually
+ req.setTimeout(this._socketTimeout || 3 * 60000, () => {
+ if (socket) {
+ socket.end();
+ }
+ handleResult(new Error(`Request timeout: ${info.options.path}`));
+ });
+ req.on('error', function (err) {
+ // err has statusCode property
+ // res should have headers
+ handleResult(err);
+ });
+ if (data && typeof data === 'string') {
+ req.write(data, 'utf8');
+ }
+ if (data && typeof data !== 'string') {
+ data.on('close', function () {
+ req.end();
+ });
+ data.pipe(req);
+ }
+ else {
+ req.end();
+ }
+ }
+ /**
+ * Gets an http agent. This function is useful when you need an http agent that handles
+ * routing through a proxy server - depending upon the url and proxy environment variables.
+ * @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
+ */
+ getAgent(serverUrl) {
+ const parsedUrl = new URL(serverUrl);
+ return this._getAgent(parsedUrl);
+ }
+ _prepareRequest(method, requestUrl, headers) {
+ const info = {};
+ info.parsedUrl = requestUrl;
+ const usingSsl = info.parsedUrl.protocol === 'https:';
+ info.httpModule = usingSsl ? https : http;
+ const defaultPort = usingSsl ? 443 : 80;
+ info.options = {};
+ info.options.host = info.parsedUrl.hostname;
+ info.options.port = info.parsedUrl.port
+ ? parseInt(info.parsedUrl.port)
+ : defaultPort;
+ info.options.path =
+ (info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
+ info.options.method = method;
+ info.options.headers = this._mergeHeaders(headers);
+ if (this.userAgent != null) {
+ info.options.headers['user-agent'] = this.userAgent;
+ }
+ info.options.agent = this._getAgent(info.parsedUrl);
+ // gives handlers an opportunity to participate
+ if (this.handlers) {
+ for (const handler of this.handlers) {
+ handler.prepareRequest(info.options);
+ }
+ }
+ return info;
+ }
+ _mergeHeaders(headers) {
+ if (this.requestOptions && this.requestOptions.headers) {
+ return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers || {}));
+ }
+ return lowercaseKeys(headers || {});
+ }
+ _getExistingOrDefaultHeader(additionalHeaders, header, _default) {
+ let clientHeader;
+ if (this.requestOptions && this.requestOptions.headers) {
+ clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
+ }
+ return additionalHeaders[header] || clientHeader || _default;
+ }
+ _getAgent(parsedUrl) {
+ let agent;
+ const proxyUrl = pm.getProxyUrl(parsedUrl);
+ const useProxy = proxyUrl && proxyUrl.hostname;
+ if (this._keepAlive && useProxy) {
+ agent = this._proxyAgent;
+ }
+ if (this._keepAlive && !useProxy) {
+ agent = this._agent;
+ }
+ // if agent is already assigned use that agent.
+ if (agent) {
+ return agent;
+ }
+ const usingSsl = parsedUrl.protocol === 'https:';
+ let maxSockets = 100;
+ if (this.requestOptions) {
+ maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
+ }
+ // This is `useProxy` again, but we need to check `proxyUrl` directly for TypeScript's flow analysis.
+ if (proxyUrl && proxyUrl.hostname) {
+ const agentOptions = {
+ maxSockets,
+ keepAlive: this._keepAlive,
+ proxy: Object.assign(Object.assign({}, ((proxyUrl.username || proxyUrl.password) && {
+ proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
+ })), { host: proxyUrl.hostname, port: proxyUrl.port })
+ };
+ let tunnelAgent;
+ const overHttps = proxyUrl.protocol === 'https:';
+ if (usingSsl) {
+ tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
+ }
+ else {
+ tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
+ }
+ agent = tunnelAgent(agentOptions);
+ this._proxyAgent = agent;
+ }
+ // if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
+ if (this._keepAlive && !agent) {
+ const options = { keepAlive: this._keepAlive, maxSockets };
+ agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
+ this._agent = agent;
+ }
+ // if not using a private agent and a tunnel agent isn't set up, use the global agent
+ if (!agent) {
+ agent = usingSsl ? https.globalAgent : http.globalAgent;
+ }
+ if (usingSsl && this._ignoreSslError) {
+ // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that would affect requests for the entire process
+ // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
+ // we have to cast it to any and change it directly
+ agent.options = Object.assign(agent.options || {}, {
+ rejectUnauthorized: false
+ });
+ }
+ return agent;
+ }
+ _performExponentialBackoff(retryNumber) {
+ return __awaiter(this, void 0, void 0, function* () {
+ retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
+ const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
+ return new Promise(resolve => setTimeout(() => resolve(), ms));
+ });
+ }
+ _processResponse(res, options) {
+ return __awaiter(this, void 0, void 0, function* () {
+ return new Promise((resolve, reject) => __awaiter(this, void 0, void 0, function* () {
+ const statusCode = res.message.statusCode || 0;
+ const response = {
+ statusCode,
+ result: null,
+ headers: {}
+ };
+ // not found leads to null obj returned
+ if (statusCode === HttpCodes.NotFound) {
+ resolve(response);
+ }
+ // get the result from the body
+ function dateTimeDeserializer(key, value) {
+ if (typeof value === 'string') {
+ const a = new Date(value);
+ if (!isNaN(a.valueOf())) {
+ return a;
+ }
+ }
+ return value;
+ }
+ let obj;
+ let contents;
+ try {
+ contents = yield res.readBody();
+ if (contents && contents.length > 0) {
+ if (options && options.deserializeDates) {
+ obj = JSON.parse(contents, dateTimeDeserializer);
+ }
+ else {
+ obj = JSON.parse(contents);
+ }
+ response.result = obj;
+ }
+ response.headers = res.message.headers;
+ }
+ catch (err) {
+ // Invalid resource (contents not json); leaving result obj null
+ }
+ // note that 3xx redirects are handled by the http layer.
+ if (statusCode > 299) {
+ let msg;
+ // if exception/error in body, attempt to get better error
+ if (obj && obj.message) {
+ msg = obj.message;
+ }
+ else if (contents && contents.length > 0) {
+ // it may be the case that the exception is in the body message as string
+ msg = contents;
+ }
+ else {
+ msg = `Failed request: (${statusCode})`;
+ }
+ const err = new HttpClientError(msg, statusCode);
+ err.result = response.result;
+ reject(err);
+ }
+ else {
+ resolve(response);
+ }
+ }));
+ });
+ }
+}
+exports.HttpClient = HttpClient;
+const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
+//# sourceMappingURL=index.js.map
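// Illustrative sketch only (not part of the bundled http-client above): the retry
// policy implemented by HttpClient.request. Retries run only when allowRetries is
// set, the verb is idempotent (OPTIONS/GET/DELETE/HEAD), and the response is a
// 502/503/504; each retry sleeps
// ExponentialBackoffTimeSlice * 2^min(retryNumber, ExponentialBackoffCeiling) ms.
// `backoffMs` is a hypothetical helper mirroring _performExponentialBackoff's timing.
function backoffMs(retryNumber) {
    return ExponentialBackoffTimeSlice * Math.pow(2, Math.min(ExponentialBackoffCeiling, retryNumber));
}
// backoffMs(1) === 10, backoffMs(2) === 20, backoffMs(10) === 5120, backoffMs(11) === 5120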
+
+/***/ }),
+
+/***/ 9835:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.checkBypass = exports.getProxyUrl = void 0;
+function getProxyUrl(reqUrl) {
+ const usingSsl = reqUrl.protocol === 'https:';
+ if (checkBypass(reqUrl)) {
+ return undefined;
+ }
+ const proxyVar = (() => {
+ if (usingSsl) {
+ return process.env['https_proxy'] || process.env['HTTPS_PROXY'];
+ }
+ else {
+ return process.env['http_proxy'] || process.env['HTTP_PROXY'];
+ }
+ })();
+ if (proxyVar) {
+ return new URL(proxyVar);
+ }
+ else {
+ return undefined;
+ }
+}
+exports.getProxyUrl = getProxyUrl;
+function checkBypass(reqUrl) {
+ if (!reqUrl.hostname) {
+ return false;
+ }
+ const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
+ if (!noProxy) {
+ return false;
+ }
+ // Determine the request port
+ let reqPort;
+ if (reqUrl.port) {
+ reqPort = Number(reqUrl.port);
+ }
+ else if (reqUrl.protocol === 'http:') {
+ reqPort = 80;
+ }
+ else if (reqUrl.protocol === 'https:') {
+ reqPort = 443;
+ }
+ // Format the request hostname and hostname with port
+ const upperReqHosts = [reqUrl.hostname.toUpperCase()];
+ if (typeof reqPort === 'number') {
+ upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
+ }
+ // Compare request host against noproxy
+ for (const upperNoProxyItem of noProxy
+ .split(',')
+ .map(x => x.trim().toUpperCase())
+ .filter(x => x)) {
+ if (upperReqHosts.some(x => x === upperNoProxyItem)) {
+ return true;
+ }
+ }
+ return false;
+}
+exports.checkBypass = checkBypass;
+//# sourceMappingURL=proxy.js.map
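// Illustrative usage sketch only (not part of the bundled proxy.js above). getProxyUrl
// picks https_proxy/HTTPS_PROXY or http_proxy/HTTP_PROXY based on the request protocol,
// and checkBypass matches the request host (and host:port) case-insensitively against
// the comma-separated no_proxy list. The environment values below are hypothetical.
process.env['https_proxy'] = 'http://proxy.internal:3128';
process.env['no_proxy'] = 'localhost,api.github.com';
getProxyUrl(new URL('https://example.com/path'));      // URL for http://proxy.internal:3128
getProxyUrl(new URL('https://api.github.com/repos'));  // undefined: host is in no_proxy
checkBypass(new URL('http://localhost:8080/'));        // true: 'localhost' matches by hostname
checkBypass(new URL('http://build.internal:8080/'));   // false: not listed in no_proxy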
+
+/***/ }),
+
+/***/ 6538:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+/* module decorator */ module = __nccwpck_require__.nmd(module);
+
+const colorConvert = __nccwpck_require__(5458);
+
+const wrapAnsi16 = (fn, offset) => function () {
+ const code = fn.apply(colorConvert, arguments);
+ return `\u001B[${code + offset}m`;
+};
+
+const wrapAnsi256 = (fn, offset) => function () {
+ const code = fn.apply(colorConvert, arguments);
+ return `\u001B[${38 + offset};5;${code}m`;
+};
+
+const wrapAnsi16m = (fn, offset) => function () {
+ const rgb = fn.apply(colorConvert, arguments);
+ return `\u001B[${38 + offset};2;${rgb[0]};${rgb[1]};${rgb[2]}m`;
+};
+
+function assembleStyles() {
+ const codes = new Map();
+ const styles = {
+ modifier: {
+ reset: [0, 0],
+ // 21 isn't widely supported and 22 does the same thing
+ bold: [1, 22],
+ dim: [2, 22],
+ italic: [3, 23],
+ underline: [4, 24],
+ inverse: [7, 27],
+ hidden: [8, 28],
+ strikethrough: [9, 29]
+ },
+ color: {
+ black: [30, 39],
+ red: [31, 39],
+ green: [32, 39],
+ yellow: [33, 39],
+ blue: [34, 39],
+ magenta: [35, 39],
+ cyan: [36, 39],
+ white: [37, 39],
+ gray: [90, 39],
+
+ // Bright color
+ redBright: [91, 39],
+ greenBright: [92, 39],
+ yellowBright: [93, 39],
+ blueBright: [94, 39],
+ magentaBright: [95, 39],
+ cyanBright: [96, 39],
+ whiteBright: [97, 39]
+ },
+ bgColor: {
+ bgBlack: [40, 49],
+ bgRed: [41, 49],
+ bgGreen: [42, 49],
+ bgYellow: [43, 49],
+ bgBlue: [44, 49],
+ bgMagenta: [45, 49],
+ bgCyan: [46, 49],
+ bgWhite: [47, 49],
+
+ // Bright color
+ bgBlackBright: [100, 49],
+ bgRedBright: [101, 49],
+ bgGreenBright: [102, 49],
+ bgYellowBright: [103, 49],
+ bgBlueBright: [104, 49],
+ bgMagentaBright: [105, 49],
+ bgCyanBright: [106, 49],
+ bgWhiteBright: [107, 49]
+ }
+ };
+
+ // Fix humans
+ styles.color.grey = styles.color.gray;
+
+ for (const groupName of Object.keys(styles)) {
+ const group = styles[groupName];
+
+ for (const styleName of Object.keys(group)) {
+ const style = group[styleName];
+
+ styles[styleName] = {
+ open: `\u001B[${style[0]}m`,
+ close: `\u001B[${style[1]}m`
+ };
+
+ group[styleName] = styles[styleName];
+
+ codes.set(style[0], style[1]);
+ }
+
+ Object.defineProperty(styles, groupName, {
+ value: group,
+ enumerable: false
+ });
+
+ Object.defineProperty(styles, 'codes', {
+ value: codes,
+ enumerable: false
+ });
+ }
+
+ const ansi2ansi = n => n;
+ const rgb2rgb = (r, g, b) => [r, g, b];
+
+ styles.color.close = '\u001B[39m';
+ styles.bgColor.close = '\u001B[49m';
+
+ styles.color.ansi = {
+ ansi: wrapAnsi16(ansi2ansi, 0)
+ };
+ styles.color.ansi256 = {
+ ansi256: wrapAnsi256(ansi2ansi, 0)
+ };
+ styles.color.ansi16m = {
+ rgb: wrapAnsi16m(rgb2rgb, 0)
+ };
+
+ styles.bgColor.ansi = {
+ ansi: wrapAnsi16(ansi2ansi, 10)
+ };
+ styles.bgColor.ansi256 = {
+ ansi256: wrapAnsi256(ansi2ansi, 10)
+ };
+ styles.bgColor.ansi16m = {
+ rgb: wrapAnsi16m(rgb2rgb, 10)
+ };
+
+ for (let key of Object.keys(colorConvert)) {
+ if (typeof colorConvert[key] !== 'object') {
+ continue;
+ }
+
+ const suite = colorConvert[key];
+
+ if (key === 'ansi16') {
+ key = 'ansi';
+ }
+
+ if ('ansi16' in suite) {
+ styles.color.ansi[key] = wrapAnsi16(suite.ansi16, 0);
+ styles.bgColor.ansi[key] = wrapAnsi16(suite.ansi16, 10);
+ }
+
+ if ('ansi256' in suite) {
+ styles.color.ansi256[key] = wrapAnsi256(suite.ansi256, 0);
+ styles.bgColor.ansi256[key] = wrapAnsi256(suite.ansi256, 10);
+ }
+
+ if ('rgb' in suite) {
+ styles.color.ansi16m[key] = wrapAnsi16m(suite.rgb, 0);
+ styles.bgColor.ansi16m[key] = wrapAnsi16m(suite.rgb, 10);
+ }
+ }
+
+ return styles;
+}
+
+// Make the export immutable
+Object.defineProperty(module, 'exports', {
+ enumerable: true,
+ get: assembleStyles
+});
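+// Illustrative sketch of the lazily built export (values are the standard ANSI
+// codes produced by assembleStyles above):
+//   const s = assembleStyles();
+//   s.red.open;                      // '\u001B[31m'
+//   s.red.close;                     // '\u001B[39m'
+//   s.color.ansi256.rgb(255, 0, 0);  // '\u001B[38;5;196m'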
+
+
+/***/ }),
+
+/***/ 7658:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const escapeStringRegexp = __nccwpck_require__(5898);
+const ansiStyles = __nccwpck_require__(6538);
+const stdoutColor = (__nccwpck_require__(5317).stdout);
+
+const template = __nccwpck_require__(2558);
+
+const isSimpleWindowsTerm = process.platform === 'win32' && !(process.env.TERM || '').toLowerCase().startsWith('xterm');
+
+// `supportsColor.level` → `ansiStyles.color[name]` mapping
+const levelMapping = ['ansi', 'ansi', 'ansi256', 'ansi16m'];
+
+// `color-convert` models to exclude from the Chalk API due to conflicts and such
+const skipModels = new Set(['gray']);
+
+const styles = Object.create(null);
+
+function applyOptions(obj, options) {
+ options = options || {};
+
+ // Detect level if not set manually
+ const scLevel = stdoutColor ? stdoutColor.level : 0;
+ obj.level = options.level === undefined ? scLevel : options.level;
+ obj.enabled = 'enabled' in options ? options.enabled : obj.level > 0;
+}
+
+function Chalk(options) {
+ // We check for this.template here since calling `chalk.constructor()`
+ // by itself will have a `this` of a previously constructed chalk object
+ if (!this || !(this instanceof Chalk) || this.template) {
+ const chalk = {};
+ applyOptions(chalk, options);
+
+ chalk.template = function () {
+ const args = [].slice.call(arguments);
+ return chalkTag.apply(null, [chalk.template].concat(args));
+ };
+
+ Object.setPrototypeOf(chalk, Chalk.prototype);
+ Object.setPrototypeOf(chalk.template, chalk);
+
+ chalk.template.constructor = Chalk;
+
+ return chalk.template;
+ }
+
+ applyOptions(this, options);
+}
+
+// Use bright blue on Windows as the normal blue color is illegible
+if (isSimpleWindowsTerm) {
+ ansiStyles.blue.open = '\u001B[94m';
+}
+
+for (const key of Object.keys(ansiStyles)) {
+ ansiStyles[key].closeRe = new RegExp(escapeStringRegexp(ansiStyles[key].close), 'g');
+
+ styles[key] = {
+ get() {
+ const codes = ansiStyles[key];
+ return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, key);
+ }
+ };
+}
+
+styles.visible = {
+ get() {
+ return build.call(this, this._styles || [], true, 'visible');
+ }
+};
+
+ansiStyles.color.closeRe = new RegExp(escapeStringRegexp(ansiStyles.color.close), 'g');
+for (const model of Object.keys(ansiStyles.color.ansi)) {
+ if (skipModels.has(model)) {
+ continue;
+ }
+
+ styles[model] = {
+ get() {
+ const level = this.level;
+ return function () {
+ const open = ansiStyles.color[levelMapping[level]][model].apply(null, arguments);
+ const codes = {
+ open,
+ close: ansiStyles.color.close,
+ closeRe: ansiStyles.color.closeRe
+ };
+ return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, model);
+ };
+ }
+ };
+}
+
+ansiStyles.bgColor.closeRe = new RegExp(escapeStringRegexp(ansiStyles.bgColor.close), 'g');
+for (const model of Object.keys(ansiStyles.bgColor.ansi)) {
+ if (skipModels.has(model)) {
+ continue;
+ }
+
+ const bgModel = 'bg' + model[0].toUpperCase() + model.slice(1);
+ styles[bgModel] = {
+ get() {
+ const level = this.level;
+ return function () {
+ const open = ansiStyles.bgColor[levelMapping[level]][model].apply(null, arguments);
+ const codes = {
+ open,
+ close: ansiStyles.bgColor.close,
+ closeRe: ansiStyles.bgColor.closeRe
+ };
+ return build.call(this, this._styles ? this._styles.concat(codes) : [codes], this._empty, model);
+ };
+ }
+ };
+}
+
+const proto = Object.defineProperties(() => {}, styles);
+
+function build(_styles, _empty, key) {
+ const builder = function () {
+ return applyStyle.apply(builder, arguments);
+ };
+
+ builder._styles = _styles;
+ builder._empty = _empty;
+
+ const self = this;
+
+ Object.defineProperty(builder, 'level', {
+ enumerable: true,
+ get() {
+ return self.level;
+ },
+ set(level) {
+ self.level = level;
+ }
+ });
+
+ Object.defineProperty(builder, 'enabled', {
+ enumerable: true,
+ get() {
+ return self.enabled;
+ },
+ set(enabled) {
+ self.enabled = enabled;
+ }
+ });
+
+ // See below for fix regarding invisible grey/dim combination on Windows
+ builder.hasGrey = this.hasGrey || key === 'gray' || key === 'grey';
+
+ // `__proto__` is used because we must return a function, but there is
+ // no way to create a function with a different prototype
+ builder.__proto__ = proto; // eslint-disable-line no-proto
+
+ return builder;
+}
+
+function applyStyle() {
+    // Support varargs, but simply cast to string in case there's only one arg
+ const args = arguments;
+ const argsLen = args.length;
+ let str = String(arguments[0]);
+
+ if (argsLen === 0) {
+ return '';
+ }
+
+ if (argsLen > 1) {
+ // Don't slice `arguments`, it prevents V8 optimizations
+ for (let a = 1; a < argsLen; a++) {
+ str += ' ' + args[a];
+ }
+ }
+
+ if (!this.enabled || this.level <= 0 || !str) {
+ return this._empty ? '' : str;
+ }
+
+ // Turns out that on Windows dimmed gray text becomes invisible in cmd.exe,
+ // see https://github.com/chalk/chalk/issues/58
+ // If we're on Windows and we're dealing with a gray color, temporarily make 'dim' a noop.
+ const originalDim = ansiStyles.dim.open;
+ if (isSimpleWindowsTerm && this.hasGrey) {
+ ansiStyles.dim.open = '';
+ }
+
+ for (const code of this._styles.slice().reverse()) {
+ // Replace any instances already present with a re-opening code
+ // otherwise only the part of the string until said closing code
+ // will be colored, and the rest will simply be 'plain'.
+ str = code.open + str.replace(code.closeRe, code.open) + code.close;
+
+ // Close the styling before a linebreak and reopen
+ // after next line to fix a bleed issue on macOS
+ // https://github.com/chalk/chalk/pull/92
+ str = str.replace(/\r?\n/g, `${code.close}$&${code.open}`);
+ }
+
+ // Reset the original `dim` if we changed it to work around the Windows dimmed gray issue
+ ansiStyles.dim.open = originalDim;
+
+ return str;
+}
+
+function chalkTag(chalk, strings) {
+ if (!Array.isArray(strings)) {
+ // If chalk() was called by itself or with a string,
+ // return the string itself as a string.
+ return [].slice.call(arguments, 1).join(' ');
+ }
+
+ const args = [].slice.call(arguments, 2);
+ const parts = [strings.raw[0]];
+
+ for (let i = 1; i < strings.length; i++) {
+ parts.push(String(args[i - 1]).replace(/[{}\\]/g, '\\$&'));
+ parts.push(String(strings.raw[i]));
+ }
+
+ return template(chalk, parts.join(''));
+}
+
+Object.defineProperties(Chalk.prototype, styles);
+
+module.exports = Chalk(); // eslint-disable-line new-cap
+module.exports.supportsColor = stdoutColor;
+module.exports["default"] = module.exports; // For TypeScript
+
+
+/***/ }),
+
+/***/ 2558:
+/***/ ((module) => {
+
+"use strict";
+
+const TEMPLATE_REGEX = /(?:\\(u[a-f\d]{4}|x[a-f\d]{2}|.))|(?:\{(~)?(\w+(?:\([^)]*\))?(?:\.\w+(?:\([^)]*\))?)*)(?:[ \t]|(?=\r?\n)))|(\})|((?:.|[\r\n\f])+?)/gi;
+const STYLE_REGEX = /(?:^|\.)(\w+)(?:\(([^)]*)\))?/g;
+const STRING_REGEX = /^(['"])((?:\\.|(?!\1)[^\\])*)\1$/;
+const ESCAPE_REGEX = /\\(u[a-f\d]{4}|x[a-f\d]{2}|.)|([^\\])/gi;
+
+const ESCAPES = new Map([
+ ['n', '\n'],
+ ['r', '\r'],
+ ['t', '\t'],
+ ['b', '\b'],
+ ['f', '\f'],
+ ['v', '\v'],
+ ['0', '\0'],
+ ['\\', '\\'],
+ ['e', '\u001B'],
+ ['a', '\u0007']
+]);
+
+function unescape(c) {
+ if ((c[0] === 'u' && c.length === 5) || (c[0] === 'x' && c.length === 3)) {
+ return String.fromCharCode(parseInt(c.slice(1), 16));
+ }
+
+ return ESCAPES.get(c) || c;
+}
+
+function parseArguments(name, args) {
+ const results = [];
+ const chunks = args.trim().split(/\s*,\s*/g);
+ let matches;
+
+ for (const chunk of chunks) {
+ if (!isNaN(chunk)) {
+ results.push(Number(chunk));
+ } else if ((matches = chunk.match(STRING_REGEX))) {
+ results.push(matches[2].replace(ESCAPE_REGEX, (m, escape, chr) => escape ? unescape(escape) : chr));
+ } else {
+ throw new Error(`Invalid Chalk template style argument: ${chunk} (in style '${name}')`);
+ }
+ }
+
+ return results;
+}
+
+function parseStyle(style) {
+ STYLE_REGEX.lastIndex = 0;
+
+ const results = [];
+ let matches;
+
+ while ((matches = STYLE_REGEX.exec(style)) !== null) {
+ const name = matches[1];
+
+ if (matches[2]) {
+ const args = parseArguments(name, matches[2]);
+ results.push([name].concat(args));
+ } else {
+ results.push([name]);
+ }
+ }
+
+ return results;
+}
+
+function buildStyle(chalk, styles) {
+ const enabled = {};
+
+ for (const layer of styles) {
+ for (const style of layer.styles) {
+ enabled[style[0]] = layer.inverse ? null : style.slice(1);
+ }
+ }
+
+ let current = chalk;
+ for (const styleName of Object.keys(enabled)) {
+ if (Array.isArray(enabled[styleName])) {
+ if (!(styleName in current)) {
+ throw new Error(`Unknown Chalk style: ${styleName}`);
+ }
+
+ if (enabled[styleName].length > 0) {
+ current = current[styleName].apply(current, enabled[styleName]);
+ } else {
+ current = current[styleName];
+ }
+ }
+ }
+
+ return current;
+}
+
+module.exports = (chalk, tmp) => {
+ const styles = [];
+ const chunks = [];
+ let chunk = [];
+
+ // eslint-disable-next-line max-params
+ tmp.replace(TEMPLATE_REGEX, (m, escapeChar, inverse, style, close, chr) => {
+ if (escapeChar) {
+ chunk.push(unescape(escapeChar));
+ } else if (style) {
+ const str = chunk.join('');
+ chunk = [];
+ chunks.push(styles.length === 0 ? str : buildStyle(chalk, styles)(str));
+ styles.push({inverse, styles: parseStyle(style)});
+ } else if (close) {
+ if (styles.length === 0) {
+ throw new Error('Found extraneous } in Chalk template literal');
+ }
+
+ chunks.push(buildStyle(chalk, styles)(chunk.join('')));
+ chunk = [];
+ styles.pop();
+ } else {
+ chunk.push(chr);
+ }
+ });
+
+ chunks.push(chunk.join(''));
+
+ if (styles.length > 0) {
+ const errMsg = `Chalk template literal is missing ${styles.length} closing bracket${styles.length === 1 ? '' : 's'} (\`}\`)`;
+ throw new Error(errMsg);
+ }
+
+ return chunks.join('');
+};
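+// Illustrative sketch of the template syntax this parser accepts via chalk's
+// tagged template literals (strings below are placeholders):
+//   chalk`{red.bold Error!} {underline some/path} not found`
+//   chalk`{rgb(255,131,0) orange text}`   // parenthesised style arguments
+// An extraneous `}` or a missing closing `}` throws, as implemented above.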
+
+
+/***/ }),
+
+/***/ 591:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+/* MIT license */
+var cssKeywords = __nccwpck_require__(2780);
+
+// NOTE: conversions should only return primitive values (i.e. arrays, or
+// values that give correct `typeof` results).
+// do not use boxed value types (i.e. Number(), String(), etc.)
+
+var reverseKeywords = {};
+for (var key in cssKeywords) {
+ if (cssKeywords.hasOwnProperty(key)) {
+ reverseKeywords[cssKeywords[key]] = key;
+ }
+}
+
+var convert = module.exports = {
+ rgb: {channels: 3, labels: 'rgb'},
+ hsl: {channels: 3, labels: 'hsl'},
+ hsv: {channels: 3, labels: 'hsv'},
+ hwb: {channels: 3, labels: 'hwb'},
+ cmyk: {channels: 4, labels: 'cmyk'},
+ xyz: {channels: 3, labels: 'xyz'},
+ lab: {channels: 3, labels: 'lab'},
+ lch: {channels: 3, labels: 'lch'},
+ hex: {channels: 1, labels: ['hex']},
+ keyword: {channels: 1, labels: ['keyword']},
+ ansi16: {channels: 1, labels: ['ansi16']},
+ ansi256: {channels: 1, labels: ['ansi256']},
+ hcg: {channels: 3, labels: ['h', 'c', 'g']},
+ apple: {channels: 3, labels: ['r16', 'g16', 'b16']},
+ gray: {channels: 1, labels: ['gray']}
+};
+
+// hide .channels and .labels properties
+for (var model in convert) {
+ if (convert.hasOwnProperty(model)) {
+ if (!('channels' in convert[model])) {
+ throw new Error('missing channels property: ' + model);
+ }
+
+ if (!('labels' in convert[model])) {
+ throw new Error('missing channel labels property: ' + model);
+ }
+
+ if (convert[model].labels.length !== convert[model].channels) {
+ throw new Error('channel and label counts mismatch: ' + model);
+ }
+
+ var channels = convert[model].channels;
+ var labels = convert[model].labels;
+ delete convert[model].channels;
+ delete convert[model].labels;
+ Object.defineProperty(convert[model], 'channels', {value: channels});
+ Object.defineProperty(convert[model], 'labels', {value: labels});
+ }
+}
+
+convert.rgb.hsl = function (rgb) {
+ var r = rgb[0] / 255;
+ var g = rgb[1] / 255;
+ var b = rgb[2] / 255;
+ var min = Math.min(r, g, b);
+ var max = Math.max(r, g, b);
+ var delta = max - min;
+ var h;
+ var s;
+ var l;
+
+ if (max === min) {
+ h = 0;
+ } else if (r === max) {
+ h = (g - b) / delta;
+ } else if (g === max) {
+ h = 2 + (b - r) / delta;
+ } else if (b === max) {
+ h = 4 + (r - g) / delta;
+ }
+
+ h = Math.min(h * 60, 360);
+
+ if (h < 0) {
+ h += 360;
+ }
+
+ l = (min + max) / 2;
+
+ if (max === min) {
+ s = 0;
+ } else if (l <= 0.5) {
+ s = delta / (max + min);
+ } else {
+ s = delta / (2 - max - min);
+ }
+
+ return [h, s * 100, l * 100];
+};
+
+convert.rgb.hsv = function (rgb) {
+ var rdif;
+ var gdif;
+ var bdif;
+ var h;
+ var s;
+
+ var r = rgb[0] / 255;
+ var g = rgb[1] / 255;
+ var b = rgb[2] / 255;
+ var v = Math.max(r, g, b);
+ var diff = v - Math.min(r, g, b);
+ var diffc = function (c) {
+ return (v - c) / 6 / diff + 1 / 2;
+ };
+
+ if (diff === 0) {
+ h = s = 0;
+ } else {
+ s = diff / v;
+ rdif = diffc(r);
+ gdif = diffc(g);
+ bdif = diffc(b);
+
+ if (r === v) {
+ h = bdif - gdif;
+ } else if (g === v) {
+ h = (1 / 3) + rdif - bdif;
+ } else if (b === v) {
+ h = (2 / 3) + gdif - rdif;
+ }
+ if (h < 0) {
+ h += 1;
+ } else if (h > 1) {
+ h -= 1;
+ }
+ }
+
+ return [
+ h * 360,
+ s * 100,
+ v * 100
+ ];
+};
+
+convert.rgb.hwb = function (rgb) {
+ var r = rgb[0];
+ var g = rgb[1];
+ var b = rgb[2];
+ var h = convert.rgb.hsl(rgb)[0];
+ var w = 1 / 255 * Math.min(r, Math.min(g, b));
+
+ b = 1 - 1 / 255 * Math.max(r, Math.max(g, b));
+
+ return [h, w * 100, b * 100];
+};
+
+convert.rgb.cmyk = function (rgb) {
+ var r = rgb[0] / 255;
+ var g = rgb[1] / 255;
+ var b = rgb[2] / 255;
+ var c;
+ var m;
+ var y;
+ var k;
+
+ k = Math.min(1 - r, 1 - g, 1 - b);
+ c = (1 - r - k) / (1 - k) || 0;
+ m = (1 - g - k) / (1 - k) || 0;
+ y = (1 - b - k) / (1 - k) || 0;
+
+ return [c * 100, m * 100, y * 100, k * 100];
+};
+
+/**
+ * See https://en.m.wikipedia.org/wiki/Euclidean_distance#Squared_Euclidean_distance
+ * */
+function comparativeDistance(x, y) {
+ return (
+ Math.pow(x[0] - y[0], 2) +
+ Math.pow(x[1] - y[1], 2) +
+ Math.pow(x[2] - y[2], 2)
+ );
+}
+
+convert.rgb.keyword = function (rgb) {
+ var reversed = reverseKeywords[rgb];
+ if (reversed) {
+ return reversed;
+ }
+
+ var currentClosestDistance = Infinity;
+ var currentClosestKeyword;
+
+ for (var keyword in cssKeywords) {
+ if (cssKeywords.hasOwnProperty(keyword)) {
+ var value = cssKeywords[keyword];
+
+ // Compute comparative distance
+ var distance = comparativeDistance(rgb, value);
+
+      // Check if it's less; if so, set as closest
+ if (distance < currentClosestDistance) {
+ currentClosestDistance = distance;
+ currentClosestKeyword = keyword;
+ }
+ }
+ }
+
+ return currentClosestKeyword;
+};
+
+convert.keyword.rgb = function (keyword) {
+ return cssKeywords[keyword];
+};
+
+convert.rgb.xyz = function (rgb) {
+ var r = rgb[0] / 255;
+ var g = rgb[1] / 255;
+ var b = rgb[2] / 255;
+
+ // assume sRGB
+ r = r > 0.04045 ? Math.pow(((r + 0.055) / 1.055), 2.4) : (r / 12.92);
+ g = g > 0.04045 ? Math.pow(((g + 0.055) / 1.055), 2.4) : (g / 12.92);
+ b = b > 0.04045 ? Math.pow(((b + 0.055) / 1.055), 2.4) : (b / 12.92);
+
+ var x = (r * 0.4124) + (g * 0.3576) + (b * 0.1805);
+ var y = (r * 0.2126) + (g * 0.7152) + (b * 0.0722);
+ var z = (r * 0.0193) + (g * 0.1192) + (b * 0.9505);
+
+ return [x * 100, y * 100, z * 100];
+};
+
+convert.rgb.lab = function (rgb) {
+ var xyz = convert.rgb.xyz(rgb);
+ var x = xyz[0];
+ var y = xyz[1];
+ var z = xyz[2];
+ var l;
+ var a;
+ var b;
+
+ x /= 95.047;
+ y /= 100;
+ z /= 108.883;
+
+ x = x > 0.008856 ? Math.pow(x, 1 / 3) : (7.787 * x) + (16 / 116);
+ y = y > 0.008856 ? Math.pow(y, 1 / 3) : (7.787 * y) + (16 / 116);
+ z = z > 0.008856 ? Math.pow(z, 1 / 3) : (7.787 * z) + (16 / 116);
+
+ l = (116 * y) - 16;
+ a = 500 * (x - y);
+ b = 200 * (y - z);
+
+ return [l, a, b];
+};
+
+convert.hsl.rgb = function (hsl) {
+ var h = hsl[0] / 360;
+ var s = hsl[1] / 100;
+ var l = hsl[2] / 100;
+ var t1;
+ var t2;
+ var t3;
+ var rgb;
+ var val;
+
+ if (s === 0) {
+ val = l * 255;
+ return [val, val, val];
+ }
+
+ if (l < 0.5) {
+ t2 = l * (1 + s);
+ } else {
+ t2 = l + s - l * s;
+ }
+
+ t1 = 2 * l - t2;
+
+ rgb = [0, 0, 0];
+ for (var i = 0; i < 3; i++) {
+ t3 = h + 1 / 3 * -(i - 1);
+ if (t3 < 0) {
+ t3++;
+ }
+ if (t3 > 1) {
+ t3--;
+ }
+
+ if (6 * t3 < 1) {
+ val = t1 + (t2 - t1) * 6 * t3;
+ } else if (2 * t3 < 1) {
+ val = t2;
+ } else if (3 * t3 < 2) {
+ val = t1 + (t2 - t1) * (2 / 3 - t3) * 6;
+ } else {
+ val = t1;
+ }
+
+ rgb[i] = val * 255;
+ }
+
+ return rgb;
+};
+
+convert.hsl.hsv = function (hsl) {
+ var h = hsl[0];
+ var s = hsl[1] / 100;
+ var l = hsl[2] / 100;
+ var smin = s;
+ var lmin = Math.max(l, 0.01);
+ var sv;
+ var v;
+
+ l *= 2;
+ s *= (l <= 1) ? l : 2 - l;
+ smin *= lmin <= 1 ? lmin : 2 - lmin;
+ v = (l + s) / 2;
+ sv = l === 0 ? (2 * smin) / (lmin + smin) : (2 * s) / (l + s);
+
+ return [h, sv * 100, v * 100];
+};
+
+convert.hsv.rgb = function (hsv) {
+ var h = hsv[0] / 60;
+ var s = hsv[1] / 100;
+ var v = hsv[2] / 100;
+ var hi = Math.floor(h) % 6;
+
+ var f = h - Math.floor(h);
+ var p = 255 * v * (1 - s);
+ var q = 255 * v * (1 - (s * f));
+ var t = 255 * v * (1 - (s * (1 - f)));
+ v *= 255;
+
+ switch (hi) {
+ case 0:
+ return [v, t, p];
+ case 1:
+ return [q, v, p];
+ case 2:
+ return [p, v, t];
+ case 3:
+ return [p, q, v];
+ case 4:
+ return [t, p, v];
+ case 5:
+ return [v, p, q];
+ }
+};
+
+convert.hsv.hsl = function (hsv) {
+ var h = hsv[0];
+ var s = hsv[1] / 100;
+ var v = hsv[2] / 100;
+ var vmin = Math.max(v, 0.01);
+ var lmin;
+ var sl;
+ var l;
+
+ l = (2 - s) * v;
+ lmin = (2 - s) * vmin;
+ sl = s * vmin;
+ sl /= (lmin <= 1) ? lmin : 2 - lmin;
+ sl = sl || 0;
+ l /= 2;
+
+ return [h, sl * 100, l * 100];
+};
+
+// http://dev.w3.org/csswg/css-color/#hwb-to-rgb
+convert.hwb.rgb = function (hwb) {
+ var h = hwb[0] / 360;
+ var wh = hwb[1] / 100;
+ var bl = hwb[2] / 100;
+ var ratio = wh + bl;
+ var i;
+ var v;
+ var f;
+ var n;
+
+  // wh + bl can't be > 1
+ if (ratio > 1) {
+ wh /= ratio;
+ bl /= ratio;
+ }
+
+ i = Math.floor(6 * h);
+ v = 1 - bl;
+ f = 6 * h - i;
+
+ if ((i & 0x01) !== 0) {
+ f = 1 - f;
+ }
+
+ n = wh + f * (v - wh); // linear interpolation
+
+ var r;
+ var g;
+ var b;
+ switch (i) {
+ default:
+ case 6:
+ case 0: r = v; g = n; b = wh; break;
+ case 1: r = n; g = v; b = wh; break;
+ case 2: r = wh; g = v; b = n; break;
+ case 3: r = wh; g = n; b = v; break;
+ case 4: r = n; g = wh; b = v; break;
+ case 5: r = v; g = wh; b = n; break;
+ }
+
+ return [r * 255, g * 255, b * 255];
+};
+
+convert.cmyk.rgb = function (cmyk) {
+ var c = cmyk[0] / 100;
+ var m = cmyk[1] / 100;
+ var y = cmyk[2] / 100;
+ var k = cmyk[3] / 100;
+ var r;
+ var g;
+ var b;
+
+ r = 1 - Math.min(1, c * (1 - k) + k);
+ g = 1 - Math.min(1, m * (1 - k) + k);
+ b = 1 - Math.min(1, y * (1 - k) + k);
+
+ return [r * 255, g * 255, b * 255];
+};
+
+convert.xyz.rgb = function (xyz) {
+ var x = xyz[0] / 100;
+ var y = xyz[1] / 100;
+ var z = xyz[2] / 100;
+ var r;
+ var g;
+ var b;
+
+ r = (x * 3.2406) + (y * -1.5372) + (z * -0.4986);
+ g = (x * -0.9689) + (y * 1.8758) + (z * 0.0415);
+ b = (x * 0.0557) + (y * -0.2040) + (z * 1.0570);
+
+ // assume sRGB
+ r = r > 0.0031308
+ ? ((1.055 * Math.pow(r, 1.0 / 2.4)) - 0.055)
+ : r * 12.92;
+
+ g = g > 0.0031308
+ ? ((1.055 * Math.pow(g, 1.0 / 2.4)) - 0.055)
+ : g * 12.92;
+
+ b = b > 0.0031308
+ ? ((1.055 * Math.pow(b, 1.0 / 2.4)) - 0.055)
+ : b * 12.92;
+
+ r = Math.min(Math.max(0, r), 1);
+ g = Math.min(Math.max(0, g), 1);
+ b = Math.min(Math.max(0, b), 1);
+
+ return [r * 255, g * 255, b * 255];
+};
+
+convert.xyz.lab = function (xyz) {
+ var x = xyz[0];
+ var y = xyz[1];
+ var z = xyz[2];
+ var l;
+ var a;
+ var b;
+
+ x /= 95.047;
+ y /= 100;
+ z /= 108.883;
+
+ x = x > 0.008856 ? Math.pow(x, 1 / 3) : (7.787 * x) + (16 / 116);
+ y = y > 0.008856 ? Math.pow(y, 1 / 3) : (7.787 * y) + (16 / 116);
+ z = z > 0.008856 ? Math.pow(z, 1 / 3) : (7.787 * z) + (16 / 116);
+
+ l = (116 * y) - 16;
+ a = 500 * (x - y);
+ b = 200 * (y - z);
+
+ return [l, a, b];
+};
+
+convert.lab.xyz = function (lab) {
+ var l = lab[0];
+ var a = lab[1];
+ var b = lab[2];
+ var x;
+ var y;
+ var z;
+
+ y = (l + 16) / 116;
+ x = a / 500 + y;
+ z = y - b / 200;
+
+ var y2 = Math.pow(y, 3);
+ var x2 = Math.pow(x, 3);
+ var z2 = Math.pow(z, 3);
+ y = y2 > 0.008856 ? y2 : (y - 16 / 116) / 7.787;
+ x = x2 > 0.008856 ? x2 : (x - 16 / 116) / 7.787;
+ z = z2 > 0.008856 ? z2 : (z - 16 / 116) / 7.787;
+
+ x *= 95.047;
+ y *= 100;
+ z *= 108.883;
+
+ return [x, y, z];
+};
+
+convert.lab.lch = function (lab) {
+ var l = lab[0];
+ var a = lab[1];
+ var b = lab[2];
+ var hr;
+ var h;
+ var c;
+
+ hr = Math.atan2(b, a);
+ h = hr * 360 / 2 / Math.PI;
+
+ if (h < 0) {
+ h += 360;
+ }
+
+ c = Math.sqrt(a * a + b * b);
+
+ return [l, c, h];
+};
+
+convert.lch.lab = function (lch) {
+ var l = lch[0];
+ var c = lch[1];
+ var h = lch[2];
+ var a;
+ var b;
+ var hr;
+
+ hr = h / 360 * 2 * Math.PI;
+ a = c * Math.cos(hr);
+ b = c * Math.sin(hr);
+
+ return [l, a, b];
+};
+
+convert.rgb.ansi16 = function (args) {
+ var r = args[0];
+ var g = args[1];
+ var b = args[2];
+ var value = 1 in arguments ? arguments[1] : convert.rgb.hsv(args)[2]; // hsv -> ansi16 optimization
+
+ value = Math.round(value / 50);
+
+ if (value === 0) {
+ return 30;
+ }
+
+ var ansi = 30
+ + ((Math.round(b / 255) << 2)
+ | (Math.round(g / 255) << 1)
+ | Math.round(r / 255));
+
+ if (value === 2) {
+ ansi += 60;
+ }
+
+ return ansi;
+};
+
+convert.hsv.ansi16 = function (args) {
+ // optimization here; we already know the value and don't need to get
+ // it converted for us.
+ return convert.rgb.ansi16(convert.hsv.rgb(args), args[2]);
+};
+
+convert.rgb.ansi256 = function (args) {
+ var r = args[0];
+ var g = args[1];
+ var b = args[2];
+
+ // we use the extended greyscale palette here, with the exception of
+ // black and white. normal palette only has 4 greyscale shades.
+ if (r === g && g === b) {
+ if (r < 8) {
+ return 16;
+ }
+
+ if (r > 248) {
+ return 231;
+ }
+
+ return Math.round(((r - 8) / 247) * 24) + 232;
+ }
+
+ var ansi = 16
+ + (36 * Math.round(r / 255 * 5))
+ + (6 * Math.round(g / 255 * 5))
+ + Math.round(b / 255 * 5);
+
+ return ansi;
+};
+
+convert.ansi16.rgb = function (args) {
+ var color = args % 10;
+
+ // handle greyscale
+ if (color === 0 || color === 7) {
+ if (args > 50) {
+ color += 3.5;
+ }
+
+ color = color / 10.5 * 255;
+
+ return [color, color, color];
+ }
+
+ var mult = (~~(args > 50) + 1) * 0.5;
+ var r = ((color & 1) * mult) * 255;
+ var g = (((color >> 1) & 1) * mult) * 255;
+ var b = (((color >> 2) & 1) * mult) * 255;
+
+ return [r, g, b];
+};
+
+convert.ansi256.rgb = function (args) {
+ // handle greyscale
+ if (args >= 232) {
+ var c = (args - 232) * 10 + 8;
+ return [c, c, c];
+ }
+
+ args -= 16;
+
+ var rem;
+ var r = Math.floor(args / 36) / 5 * 255;
+ var g = Math.floor((rem = args % 36) / 6) / 5 * 255;
+ var b = (rem % 6) / 5 * 255;
+
+ return [r, g, b];
+};
+
+convert.rgb.hex = function (args) {
+ var integer = ((Math.round(args[0]) & 0xFF) << 16)
+ + ((Math.round(args[1]) & 0xFF) << 8)
+ + (Math.round(args[2]) & 0xFF);
+
+ var string = integer.toString(16).toUpperCase();
+ return '000000'.substring(string.length) + string;
+};
+
+convert.hex.rgb = function (args) {
+ var match = args.toString(16).match(/[a-f0-9]{6}|[a-f0-9]{3}/i);
+ if (!match) {
+ return [0, 0, 0];
+ }
+
+ var colorString = match[0];
+
+ if (match[0].length === 3) {
+ colorString = colorString.split('').map(function (char) {
+ return char + char;
+ }).join('');
+ }
+
+ var integer = parseInt(colorString, 16);
+ var r = (integer >> 16) & 0xFF;
+ var g = (integer >> 8) & 0xFF;
+ var b = integer & 0xFF;
+
+ return [r, g, b];
+};
+
+convert.rgb.hcg = function (rgb) {
+ var r = rgb[0] / 255;
+ var g = rgb[1] / 255;
+ var b = rgb[2] / 255;
+ var max = Math.max(Math.max(r, g), b);
+ var min = Math.min(Math.min(r, g), b);
+ var chroma = (max - min);
+ var grayscale;
+ var hue;
+
+ if (chroma < 1) {
+ grayscale = min / (1 - chroma);
+ } else {
+ grayscale = 0;
+ }
+
+ if (chroma <= 0) {
+ hue = 0;
+ } else
+ if (max === r) {
+ hue = ((g - b) / chroma) % 6;
+ } else
+ if (max === g) {
+ hue = 2 + (b - r) / chroma;
+ } else {
+    hue = 4 + (r - g) / chroma;
+ }
+
+ hue /= 6;
+ hue %= 1;
+
+ return [hue * 360, chroma * 100, grayscale * 100];
+};
+
+convert.hsl.hcg = function (hsl) {
+ var s = hsl[1] / 100;
+ var l = hsl[2] / 100;
+ var c = 1;
+ var f = 0;
+
+ if (l < 0.5) {
+ c = 2.0 * s * l;
+ } else {
+ c = 2.0 * s * (1.0 - l);
+ }
+
+ if (c < 1.0) {
+ f = (l - 0.5 * c) / (1.0 - c);
+ }
+
+ return [hsl[0], c * 100, f * 100];
+};
+
+convert.hsv.hcg = function (hsv) {
+ var s = hsv[1] / 100;
+ var v = hsv[2] / 100;
+
+ var c = s * v;
+ var f = 0;
+
+ if (c < 1.0) {
+ f = (v - c) / (1 - c);
+ }
+
+ return [hsv[0], c * 100, f * 100];
+};
+
+convert.hcg.rgb = function (hcg) {
+ var h = hcg[0] / 360;
+ var c = hcg[1] / 100;
+ var g = hcg[2] / 100;
+
+ if (c === 0.0) {
+ return [g * 255, g * 255, g * 255];
+ }
+
+ var pure = [0, 0, 0];
+ var hi = (h % 1) * 6;
+ var v = hi % 1;
+ var w = 1 - v;
+ var mg = 0;
+
+ switch (Math.floor(hi)) {
+ case 0:
+ pure[0] = 1; pure[1] = v; pure[2] = 0; break;
+ case 1:
+ pure[0] = w; pure[1] = 1; pure[2] = 0; break;
+ case 2:
+ pure[0] = 0; pure[1] = 1; pure[2] = v; break;
+ case 3:
+ pure[0] = 0; pure[1] = w; pure[2] = 1; break;
+ case 4:
+ pure[0] = v; pure[1] = 0; pure[2] = 1; break;
+ default:
+ pure[0] = 1; pure[1] = 0; pure[2] = w;
+ }
+
+ mg = (1.0 - c) * g;
+
+ return [
+ (c * pure[0] + mg) * 255,
+ (c * pure[1] + mg) * 255,
+ (c * pure[2] + mg) * 255
+ ];
+};
+
+convert.hcg.hsv = function (hcg) {
+ var c = hcg[1] / 100;
+ var g = hcg[2] / 100;
+
+ var v = c + g * (1.0 - c);
+ var f = 0;
+
+ if (v > 0.0) {
+ f = c / v;
+ }
+
+ return [hcg[0], f * 100, v * 100];
+};
+
+convert.hcg.hsl = function (hcg) {
+ var c = hcg[1] / 100;
+ var g = hcg[2] / 100;
+
+ var l = g * (1.0 - c) + 0.5 * c;
+ var s = 0;
+
+ if (l > 0.0 && l < 0.5) {
+ s = c / (2 * l);
+ } else
+ if (l >= 0.5 && l < 1.0) {
+ s = c / (2 * (1 - l));
+ }
+
+ return [hcg[0], s * 100, l * 100];
+};
+
+convert.hcg.hwb = function (hcg) {
+ var c = hcg[1] / 100;
+ var g = hcg[2] / 100;
+ var v = c + g * (1.0 - c);
+ return [hcg[0], (v - c) * 100, (1 - v) * 100];
+};
+
+convert.hwb.hcg = function (hwb) {
+ var w = hwb[1] / 100;
+ var b = hwb[2] / 100;
+ var v = 1 - b;
+ var c = v - w;
+ var g = 0;
+
+ if (c < 1) {
+ g = (v - c) / (1 - c);
+ }
+
+ return [hwb[0], c * 100, g * 100];
+};
+
+convert.apple.rgb = function (apple) {
+ return [(apple[0] / 65535) * 255, (apple[1] / 65535) * 255, (apple[2] / 65535) * 255];
+};
+
+convert.rgb.apple = function (rgb) {
+ return [(rgb[0] / 255) * 65535, (rgb[1] / 255) * 65535, (rgb[2] / 255) * 65535];
+};
+
+convert.gray.rgb = function (args) {
+ return [args[0] / 100 * 255, args[0] / 100 * 255, args[0] / 100 * 255];
+};
+
+convert.gray.hsl = convert.gray.hsv = function (args) {
+ return [0, 0, args[0]];
+};
+
+convert.gray.hwb = function (gray) {
+ return [0, 100, gray[0]];
+};
+
+convert.gray.cmyk = function (gray) {
+ return [0, 0, 0, gray[0]];
+};
+
+convert.gray.lab = function (gray) {
+ return [gray[0], 0, 0];
+};
+
+convert.gray.hex = function (gray) {
+ var val = Math.round(gray[0] / 100 * 255) & 0xFF;
+ var integer = (val << 16) + (val << 8) + val;
+
+ var string = integer.toString(16).toUpperCase();
+ return '000000'.substring(string.length) + string;
+};
+
+convert.rgb.gray = function (rgb) {
+ var val = (rgb[0] + rgb[1] + rgb[2]) / 3;
+ return [val / 255 * 100];
+};
+
+
+/***/ }),
+
+/***/ 5458:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var conversions = __nccwpck_require__(591);
+var route = __nccwpck_require__(9769);
+
+var convert = {};
+
+var models = Object.keys(conversions);
+
+function wrapRaw(fn) {
+ var wrappedFn = function (args) {
+ if (args === undefined || args === null) {
+ return args;
+ }
+
+ if (arguments.length > 1) {
+ args = Array.prototype.slice.call(arguments);
+ }
+
+ return fn(args);
+ };
+
+ // preserve .conversion property if there is one
+ if ('conversion' in fn) {
+ wrappedFn.conversion = fn.conversion;
+ }
+
+ return wrappedFn;
+}
+
+function wrapRounded(fn) {
+ var wrappedFn = function (args) {
+ if (args === undefined || args === null) {
+ return args;
+ }
+
+ if (arguments.length > 1) {
+ args = Array.prototype.slice.call(arguments);
+ }
+
+ var result = fn(args);
+
+ // we're assuming the result is an array here.
+ // see notice in conversions.js; don't use box types
+ // in conversion functions.
+ if (typeof result === 'object') {
+ for (var len = result.length, i = 0; i < len; i++) {
+ result[i] = Math.round(result[i]);
+ }
+ }
+
+ return result;
+ };
+
+ // preserve .conversion property if there is one
+ if ('conversion' in fn) {
+ wrappedFn.conversion = fn.conversion;
+ }
+
+ return wrappedFn;
+}
+
+models.forEach(function (fromModel) {
+ convert[fromModel] = {};
+
+ Object.defineProperty(convert[fromModel], 'channels', {value: conversions[fromModel].channels});
+ Object.defineProperty(convert[fromModel], 'labels', {value: conversions[fromModel].labels});
+
+ var routes = route(fromModel);
+ var routeModels = Object.keys(routes);
+
+ routeModels.forEach(function (toModel) {
+ var fn = routes[toModel];
+
+ convert[fromModel][toModel] = wrapRounded(fn);
+ convert[fromModel][toModel].raw = wrapRaw(fn);
+ });
+});
+
+module.exports = convert;
+
+
+/***/ }),
+
+/***/ 9769:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var conversions = __nccwpck_require__(591);
+
+/*
+ this function routes a model to all other models.
+
+ all functions that are routed have a property `.conversion` attached
+ to the returned synthetic function. This property is an array
+ of strings, each with the steps in between the 'from' and 'to'
+ color models (inclusive).
+
+ conversions that are not possible simply are not included.
+*/
+
+function buildGraph() {
+ var graph = {};
+ // https://jsperf.com/object-keys-vs-for-in-with-closure/3
+ var models = Object.keys(conversions);
+
+ for (var len = models.length, i = 0; i < len; i++) {
+ graph[models[i]] = {
+ // http://jsperf.com/1-vs-infinity
+ // micro-opt, but this is simple.
+ distance: -1,
+ parent: null
+ };
+ }
+
+ return graph;
+}
+
+// https://en.wikipedia.org/wiki/Breadth-first_search
+function deriveBFS(fromModel) {
+ var graph = buildGraph();
+ var queue = [fromModel]; // unshift -> queue -> pop
+
+ graph[fromModel].distance = 0;
+
+ while (queue.length) {
+ var current = queue.pop();
+ var adjacents = Object.keys(conversions[current]);
+
+ for (var len = adjacents.length, i = 0; i < len; i++) {
+ var adjacent = adjacents[i];
+ var node = graph[adjacent];
+
+ if (node.distance === -1) {
+ node.distance = graph[current].distance + 1;
+ node.parent = current;
+ queue.unshift(adjacent);
+ }
+ }
+ }
+
+ return graph;
+}
+
+function link(from, to) {
+ return function (args) {
+ return to(from(args));
+ };
+}
+
+function wrapConversion(toModel, graph) {
+ var path = [graph[toModel].parent, toModel];
+ var fn = conversions[graph[toModel].parent][toModel];
+
+ var cur = graph[toModel].parent;
+ while (graph[cur].parent) {
+ path.unshift(graph[cur].parent);
+ fn = link(conversions[graph[cur].parent][cur], fn);
+ cur = graph[cur].parent;
+ }
+
+ fn.conversion = path;
+ return fn;
+}
+
+module.exports = function (fromModel) {
+ var graph = deriveBFS(fromModel);
+ var conversion = {};
+
+ var models = Object.keys(graph);
+ for (var len = models.length, i = 0; i < len; i++) {
+ var toModel = models[i];
+ var node = graph[toModel];
+
+ if (node.parent === null) {
+ // no possible conversion, or this node is the source model.
+ continue;
+ }
+
+ conversion[toModel] = wrapConversion(toModel, graph);
+ }
+
+ return conversion;
+};
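+// Illustrative sketch: the exported function returns composed converters keyed
+// by target model, each tagged with the BFS path used to build it. For
+// fromModel 'rgb' (names below are placeholders):
+//   const routes = deriveAndWrap('rgb');   // i.e. the module.exports above
+//   routes.gray.conversion;                // ['rgb', 'gray']        (direct)
+//   routes.lch.conversion;                 // ['rgb', 'lab', 'lch']  (chained through lab)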
+
+
+
+/***/ }),
+
+/***/ 2780:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ "aliceblue": [240, 248, 255],
+ "antiquewhite": [250, 235, 215],
+ "aqua": [0, 255, 255],
+ "aquamarine": [127, 255, 212],
+ "azure": [240, 255, 255],
+ "beige": [245, 245, 220],
+ "bisque": [255, 228, 196],
+ "black": [0, 0, 0],
+ "blanchedalmond": [255, 235, 205],
+ "blue": [0, 0, 255],
+ "blueviolet": [138, 43, 226],
+ "brown": [165, 42, 42],
+ "burlywood": [222, 184, 135],
+ "cadetblue": [95, 158, 160],
+ "chartreuse": [127, 255, 0],
+ "chocolate": [210, 105, 30],
+ "coral": [255, 127, 80],
+ "cornflowerblue": [100, 149, 237],
+ "cornsilk": [255, 248, 220],
+ "crimson": [220, 20, 60],
+ "cyan": [0, 255, 255],
+ "darkblue": [0, 0, 139],
+ "darkcyan": [0, 139, 139],
+ "darkgoldenrod": [184, 134, 11],
+ "darkgray": [169, 169, 169],
+ "darkgreen": [0, 100, 0],
+ "darkgrey": [169, 169, 169],
+ "darkkhaki": [189, 183, 107],
+ "darkmagenta": [139, 0, 139],
+ "darkolivegreen": [85, 107, 47],
+ "darkorange": [255, 140, 0],
+ "darkorchid": [153, 50, 204],
+ "darkred": [139, 0, 0],
+ "darksalmon": [233, 150, 122],
+ "darkseagreen": [143, 188, 143],
+ "darkslateblue": [72, 61, 139],
+ "darkslategray": [47, 79, 79],
+ "darkslategrey": [47, 79, 79],
+ "darkturquoise": [0, 206, 209],
+ "darkviolet": [148, 0, 211],
+ "deeppink": [255, 20, 147],
+ "deepskyblue": [0, 191, 255],
+ "dimgray": [105, 105, 105],
+ "dimgrey": [105, 105, 105],
+ "dodgerblue": [30, 144, 255],
+ "firebrick": [178, 34, 34],
+ "floralwhite": [255, 250, 240],
+ "forestgreen": [34, 139, 34],
+ "fuchsia": [255, 0, 255],
+ "gainsboro": [220, 220, 220],
+ "ghostwhite": [248, 248, 255],
+ "gold": [255, 215, 0],
+ "goldenrod": [218, 165, 32],
+ "gray": [128, 128, 128],
+ "green": [0, 128, 0],
+ "greenyellow": [173, 255, 47],
+ "grey": [128, 128, 128],
+ "honeydew": [240, 255, 240],
+ "hotpink": [255, 105, 180],
+ "indianred": [205, 92, 92],
+ "indigo": [75, 0, 130],
+ "ivory": [255, 255, 240],
+ "khaki": [240, 230, 140],
+ "lavender": [230, 230, 250],
+ "lavenderblush": [255, 240, 245],
+ "lawngreen": [124, 252, 0],
+ "lemonchiffon": [255, 250, 205],
+ "lightblue": [173, 216, 230],
+ "lightcoral": [240, 128, 128],
+ "lightcyan": [224, 255, 255],
+ "lightgoldenrodyellow": [250, 250, 210],
+ "lightgray": [211, 211, 211],
+ "lightgreen": [144, 238, 144],
+ "lightgrey": [211, 211, 211],
+ "lightpink": [255, 182, 193],
+ "lightsalmon": [255, 160, 122],
+ "lightseagreen": [32, 178, 170],
+ "lightskyblue": [135, 206, 250],
+ "lightslategray": [119, 136, 153],
+ "lightslategrey": [119, 136, 153],
+ "lightsteelblue": [176, 196, 222],
+ "lightyellow": [255, 255, 224],
+ "lime": [0, 255, 0],
+ "limegreen": [50, 205, 50],
+ "linen": [250, 240, 230],
+ "magenta": [255, 0, 255],
+ "maroon": [128, 0, 0],
+ "mediumaquamarine": [102, 205, 170],
+ "mediumblue": [0, 0, 205],
+ "mediumorchid": [186, 85, 211],
+ "mediumpurple": [147, 112, 219],
+ "mediumseagreen": [60, 179, 113],
+ "mediumslateblue": [123, 104, 238],
+ "mediumspringgreen": [0, 250, 154],
+ "mediumturquoise": [72, 209, 204],
+ "mediumvioletred": [199, 21, 133],
+ "midnightblue": [25, 25, 112],
+ "mintcream": [245, 255, 250],
+ "mistyrose": [255, 228, 225],
+ "moccasin": [255, 228, 181],
+ "navajowhite": [255, 222, 173],
+ "navy": [0, 0, 128],
+ "oldlace": [253, 245, 230],
+ "olive": [128, 128, 0],
+ "olivedrab": [107, 142, 35],
+ "orange": [255, 165, 0],
+ "orangered": [255, 69, 0],
+ "orchid": [218, 112, 214],
+ "palegoldenrod": [238, 232, 170],
+ "palegreen": [152, 251, 152],
+ "paleturquoise": [175, 238, 238],
+ "palevioletred": [219, 112, 147],
+ "papayawhip": [255, 239, 213],
+ "peachpuff": [255, 218, 185],
+ "peru": [205, 133, 63],
+ "pink": [255, 192, 203],
+ "plum": [221, 160, 221],
+ "powderblue": [176, 224, 230],
+ "purple": [128, 0, 128],
+ "rebeccapurple": [102, 51, 153],
+ "red": [255, 0, 0],
+ "rosybrown": [188, 143, 143],
+ "royalblue": [65, 105, 225],
+ "saddlebrown": [139, 69, 19],
+ "salmon": [250, 128, 114],
+ "sandybrown": [244, 164, 96],
+ "seagreen": [46, 139, 87],
+ "seashell": [255, 245, 238],
+ "sienna": [160, 82, 45],
+ "silver": [192, 192, 192],
+ "skyblue": [135, 206, 235],
+ "slateblue": [106, 90, 205],
+ "slategray": [112, 128, 144],
+ "slategrey": [112, 128, 144],
+ "snow": [255, 250, 250],
+ "springgreen": [0, 255, 127],
+ "steelblue": [70, 130, 180],
+ "tan": [210, 180, 140],
+ "teal": [0, 128, 128],
+ "thistle": [216, 191, 216],
+ "tomato": [255, 99, 71],
+ "turquoise": [64, 224, 208],
+ "violet": [238, 130, 238],
+ "wheat": [245, 222, 179],
+ "white": [255, 255, 255],
+ "whitesmoke": [245, 245, 245],
+ "yellow": [255, 255, 0],
+ "yellowgreen": [154, 205, 50]
+};
+
+
+/***/ }),
+
+/***/ 5898:
+/***/ ((module) => {
+
+"use strict";
+
+
+var matchOperatorsRe = /[|\\{}()[\]^$+*?.]/g;
+
+module.exports = function (str) {
+ if (typeof str !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ return str.replace(matchOperatorsRe, '\\$&');
+};
+
+
+/***/ }),
+
+/***/ 3226:
+/***/ ((module) => {
+
+"use strict";
+
+module.exports = (flag, argv) => {
+ argv = argv || process.argv;
+ const prefix = flag.startsWith('-') ? '' : (flag.length === 1 ? '-' : '--');
+ const pos = argv.indexOf(prefix + flag);
+ const terminatorPos = argv.indexOf('--');
+ return pos !== -1 && (terminatorPos === -1 ? true : pos < terminatorPos);
+};
+
+
+/***/ }),
+
+/***/ 5317:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+const os = __nccwpck_require__(2037);
+const hasFlag = __nccwpck_require__(3226);
+
+const env = process.env;
+
+let forceColor;
+if (hasFlag('no-color') ||
+ hasFlag('no-colors') ||
+ hasFlag('color=false')) {
+ forceColor = false;
+} else if (hasFlag('color') ||
+ hasFlag('colors') ||
+ hasFlag('color=true') ||
+ hasFlag('color=always')) {
+ forceColor = true;
+}
+if ('FORCE_COLOR' in env) {
+ forceColor = env.FORCE_COLOR.length === 0 || parseInt(env.FORCE_COLOR, 10) !== 0;
+}
+
+function translateLevel(level) {
+ if (level === 0) {
+ return false;
+ }
+
+ return {
+ level,
+ hasBasic: true,
+ has256: level >= 2,
+ has16m: level >= 3
+ };
+}
+
+function supportsColor(stream) {
+ if (forceColor === false) {
+ return 0;
+ }
+
+ if (hasFlag('color=16m') ||
+ hasFlag('color=full') ||
+ hasFlag('color=truecolor')) {
+ return 3;
+ }
+
+ if (hasFlag('color=256')) {
+ return 2;
+ }
+
+ if (stream && !stream.isTTY && forceColor !== true) {
+ return 0;
+ }
+
+ const min = forceColor ? 1 : 0;
+
+ if (process.platform === 'win32') {
+ // Node.js 7.5.0 is the first version of Node.js to include a patch to
+ // libuv that enables 256 color output on Windows. Anything earlier and it
+ // won't work. However, here we target Node.js 8 at minimum as it is an LTS
+ // release, and Node.js 7 is not. Windows 10 build 10586 is the first Windows
+ // release that supports 256 colors. Windows 10 build 14931 is the first release
+ // that supports 16m/TrueColor.
+ const osRelease = os.release().split('.');
+ if (
+ Number(process.versions.node.split('.')[0]) >= 8 &&
+ Number(osRelease[0]) >= 10 &&
+ Number(osRelease[2]) >= 10586
+ ) {
+ return Number(osRelease[2]) >= 14931 ? 3 : 2;
+ }
+
+ return 1;
+ }
+
+ if ('CI' in env) {
+ if (['TRAVIS', 'CIRCLECI', 'APPVEYOR', 'GITLAB_CI'].some(sign => sign in env) || env.CI_NAME === 'codeship') {
+ return 1;
+ }
+
+ return min;
+ }
+
+ if ('TEAMCITY_VERSION' in env) {
+ return /^(9\.(0*[1-9]\d*)\.|\d{2,}\.)/.test(env.TEAMCITY_VERSION) ? 1 : 0;
+ }
+
+ if (env.COLORTERM === 'truecolor') {
+ return 3;
+ }
+
+ if ('TERM_PROGRAM' in env) {
+ const version = parseInt((env.TERM_PROGRAM_VERSION || '').split('.')[0], 10);
+
+ switch (env.TERM_PROGRAM) {
+ case 'iTerm.app':
+ return version >= 3 ? 3 : 2;
+ case 'Apple_Terminal':
+ return 2;
+ // No default
+ }
+ }
+
+ if (/-256(color)?$/i.test(env.TERM)) {
+ return 2;
+ }
+
+ if (/^screen|^xterm|^vt100|^vt220|^rxvt|color|ansi|cygwin|linux/i.test(env.TERM)) {
+ return 1;
+ }
+
+ if ('COLORTERM' in env) {
+ return 1;
+ }
+
+ if (env.TERM === 'dumb') {
+ return min;
+ }
+
+ return min;
+}
+
+function getSupportLevel(stream) {
+ const level = supportsColor(stream);
+ return translateLevel(level);
+}
+
+module.exports = {
+ supportsColor: getSupportLevel,
+ stdout: getSupportLevel(process.stdout),
+ stderr: getSupportLevel(process.stderr)
+};
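+// Illustrative sketch of the exported shape (assuming a non-Windows TTY with
+// TERM=xterm-256color, no CI variables and FORCE_COLOR unset):
+//   module.exports.stdout;                        // { level: 2, hasBasic: true, has256: true, has16m: false }
+//   module.exports.supportsColor(process.stderr); // same detection run against stderr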
+
+
+/***/ }),
+
+/***/ 334:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
+const REGEX_IS_INSTALLATION = /^ghs_/;
+const REGEX_IS_USER_TO_SERVER = /^ghu_/;
+async function auth(token) {
+ const isApp = token.split(/\./).length === 3;
+ const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
+ const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
+ const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
+ return {
+ type: "token",
+ token: token,
+ tokenType
+ };
+}
+
+/**
+ * Prefix token for usage in the Authorization header
+ *
+ * @param token OAuth token or JSON Web Token
+ */
+function withAuthorizationPrefix(token) {
+ if (token.split(/\./).length === 3) {
+ return `bearer ${token}`;
+ }
+
+ return `token ${token}`;
+}
+
+async function hook(token, request, route, parameters) {
+ const endpoint = request.endpoint.merge(route, parameters);
+ endpoint.headers.authorization = withAuthorizationPrefix(token);
+ return request(endpoint);
+}
+
+const createTokenAuth = function createTokenAuth(token) {
+ if (!token) {
+ throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
+ }
+
+ if (typeof token !== "string") {
+ throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
+ }
+
+ token = token.replace(/^(token|bearer) +/i, "");
+ return Object.assign(auth.bind(null, token), {
+ hook: hook.bind(null, token)
+ });
+};
+
+exports.createTokenAuth = createTokenAuth;
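+// Illustrative usage sketch (token value is a placeholder):
+//   const auth = createTokenAuth('ghs_xxxxxxxxxxxx');
+//   const authentication = await auth();
+//   // => { type: 'token', token: 'ghs_xxxxxxxxxxxx', tokenType: 'installation' }
+// hook() then applies the matching Authorization prefix ('token ...' here,
+// 'bearer ...' for three-part JWTs) before the request is sent.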
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 6762:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+var universalUserAgent = __nccwpck_require__(5030);
+var beforeAfterHook = __nccwpck_require__(3682);
+var request = __nccwpck_require__(6234);
+var graphql = __nccwpck_require__(8467);
+var authToken = __nccwpck_require__(334);
+
+function _objectWithoutPropertiesLoose(source, excluded) {
+ if (source == null) return {};
+ var target = {};
+ var sourceKeys = Object.keys(source);
+ var key, i;
+
+ for (i = 0; i < sourceKeys.length; i++) {
+ key = sourceKeys[i];
+ if (excluded.indexOf(key) >= 0) continue;
+ target[key] = source[key];
+ }
+
+ return target;
+}
+
+function _objectWithoutProperties(source, excluded) {
+ if (source == null) return {};
+
+ var target = _objectWithoutPropertiesLoose(source, excluded);
+
+ var key, i;
+
+ if (Object.getOwnPropertySymbols) {
+ var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
+
+ for (i = 0; i < sourceSymbolKeys.length; i++) {
+ key = sourceSymbolKeys[i];
+ if (excluded.indexOf(key) >= 0) continue;
+ if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
+ target[key] = source[key];
+ }
+ }
+
+ return target;
+}
+
+const VERSION = "3.6.0";
+
+const _excluded = ["authStrategy"];
+class Octokit {
+ constructor(options = {}) {
+ const hook = new beforeAfterHook.Collection();
+ const requestDefaults = {
+ baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
+ headers: {},
+ request: Object.assign({}, options.request, {
+ // @ts-ignore internal usage only, no need to type
+ hook: hook.bind(null, "request")
+ }),
+ mediaType: {
+ previews: [],
+ format: ""
+ }
+ }; // prepend default user agent with `options.userAgent` if set
+
+ requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
+
+ if (options.baseUrl) {
+ requestDefaults.baseUrl = options.baseUrl;
+ }
+
+ if (options.previews) {
+ requestDefaults.mediaType.previews = options.previews;
+ }
+
+ if (options.timeZone) {
+ requestDefaults.headers["time-zone"] = options.timeZone;
+ }
+
+ this.request = request.request.defaults(requestDefaults);
+ this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
+ this.log = Object.assign({
+ debug: () => {},
+ info: () => {},
+ warn: console.warn.bind(console),
+ error: console.error.bind(console)
+ }, options.log);
+ this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
+ // is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
+ // (2) If only `options.auth` is set, use the default token authentication strategy.
+ // (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
+ // TODO: type `options.auth` based on `options.authStrategy`.
+
+ if (!options.authStrategy) {
+ if (!options.auth) {
+ // (1)
+ this.auth = async () => ({
+ type: "unauthenticated"
+ });
+ } else {
+ // (2)
+ const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
+
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
+ }
+ } else {
+ const {
+ authStrategy
+ } = options,
+ otherOptions = _objectWithoutProperties(options, _excluded);
+
+ const auth = authStrategy(Object.assign({
+ request: this.request,
+ log: this.log,
+ // we pass the current octokit instance as well as its constructor options
+ // to allow for authentication strategies that return a new octokit instance
+ // that shares the same internal state as the current one. The original
+ // requirement for this was the "event-octokit" authentication strategy
+ // of https://github.com/probot/octokit-auth-probot.
+ octokit: this,
+ octokitOptions: otherOptions
+ }, options.auth)); // @ts-ignore ¯\_(ツ)_/¯
+
+ hook.wrap("request", auth.hook);
+ this.auth = auth;
+ } // apply plugins
+ // https://stackoverflow.com/a/16345172
+
+
+ const classConstructor = this.constructor;
+ classConstructor.plugins.forEach(plugin => {
+ Object.assign(this, plugin(this, options));
+ });
+ }
+
+ static defaults(defaults) {
+ const OctokitWithDefaults = class extends this {
+ constructor(...args) {
+ const options = args[0] || {};
+
+ if (typeof defaults === "function") {
+ super(defaults(options));
+ return;
+ }
+
+ super(Object.assign({}, defaults, options, options.userAgent && defaults.userAgent ? {
+ userAgent: `${options.userAgent} ${defaults.userAgent}`
+ } : null));
+ }
+
+ };
+ return OctokitWithDefaults;
+ }
+ /**
+ * Attach a plugin (or many) to your Octokit instance.
+ *
+ * @example
+ * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...)
+ */
+
+
+ static plugin(...newPlugins) {
+ var _a;
+
+ const currentPlugins = this.plugins;
+ const NewOctokit = (_a = class extends this {}, _a.plugins = currentPlugins.concat(newPlugins.filter(plugin => !currentPlugins.includes(plugin))), _a);
+ return NewOctokit;
+ }
+
+}
+Octokit.VERSION = VERSION;
+Octokit.plugins = [];
+
+exports.Octokit = Octokit;
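+// Illustrative call sites for the three auth branches handled in the
+// constructor above (identifiers are placeholders):
+//   new Octokit();                              // (1) unauthenticated; auth() is a no-op
+//   new Octokit({ auth: token });               // (2) token auth via createTokenAuth
+//   new Octokit({ authStrategy, auth: opts });  // (3) custom strategy, auth passed through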
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 9440:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+var isPlainObject = __nccwpck_require__(3287);
+var universalUserAgent = __nccwpck_require__(5030);
+
+function lowercaseKeys(object) {
+ if (!object) {
+ return {};
+ }
+
+ return Object.keys(object).reduce((newObj, key) => {
+ newObj[key.toLowerCase()] = object[key];
+ return newObj;
+ }, {});
+}
+
+function mergeDeep(defaults, options) {
+ const result = Object.assign({}, defaults);
+ Object.keys(options).forEach(key => {
+ if (isPlainObject.isPlainObject(options[key])) {
+ if (!(key in defaults)) Object.assign(result, {
+ [key]: options[key]
+ });else result[key] = mergeDeep(defaults[key], options[key]);
+ } else {
+ Object.assign(result, {
+ [key]: options[key]
+ });
+ }
+ });
+ return result;
+}
+
+function removeUndefinedProperties(obj) {
+ for (const key in obj) {
+ if (obj[key] === undefined) {
+ delete obj[key];
+ }
+ }
+
+ return obj;
+}
+
+function merge(defaults, route, options) {
+ if (typeof route === "string") {
+ let [method, url] = route.split(" ");
+ options = Object.assign(url ? {
+ method,
+ url
+ } : {
+ url: method
+ }, options);
+ } else {
+ options = Object.assign({}, route);
+ } // lowercase header names before merging with defaults to avoid duplicates
+
+
+ options.headers = lowercaseKeys(options.headers); // remove properties with undefined values before merging
+
+ removeUndefinedProperties(options);
+ removeUndefinedProperties(options.headers);
+ const mergedOptions = mergeDeep(defaults || {}, options); // mediaType.previews arrays are merged, instead of overwritten
+
+ if (defaults && defaults.mediaType.previews.length) {
+ mergedOptions.mediaType.previews = defaults.mediaType.previews.filter(preview => !mergedOptions.mediaType.previews.includes(preview)).concat(mergedOptions.mediaType.previews);
+ }
+
+ mergedOptions.mediaType.previews = mergedOptions.mediaType.previews.map(preview => preview.replace(/-preview/, ""));
+ return mergedOptions;
+}
+
+function addQueryParameters(url, parameters) {
+ const separator = /\?/.test(url) ? "&" : "?";
+ const names = Object.keys(parameters);
+
+ if (names.length === 0) {
+ return url;
+ }
+
+ return url + separator + names.map(name => {
+ if (name === "q") {
+ return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+");
+ }
+
+ return `${name}=${encodeURIComponent(parameters[name])}`;
+ }).join("&");
+}
+
+const urlVariableRegex = /\{[^}]+\}/g;
+
+function removeNonChars(variableName) {
+ return variableName.replace(/^\W+|\W+$/g, "").split(/,/);
+}
+
+function extractUrlVariableNames(url) {
+ const matches = url.match(urlVariableRegex);
+
+ if (!matches) {
+ return [];
+ }
+
+ return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []);
+}
+
+function omit(object, keysToOmit) {
+ return Object.keys(object).filter(option => !keysToOmit.includes(option)).reduce((obj, key) => {
+ obj[key] = object[key];
+ return obj;
+ }, {});
+}
+
+// Based on https://github.com/bramstein/url-template, licensed under BSD
+// TODO: create separate package.
+//
+// Copyright (c) 2012-2014, Bram Stein
+// All rights reserved.
+// Redistribution and use in source and binary forms, with or without
+// modification, are permitted provided that the following conditions
+// are met:
+// 1. Redistributions of source code must retain the above copyright
+// notice, this list of conditions and the following disclaimer.
+// 2. Redistributions in binary form must reproduce the above copyright
+// notice, this list of conditions and the following disclaimer in the
+// documentation and/or other materials provided with the distribution.
+// 3. The name of the author may not be used to endorse or promote products
+// derived from this software without specific prior written permission.
+// THIS SOFTWARE IS PROVIDED BY THE AUTHOR "AS IS" AND ANY EXPRESS OR IMPLIED
+// WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
+// MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
+// EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT,
+// INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
+// BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
+// OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
+// NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
+// EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+/* istanbul ignore file */
+function encodeReserved(str) {
+ return str.split(/(%[0-9A-Fa-f]{2})/g).map(function (part) {
+ if (!/%[0-9A-Fa-f]/.test(part)) {
+ part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]");
+ }
+
+ return part;
+ }).join("");
+}
+
+function encodeUnreserved(str) {
+ return encodeURIComponent(str).replace(/[!'()*]/g, function (c) {
+ return "%" + c.charCodeAt(0).toString(16).toUpperCase();
+ });
+}
+
+function encodeValue(operator, value, key) {
+ value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value);
+
+ if (key) {
+ return encodeUnreserved(key) + "=" + value;
+ } else {
+ return value;
+ }
+}
+
+function isDefined(value) {
+ return value !== undefined && value !== null;
+}
+
+function isKeyOperator(operator) {
+ return operator === ";" || operator === "&" || operator === "?";
+}
+
+function getValues(context, operator, key, modifier) {
+ var value = context[key],
+ result = [];
+
+ if (isDefined(value) && value !== "") {
+ if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") {
+ value = value.toString();
+
+ if (modifier && modifier !== "*") {
+ value = value.substring(0, parseInt(modifier, 10));
+ }
+
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ } else {
+ if (modifier === "*") {
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ result.push(encodeValue(operator, value, isKeyOperator(operator) ? key : ""));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ result.push(encodeValue(operator, value[k], k));
+ }
+ });
+ }
+ } else {
+ const tmp = [];
+
+ if (Array.isArray(value)) {
+ value.filter(isDefined).forEach(function (value) {
+ tmp.push(encodeValue(operator, value));
+ });
+ } else {
+ Object.keys(value).forEach(function (k) {
+ if (isDefined(value[k])) {
+ tmp.push(encodeUnreserved(k));
+ tmp.push(encodeValue(operator, value[k].toString()));
+ }
+ });
+ }
+
+ if (isKeyOperator(operator)) {
+ result.push(encodeUnreserved(key) + "=" + tmp.join(","));
+ } else if (tmp.length !== 0) {
+ result.push(tmp.join(","));
+ }
+ }
+ }
+ } else {
+ if (operator === ";") {
+ if (isDefined(value)) {
+ result.push(encodeUnreserved(key));
+ }
+ } else if (value === "" && (operator === "&" || operator === "?")) {
+ result.push(encodeUnreserved(key) + "=");
+ } else if (value === "") {
+ result.push("");
+ }
+ }
+
+ return result;
+}
+
+function parseUrl(template) {
+ return {
+ expand: expand.bind(null, template)
+ };
+}
+
+function expand(template, context) {
+ var operators = ["+", "#", ".", "/", ";", "?", "&"];
+ return template.replace(/\{([^\{\}]+)\}|([^\{\}]+)/g, function (_, expression, literal) {
+ if (expression) {
+ let operator = "";
+ const values = [];
+
+ if (operators.indexOf(expression.charAt(0)) !== -1) {
+ operator = expression.charAt(0);
+ expression = expression.substr(1);
+ }
+
+ expression.split(/,/g).forEach(function (variable) {
+ var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable);
+ values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3]));
+ });
+
+ if (operator && operator !== "+") {
+ var separator = ",";
+
+ if (operator === "?") {
+ separator = "&";
+ } else if (operator !== "#") {
+ separator = operator;
+ }
+
+ return (values.length !== 0 ? operator : "") + values.join(separator);
+ } else {
+ return values.join(",");
+ }
+ } else {
+ return encodeReserved(literal);
+ }
+ });
+}
+
+function parse(options) {
+ // https://fetch.spec.whatwg.org/#methods
+ let method = options.method.toUpperCase(); // replace :varname with {varname} to make it RFC 6570 compatible
+
+ let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}");
+ let headers = Object.assign({}, options.headers);
+ let body;
+ let parameters = omit(options, ["method", "baseUrl", "url", "headers", "request", "mediaType"]); // extract variable names from URL to calculate remaining variables later
+
+ const urlVariableNames = extractUrlVariableNames(url);
+ url = parseUrl(url).expand(parameters);
+
+ if (!/^http/.test(url)) {
+ url = options.baseUrl + url;
+ }
+
+ const omittedParameters = Object.keys(options).filter(option => urlVariableNames.includes(option)).concat("baseUrl");
+ const remainingParameters = omit(parameters, omittedParameters);
+ const isBinaryRequest = /application\/octet-stream/i.test(headers.accept);
+
+ if (!isBinaryRequest) {
+ if (options.mediaType.format) {
+ // e.g. application/vnd.github.v3+json => application/vnd.github.v3.raw
+ headers.accept = headers.accept.split(/,/).map(preview => preview.replace(/application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, `application/vnd$1$2.${options.mediaType.format}`)).join(",");
+ }
+
+ if (options.mediaType.previews.length) {
+ const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || [];
+ headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map(preview => {
+ const format = options.mediaType.format ? `.${options.mediaType.format}` : "+json";
+ return `application/vnd.github.${preview}-preview${format}`;
+ }).join(",");
+ }
+ } // for GET/HEAD requests, set URL query parameters from remaining parameters
+ // for PATCH/POST/PUT/DELETE requests, set request body from remaining parameters
+
+
+ if (["GET", "HEAD"].includes(method)) {
+ url = addQueryParameters(url, remainingParameters);
+ } else {
+ if ("data" in remainingParameters) {
+ body = remainingParameters.data;
+ } else {
+ if (Object.keys(remainingParameters).length) {
+ body = remainingParameters;
+ } else {
+ headers["content-length"] = 0;
+ }
+ }
+ } // default content-type for JSON if body is set
+
+
+ if (!headers["content-type"] && typeof body !== "undefined") {
+ headers["content-type"] = "application/json; charset=utf-8";
+ } // GitHub expects 'content-length: 0' header for PUT/PATCH requests without body.
+ // fetch does not allow to set `content-length` header, but we can set body to an empty string
+
+
+ if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") {
+ body = "";
+ } // Only return body/request keys if present
+
+
+ return Object.assign({
+ method,
+ url,
+ headers
+ }, typeof body !== "undefined" ? {
+ body
+ } : null, options.request ? {
+ request: options.request
+ } : null);
+}
+
+function endpointWithDefaults(defaults, route, options) {
+ return parse(merge(defaults, route, options));
+}
+
+function withDefaults(oldDefaults, newDefaults) {
+ const DEFAULTS = merge(oldDefaults, newDefaults);
+ const endpoint = endpointWithDefaults.bind(null, DEFAULTS);
+ return Object.assign(endpoint, {
+ DEFAULTS,
+ defaults: withDefaults.bind(null, DEFAULTS),
+ merge: merge.bind(null, DEFAULTS),
+ parse
+ });
+}
+
+const VERSION = "6.0.12";
+
+const userAgent = `octokit-endpoint.js/${VERSION} ${universalUserAgent.getUserAgent()}`; // DEFAULTS has all properties set that EndpointOptions has, except url.
+// So we use RequestParameters and add method as additional required property.
+
+const DEFAULTS = {
+ method: "GET",
+ baseUrl: "https://api.github.com",
+ headers: {
+ accept: "application/vnd.github.v3+json",
+ "user-agent": userAgent
+ },
+ mediaType: {
+ format: "",
+ previews: []
+ }
+};
+
+const endpoint = withDefaults(null, DEFAULTS);
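+// A sketch of what the configured `endpoint` resolves to (the owner/repo and
+// parameter values below are illustrative, not taken from this repository):
+//
+//   endpoint("GET /repos/{owner}/{repo}/issues", { owner: "octocat", repo: "hello-world", state: "open" })
+//   // => { method: "GET",
+//   //      url: "https://api.github.com/repos/octocat/hello-world/issues?state=open",
+//   //      headers: { accept: "application/vnd.github.v3+json", "user-agent": userAgent } }
+//
+// URL placeholders are expanded by `parseUrl(...).expand(...)`; leftover
+// parameters become query parameters for GET/HEAD requests and the JSON body
+// otherwise (see `parse` above).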
+
+exports.endpoint = endpoint;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 8467:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+var request = __nccwpck_require__(6234);
+var universalUserAgent = __nccwpck_require__(5030);
+
+const VERSION = "4.8.0";
+
+function _buildMessageForResponseErrors(data) {
+ return `Request failed due to following response errors:\n` + data.errors.map(e => ` - ${e.message}`).join("\n");
+}
+
+class GraphqlResponseError extends Error {
+ constructor(request, headers, response) {
+ super(_buildMessageForResponseErrors(response));
+ this.request = request;
+ this.headers = headers;
+ this.response = response;
+ this.name = "GraphqlResponseError"; // Expose the errors and response data in their shorthand properties.
+
+ this.errors = response.errors;
+ this.data = response.data; // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+ }
+
+}
+
+const NON_VARIABLE_OPTIONS = ["method", "baseUrl", "url", "headers", "request", "query", "mediaType"];
+const FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"];
+const GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/;
+function graphql(request, query, options) {
+ if (options) {
+ if (typeof query === "string" && "query" in options) {
+ return Promise.reject(new Error(`[@octokit/graphql] "query" cannot be used as variable name`));
+ }
+
+ for (const key in options) {
+ if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) continue;
+ return Promise.reject(new Error(`[@octokit/graphql] "${key}" cannot be used as variable name`));
+ }
+ }
+
+ const parsedOptions = typeof query === "string" ? Object.assign({
+ query
+ }, options) : query;
+ const requestOptions = Object.keys(parsedOptions).reduce((result, key) => {
+ if (NON_VARIABLE_OPTIONS.includes(key)) {
+ result[key] = parsedOptions[key];
+ return result;
+ }
+
+ if (!result.variables) {
+ result.variables = {};
+ }
+
+ result.variables[key] = parsedOptions[key];
+ return result;
+ }, {}); // workaround for GitHub Enterprise baseUrl set with /api/v3 suffix
+ // https://github.com/octokit/auth-app.js/issues/111#issuecomment-657610451
+
+ const baseUrl = parsedOptions.baseUrl || request.endpoint.DEFAULTS.baseUrl;
+
+ if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) {
+ requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql");
+ }
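+  // For example (hypothetical GHES host): a baseUrl of
+  // "https://github.example.com/api/v3" is rewritten to
+  // "https://github.example.com/api/graphql", since GraphQL lives outside
+  // the REST /api/v3 prefix on GitHub Enterprise Server.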
+
+ return request(requestOptions).then(response => {
+ if (response.data.errors) {
+ const headers = {};
+
+ for (const key of Object.keys(response.headers)) {
+ headers[key] = response.headers[key];
+ }
+
+ throw new GraphqlResponseError(requestOptions, headers, response.data);
+ }
+
+ return response.data.data;
+ });
+}
+
+function withDefaults(request$1, newDefaults) {
+ const newRequest = request$1.defaults(newDefaults);
+
+ const newApi = (query, options) => {
+ return graphql(newRequest, query, options);
+ };
+
+ return Object.assign(newApi, {
+ defaults: withDefaults.bind(null, newRequest),
+ endpoint: request.request.endpoint
+ });
+}
+
+const graphql$1 = withDefaults(request.request, {
+ headers: {
+ "user-agent": `octokit-graphql.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ },
+ method: "POST",
+ url: "/graphql"
+});
+function withCustomRequest(customRequest) {
+ return withDefaults(customRequest, {
+ method: "POST",
+ url: "/graphql"
+ });
+}
+
+exports.GraphqlResponseError = GraphqlResponseError;
+exports.graphql = graphql$1;
+exports.withCustomRequest = withCustomRequest;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 4193:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+const VERSION = "2.21.3";
+
+function ownKeys(object, enumerableOnly) {
+ var keys = Object.keys(object);
+
+ if (Object.getOwnPropertySymbols) {
+ var symbols = Object.getOwnPropertySymbols(object);
+ enumerableOnly && (symbols = symbols.filter(function (sym) {
+ return Object.getOwnPropertyDescriptor(object, sym).enumerable;
+ })), keys.push.apply(keys, symbols);
+ }
+
+ return keys;
+}
+
+function _objectSpread2(target) {
+ for (var i = 1; i < arguments.length; i++) {
+ var source = null != arguments[i] ? arguments[i] : {};
+ i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
+ _defineProperty(target, key, source[key]);
+ }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
+ Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
+ });
+ }
+
+ return target;
+}
+
+function _defineProperty(obj, key, value) {
+ if (key in obj) {
+ Object.defineProperty(obj, key, {
+ value: value,
+ enumerable: true,
+ configurable: true,
+ writable: true
+ });
+ } else {
+ obj[key] = value;
+ }
+
+ return obj;
+}
+
+/**
+ * Some “list” responses that can be paginated have a different response structure
+ *
+ * They have a `total_count` key in the response (search also has `incomplete_results`,
+ * /installation/repositories also has `repository_selection`), as well as a key whose
+ * name varies from endpoint to endpoint and which contains the list of items.
+ *
+ * Octokit normalizes these responses so that paginated results are always returned following
+ * the same structure. One challenge is that if the list response has only one page, no Link
+ * header is provided, so this header alone is not sufficient to check whether a response is
+ * paginated or not.
+ *
+ * We check if a "total_count" key is present in the response data, but also make sure that
+ * a "url" property is not, as the "Get the combined status for a specific ref" endpoint would
+ * otherwise match: https://developer.github.com/v3/repos/statuses/#get-the-combined-status-for-a-specific-ref
+ */
+function normalizePaginatedListResponse(response) {
+ // endpoints can respond with 204 if repository is empty
+ if (!response.data) {
+ return _objectSpread2(_objectSpread2({}, response), {}, {
+ data: []
+ });
+ }
+
+ const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data);
+ if (!responseNeedsNormalization) return response; // keep the additional properties intact as there is currently no other way
+ // to retrieve the same information.
+
+ const incompleteResults = response.data.incomplete_results;
+ const repositorySelection = response.data.repository_selection;
+ const totalCount = response.data.total_count;
+ delete response.data.incomplete_results;
+ delete response.data.repository_selection;
+ delete response.data.total_count;
+ const namespaceKey = Object.keys(response.data)[0];
+ const data = response.data[namespaceKey];
+ response.data = data;
+
+ if (typeof incompleteResults !== "undefined") {
+ response.data.incomplete_results = incompleteResults;
+ }
+
+ if (typeof repositorySelection !== "undefined") {
+ response.data.repository_selection = repositorySelection;
+ }
+
+ response.data.total_count = totalCount;
+ return response;
+}
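+// Illustration (hypothetical artifacts payload): a response body of
+//   { total_count: 2, artifacts: [a, b] }
+// is normalized so that `response.data` becomes the bare array `[a, b]`, with
+// `total_count` re-attached as `response.data.total_count === 2`, matching the
+// shape of endpoints that already return a plain array.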
+
+function iterator(octokit, route, parameters) {
+ const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters);
+ const requestMethod = typeof route === "function" ? route : octokit.request;
+ const method = options.method;
+ const headers = options.headers;
+ let url = options.url;
+ return {
+ [Symbol.asyncIterator]: () => ({
+ async next() {
+ if (!url) return {
+ done: true
+ };
+
+ try {
+ const response = await requestMethod({
+ method,
+ url,
+ headers
+ });
+ const normalizedResponse = normalizePaginatedListResponse(response); // `response.headers.link` format:
+ // '<https://api.github.com/...?page=2>; rel="next", <https://api.github.com/...?page=50>; rel="last"'
+ // sets `url` to undefined if "next" URL is not present or `link` header is not set
+
+ url = ((normalizedResponse.headers.link || "").match(/<([^>]+)>;\s*rel="next"/) || [])[1];
+ return {
+ value: normalizedResponse
+ };
+ } catch (error) {
+ if (error.status !== 409) throw error;
+ url = "";
+ return {
+ value: {
+ status: 200,
+ headers: {},
+ data: []
+ }
+ };
+ }
+ }
+
+ })
+ };
+}
+
+function paginate(octokit, route, parameters, mapFn) {
+ if (typeof parameters === "function") {
+ mapFn = parameters;
+ parameters = undefined;
+ }
+
+ return gather(octokit, [], iterator(octokit, route, parameters)[Symbol.asyncIterator](), mapFn);
+}
+
+function gather(octokit, results, iterator, mapFn) {
+ return iterator.next().then(result => {
+ if (result.done) {
+ return results;
+ }
+
+ let earlyExit = false;
+
+ function done() {
+ earlyExit = true;
+ }
+
+ results = results.concat(mapFn ? mapFn(result.value, done) : result.value.data);
+
+ if (earlyExit) {
+ return results;
+ }
+
+ return gather(octokit, results, iterator, mapFn);
+ });
+}
+
+const composePaginateRest = Object.assign(paginate, {
+ iterator
+});
+
+const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
+
+function isPaginatingEndpoint(arg) {
+ if (typeof arg === "string") {
+ return paginatingEndpoints.includes(arg);
+ } else {
+ return false;
+ }
+}
+
+/**
+ * @param octokit Octokit instance
+ * @param options Options passed to Octokit constructor
+ */
+
+function paginateRest(octokit) {
+ return {
+ paginate: Object.assign(paginate.bind(null, octokit), {
+ iterator: iterator.bind(null, octokit)
+ })
+ };
+}
+paginateRest.VERSION = VERSION;
+
+exports.composePaginateRest = composePaginateRest;
+exports.isPaginatingEndpoint = isPaginatingEndpoint;
+exports.paginateRest = paginateRest;
+exports.paginatingEndpoints = paginatingEndpoints;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 3044:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+function ownKeys(object, enumerableOnly) {
+ var keys = Object.keys(object);
+
+ if (Object.getOwnPropertySymbols) {
+ var symbols = Object.getOwnPropertySymbols(object);
+
+ if (enumerableOnly) {
+ symbols = symbols.filter(function (sym) {
+ return Object.getOwnPropertyDescriptor(object, sym).enumerable;
+ });
+ }
+
+ keys.push.apply(keys, symbols);
+ }
+
+ return keys;
+}
+
+function _objectSpread2(target) {
+ for (var i = 1; i < arguments.length; i++) {
+ var source = arguments[i] != null ? arguments[i] : {};
+
+ if (i % 2) {
+ ownKeys(Object(source), true).forEach(function (key) {
+ _defineProperty(target, key, source[key]);
+ });
+ } else if (Object.getOwnPropertyDescriptors) {
+ Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
+ } else {
+ ownKeys(Object(source)).forEach(function (key) {
+ Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
+ });
+ }
+ }
+
+ return target;
+}
+
+function _defineProperty(obj, key, value) {
+ if (key in obj) {
+ Object.defineProperty(obj, key, {
+ value: value,
+ enumerable: true,
+ configurable: true,
+ writable: true
+ });
+ } else {
+ obj[key] = value;
+ }
+
+ return obj;
+}
+
+const Endpoints = {
+ actions: {
+ addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"],
+ addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
+ addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
+ approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
+ cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
+ createOrUpdateEnvironmentSecret: ["PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
+ createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"],
+ createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}"],
createRegistrationTokenForOrg: ["POST /orgs/{org}/actions/runners/registration-token"],
createRegistrationTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/registration-token"],
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
@@ -4339,10459 +8638,30665 @@ const Endpoints = {
removeUserAccessRestrictions: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
mapToData: "users"
}],
- renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
- replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
- requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
- setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
- setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
- mapToData: "apps"
+ renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
+ replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
+ requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
+ setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
+ setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
+ mapToData: "apps"
+ }],
+ setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
+ mapToData: "contexts"
+ }],
+ setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
+ mapToData: "teams"
+ }],
+ setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
+ mapToData: "users"
+ }],
+ testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
+ transfer: ["POST /repos/{owner}/{repo}/transfer"],
+ update: ["PATCH /repos/{owner}/{repo}"],
+ updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
+ updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
+ updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
+ updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
+ updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
+ updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
+ updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
+ updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
+ renamed: ["repos", "updateStatusCheckProtection"]
+ }],
+ updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
+ updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
+ updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
+ uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
+ baseUrl: "https://uploads.github.com"
+ }]
+ },
+ search: {
+ code: ["GET /search/code"],
+ commits: ["GET /search/commits"],
+ issuesAndPullRequests: ["GET /search/issues"],
+ labels: ["GET /search/labels"],
+ repos: ["GET /search/repositories"],
+ topics: ["GET /search/topics"],
+ users: ["GET /search/users"]
+ },
+ secretScanning: {
+ getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
+ listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"],
+ listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
+ listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
+ listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"],
+ updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
+ },
+ teams: {
+ addOrUpdateMembershipForUserInOrg: ["PUT /orgs/{org}/teams/{team_slug}/memberships/{username}"],
+ addOrUpdateProjectPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
+ addOrUpdateRepoPermissionsInOrg: ["PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
+ checkPermissionsForProjectInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
+ checkPermissionsForRepoInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
+ create: ["POST /orgs/{org}/teams"],
+ createDiscussionCommentInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
+ createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"],
+ deleteDiscussionCommentInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
+ deleteDiscussionInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
+ deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"],
+ getByName: ["GET /orgs/{org}/teams/{team_slug}"],
+ getDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
+ getDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
+ getMembershipForUserInOrg: ["GET /orgs/{org}/teams/{team_slug}/memberships/{username}"],
+ list: ["GET /orgs/{org}/teams"],
+ listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"],
+ listDiscussionCommentsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments"],
+ listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"],
+ listForAuthenticatedUser: ["GET /user/teams"],
+ listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"],
+ listPendingInvitationsInOrg: ["GET /orgs/{org}/teams/{team_slug}/invitations"],
+ listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"],
+ listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"],
+ removeMembershipForUserInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}"],
+ removeProjectInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}"],
+ removeRepoInOrg: ["DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}"],
+ updateDiscussionCommentInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}"],
+ updateDiscussionInOrg: ["PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}"],
+ updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"]
+ },
+ users: {
+ addEmailForAuthenticated: ["POST /user/emails", {}, {
+ renamed: ["users", "addEmailForAuthenticatedUser"]
+ }],
+ addEmailForAuthenticatedUser: ["POST /user/emails"],
+ block: ["PUT /user/blocks/{username}"],
+ checkBlocked: ["GET /user/blocks/{username}"],
+ checkFollowingForUser: ["GET /users/{username}/following/{target_user}"],
+ checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"],
+ createGpgKeyForAuthenticated: ["POST /user/gpg_keys", {}, {
+ renamed: ["users", "createGpgKeyForAuthenticatedUser"]
+ }],
+ createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"],
+ createPublicSshKeyForAuthenticated: ["POST /user/keys", {}, {
+ renamed: ["users", "createPublicSshKeyForAuthenticatedUser"]
+ }],
+ createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"],
+ deleteEmailForAuthenticated: ["DELETE /user/emails", {}, {
+ renamed: ["users", "deleteEmailForAuthenticatedUser"]
+ }],
+ deleteEmailForAuthenticatedUser: ["DELETE /user/emails"],
+ deleteGpgKeyForAuthenticated: ["DELETE /user/gpg_keys/{gpg_key_id}", {}, {
+ renamed: ["users", "deleteGpgKeyForAuthenticatedUser"]
+ }],
+ deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"],
+ deletePublicSshKeyForAuthenticated: ["DELETE /user/keys/{key_id}", {}, {
+ renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"]
+ }],
+ deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"],
+ follow: ["PUT /user/following/{username}"],
+ getAuthenticated: ["GET /user"],
+ getByUsername: ["GET /users/{username}"],
+ getContextForUser: ["GET /users/{username}/hovercard"],
+ getGpgKeyForAuthenticated: ["GET /user/gpg_keys/{gpg_key_id}", {}, {
+ renamed: ["users", "getGpgKeyForAuthenticatedUser"]
+ }],
+ getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"],
+ getPublicSshKeyForAuthenticated: ["GET /user/keys/{key_id}", {}, {
+ renamed: ["users", "getPublicSshKeyForAuthenticatedUser"]
+ }],
+ getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"],
+ list: ["GET /users"],
+ listBlockedByAuthenticated: ["GET /user/blocks", {}, {
+ renamed: ["users", "listBlockedByAuthenticatedUser"]
+ }],
+ listBlockedByAuthenticatedUser: ["GET /user/blocks"],
+ listEmailsForAuthenticated: ["GET /user/emails", {}, {
+ renamed: ["users", "listEmailsForAuthenticatedUser"]
}],
- setStatusCheckContexts: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", {}, {
- mapToData: "contexts"
+ listEmailsForAuthenticatedUser: ["GET /user/emails"],
+ listFollowedByAuthenticated: ["GET /user/following", {}, {
+ renamed: ["users", "listFollowedByAuthenticatedUser"]
}],
- setTeamAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", {}, {
- mapToData: "teams"
+ listFollowedByAuthenticatedUser: ["GET /user/following"],
+ listFollowersForAuthenticatedUser: ["GET /user/followers"],
+ listFollowersForUser: ["GET /users/{username}/followers"],
+ listFollowingForUser: ["GET /users/{username}/following"],
+ listGpgKeysForAuthenticated: ["GET /user/gpg_keys", {}, {
+ renamed: ["users", "listGpgKeysForAuthenticatedUser"]
}],
- setUserAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", {}, {
- mapToData: "users"
+ listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"],
+ listGpgKeysForUser: ["GET /users/{username}/gpg_keys"],
+ listPublicEmailsForAuthenticated: ["GET /user/public_emails", {}, {
+ renamed: ["users", "listPublicEmailsForAuthenticatedUser"]
}],
- testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"],
- transfer: ["POST /repos/{owner}/{repo}/transfer"],
- update: ["PATCH /repos/{owner}/{repo}"],
- updateBranchProtection: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection"],
- updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"],
- updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"],
- updateInvitation: ["PATCH /repos/{owner}/{repo}/invitations/{invitation_id}"],
- updatePullRequestReviewProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
- updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"],
- updateReleaseAsset: ["PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}"],
- updateStatusCheckPotection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", {}, {
- renamed: ["repos", "updateStatusCheckProtection"]
+ listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"],
+ listPublicKeysForUser: ["GET /users/{username}/keys"],
+ listPublicSshKeysForAuthenticated: ["GET /user/keys", {}, {
+ renamed: ["users", "listPublicSshKeysForAuthenticatedUser"]
}],
- updateStatusCheckProtection: ["PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks"],
- updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"],
- updateWebhookConfigForRepo: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config"],
- uploadReleaseAsset: ["POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", {
- baseUrl: "https://uploads.github.com"
- }]
- },
- search: {
- code: ["GET /search/code"],
- commits: ["GET /search/commits"],
- issuesAndPullRequests: ["GET /search/issues"],
- labels: ["GET /search/labels"],
- repos: ["GET /search/repositories"],
- topics: ["GET /search/topics"],
- users: ["GET /search/users"]
- },
- secretScanning: {
- getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
- listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"],
- listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
- listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
- listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"],
- updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
+ listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"],
+ setPrimaryEmailVisibilityForAuthenticated: ["PATCH /user/email/visibility", {}, {
+ renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"]
+ }],
+ setPrimaryEmailVisibilityForAuthenticatedUser: ["PATCH /user/email/visibility"],
+ unblock: ["DELETE /user/blocks/{username}"],
+ unfollow: ["DELETE /user/following/{username}"],
+ updateAuthenticated: ["PATCH /user"]
+ }
+};
+
+const VERSION = "5.16.2";
+
+function endpointsToMethods(octokit, endpointsMap) {
+ const newMethods = {};
+
+ for (const [scope, endpoints] of Object.entries(endpointsMap)) {
+ for (const [methodName, endpoint] of Object.entries(endpoints)) {
+ const [route, defaults, decorations] = endpoint;
+ const [method, url] = route.split(/ /);
+ const endpointDefaults = Object.assign({
+ method,
+ url
+ }, defaults);
+
+ if (!newMethods[scope]) {
+ newMethods[scope] = {};
+ }
+
+ const scopeMethods = newMethods[scope];
+
+ if (decorations) {
+ scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
+ continue;
+ }
+
+ scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
+ }
+ }
+
+ return newMethods;
+}
+
+function decorate(octokit, scope, methodName, defaults, decorations) {
+ const requestWithDefaults = octokit.request.defaults(defaults);
+ /* istanbul ignore next */
+
+ function withDecorations(...args) {
+ // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
+ let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
+
+ if (decorations.mapToData) {
+ options = Object.assign({}, options, {
+ data: options[decorations.mapToData],
+ [decorations.mapToData]: undefined
+ });
+ return requestWithDefaults(options);
+ }
+
+ if (decorations.renamed) {
+ const [newScope, newMethodName] = decorations.renamed;
+ octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
+ }
+
+ if (decorations.deprecated) {
+ octokit.log.warn(decorations.deprecated);
+ }
+
+ if (decorations.renamedParameters) {
+ // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
+ const options = requestWithDefaults.endpoint.merge(...args);
+
+ for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
+ if (name in options) {
+ octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
+
+ if (!(alias in options)) {
+ options[alias] = options[name];
+ }
+
+ delete options[name];
+ }
+ }
+
+ return requestWithDefaults(options);
+ } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
+
+
+ return requestWithDefaults(...args);
+ }
+
+ return Object.assign(withDecorations, requestWithDefaults);
+}
+
+function restEndpointMethods(octokit) {
+ const api = endpointsToMethods(octokit, Endpoints);
+ return {
+ rest: api
+ };
+}
+restEndpointMethods.VERSION = VERSION;
+function legacyRestEndpointMethods(octokit) {
+ const api = endpointsToMethods(octokit, Endpoints);
+ return _objectSpread2(_objectSpread2({}, api), {}, {
+ rest: api
+ });
+}
+legacyRestEndpointMethods.VERSION = VERSION;
+
+exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
+exports.restEndpointMethods = restEndpointMethods;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 537:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var deprecation = __nccwpck_require__(8932);
+var once = _interopDefault(__nccwpck_require__(1223));
+
+const logOnceCode = once(deprecation => console.warn(deprecation));
+const logOnceHeaders = once(deprecation => console.warn(deprecation));
+/**
+ * Error with extra properties to help with debugging
+ */
+
+class RequestError extends Error {
+ constructor(message, statusCode, options) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = "HttpError";
+ this.status = statusCode;
+ let headers;
+
+ if ("headers" in options && typeof options.headers !== "undefined") {
+ headers = options.headers;
+ }
+
+ if ("response" in options) {
+ this.response = options.response;
+ headers = options.response.headers;
+ } // redact request credentials without mutating original request options
+
+
+ const requestCopy = Object.assign({}, options.request);
+
+ if (options.request.headers.authorization) {
+ requestCopy.headers = Object.assign({}, options.request.headers, {
+ authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
+ });
+ }
+
+ requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
+ // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
+ .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
+ // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
+ .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
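+    // For example (hypothetical credential values): an authorization header of
+    // "token ghp_abc123" becomes "token [REDACTED]", and a URL query such as
+    // "?client_secret=abc123" becomes "?client_secret=[REDACTED]".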
+ this.request = requestCopy; // deprecations
+
+ Object.defineProperty(this, "code", {
+ get() {
+ logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
+ return statusCode;
+ }
+
+ });
+ Object.defineProperty(this, "headers", {
+ get() {
+ logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
+ return headers || {};
+ }
+
+ });
+ }
+
+}
+
+exports.RequestError = RequestError;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 6234:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
+
+var endpoint = __nccwpck_require__(9440);
+var universalUserAgent = __nccwpck_require__(5030);
+var isPlainObject = __nccwpck_require__(3287);
+var nodeFetch = _interopDefault(__nccwpck_require__(467));
+var requestError = __nccwpck_require__(537);
+
+const VERSION = "5.6.3";
+
+function getBufferResponse(response) {
+ return response.arrayBuffer();
+}
+
+function fetchWrapper(requestOptions) {
+ const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
+
+ if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
+ requestOptions.body = JSON.stringify(requestOptions.body);
+ }
+
+ let headers = {};
+ let status;
+ let url;
+ const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
+ return fetch(requestOptions.url, Object.assign({
+ method: requestOptions.method,
+ body: requestOptions.body,
+ headers: requestOptions.headers,
+ redirect: requestOptions.redirect
+ }, // `requestOptions.request.agent` type is incompatible
+ // see https://github.com/octokit/types.ts/pull/264
+ requestOptions.request)).then(async response => {
+ url = response.url;
+ status = response.status;
+
+ for (const keyAndValue of response.headers) {
+ headers[keyAndValue[0]] = keyAndValue[1];
+ }
+
+ if ("deprecation" in headers) {
+ const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
+ const deprecationLink = matches && matches.pop();
+ log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
+ }
+
+ if (status === 204 || status === 205) {
+ return;
+ } // GitHub API returns 200 for HEAD requests
+
+
+ if (requestOptions.method === "HEAD") {
+ if (status < 400) {
+ return;
+ }
+
+ throw new requestError.RequestError(response.statusText, status, {
+ response: {
+ url,
+ status,
+ headers,
+ data: undefined
+ },
+ request: requestOptions
+ });
+ }
+
+ if (status === 304) {
+ throw new requestError.RequestError("Not modified", status, {
+ response: {
+ url,
+ status,
+ headers,
+ data: await getResponseData(response)
+ },
+ request: requestOptions
+ });
+ }
+
+ if (status >= 400) {
+ const data = await getResponseData(response);
+ const error = new requestError.RequestError(toErrorMessage(data), status, {
+ response: {
+ url,
+ status,
+ headers,
+ data
+ },
+ request: requestOptions
+ });
+ throw error;
+ }
+
+ return getResponseData(response);
+ }).then(data => {
+ return {
+ status,
+ url,
+ headers,
+ data
+ };
+ }).catch(error => {
+ if (error instanceof requestError.RequestError) throw error;
+ throw new requestError.RequestError(error.message, 500, {
+ request: requestOptions
+ });
+ });
+}
+
+async function getResponseData(response) {
+ const contentType = response.headers.get("content-type");
+
+ if (/application\/json/.test(contentType)) {
+ return response.json();
+ }
+
+ if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
+ return response.text();
+ }
+
+ return getBufferResponse(response);
+}
+
+function toErrorMessage(data) {
+ if (typeof data === "string") return data; // istanbul ignore else - just in case
+
+ if ("message" in data) {
+ if (Array.isArray(data.errors)) {
+ return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
+ }
+
+ return data.message;
+ } // istanbul ignore next - just in case
+
+
+ return `Unknown error: ${JSON.stringify(data)}`;
+}
+
+function withDefaults(oldEndpoint, newDefaults) {
+ const endpoint = oldEndpoint.defaults(newDefaults);
+
+ const newApi = function (route, parameters) {
+ const endpointOptions = endpoint.merge(route, parameters);
+
+ if (!endpointOptions.request || !endpointOptions.request.hook) {
+ return fetchWrapper(endpoint.parse(endpointOptions));
+ }
+
+ const request = (route, parameters) => {
+ return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
+ };
+
+ Object.assign(request, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+ return endpointOptions.request.hook(request, endpointOptions);
+ };
+
+ return Object.assign(newApi, {
+ endpoint,
+ defaults: withDefaults.bind(null, endpoint)
+ });
+}
+
+const request = withDefaults(endpoint.endpoint, {
+ headers: {
+ "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
+ }
+});
+
+exports.request = request;
+//# sourceMappingURL=index.js.map
+
+
+/***/ }),
+
+/***/ 9417:
+/***/ ((module) => {
+
+"use strict";
+
+module.exports = balanced;
+function balanced(a, b, str) {
+ if (a instanceof RegExp) a = maybeMatch(a, str);
+ if (b instanceof RegExp) b = maybeMatch(b, str);
+
+ var r = range(a, b, str);
+
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+}
+
+function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+ var begs, beg, left, right, result;
+ var ai = str.indexOf(a);
+ var bi = str.indexOf(b, ai + 1);
+ var i = ai;
+
+ if (ai >= 0 && bi > 0) {
+ if(a===b) {
+ return [ai, bi];
+ }
+ begs = [];
+ left = str.length;
+
+ while (i >= 0 && !result) {
+ if (i == ai) {
+ begs.push(i);
+ ai = str.indexOf(a, i + 1);
+ } else if (begs.length == 1) {
+ result = [ begs.pop(), bi ];
+ } else {
+ beg = begs.pop();
+ if (beg < left) {
+ left = beg;
+ right = bi;
+ }
+
+ bi = str.indexOf(b, i + 1);
+ }
+
+ i = ai < bi && ai >= 0 ? ai : bi;
+ }
+
+ if (begs.length) {
+ result = [ left, right ];
+ }
+ }
+
+ return result;
+}
+
+
+/***/ }),
+
+/***/ 3682:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var register = __nccwpck_require__(4670);
+var addHook = __nccwpck_require__(5549);
+var removeHook = __nccwpck_require__(6819);
+
+// bind with array of arguments: https://stackoverflow.com/a/21792913
+var bind = Function.bind;
+var bindable = bind.bind(bind);
+
+function bindApi(hook, state, name) {
+ var removeHookRef = bindable(removeHook, null).apply(
+ null,
+ name ? [state, name] : [state]
+ );
+ hook.api = { remove: removeHookRef };
+ hook.remove = removeHookRef;
+ ["before", "error", "after", "wrap"].forEach(function (kind) {
+ var args = name ? [state, kind, name] : [state, kind];
+ hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args);
+ });
+}
+
+function HookSingular() {
+ var singularHookName = "h";
+ var singularHookState = {
+ registry: {},
+ };
+ var singularHook = register.bind(null, singularHookState, singularHookName);
+ bindApi(singularHook, singularHookState, singularHookName);
+ return singularHook;
+}
+
+function HookCollection() {
+ var state = {
+ registry: {},
+ };
+
+ var hook = register.bind(null, state);
+ bindApi(hook, state);
+
+ return hook;
+}
+
+var collectionHookDeprecationMessageDisplayed = false;
+function Hook() {
+ if (!collectionHookDeprecationMessageDisplayed) {
+ console.warn(
+ '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4'
+ );
+ collectionHookDeprecationMessageDisplayed = true;
+ }
+ return HookCollection();
+}
+
+Hook.Singular = HookSingular.bind();
+Hook.Collection = HookCollection.bind();
+
+module.exports = Hook;
+// expose constructors as a named property for TypeScript
+module.exports.Hook = Hook;
+module.exports.Singular = Hook.Singular;
+module.exports.Collection = Hook.Collection;
+
+
+/***/ }),
+
+/***/ 5549:
+/***/ ((module) => {
+
+module.exports = addHook;
+
+function addHook(state, kind, name, hook) {
+ var orig = hook;
+ if (!state.registry[name]) {
+ state.registry[name] = [];
+ }
+
+ if (kind === "before") {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(orig.bind(null, options))
+ .then(method.bind(null, options));
+ };
+ }
+
+ if (kind === "after") {
+ hook = function (method, options) {
+ var result;
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .then(function (result_) {
+ result = result_;
+ return orig(result, options);
+ })
+ .then(function () {
+ return result;
+ });
+ };
+ }
+
+ if (kind === "error") {
+ hook = function (method, options) {
+ return Promise.resolve()
+ .then(method.bind(null, options))
+ .catch(function (error) {
+ return orig(error, options);
+ });
+ };
+ }
+
+ state.registry[name].push({
+ hook: hook,
+ orig: orig,
+ });
+}
+
+
+/***/ }),
+
+/***/ 4670:
+/***/ ((module) => {
+
+module.exports = register;
+
+function register(state, name, method, options) {
+ if (typeof method !== "function") {
+ throw new Error("method for before hook must be a function");
+ }
+
+ if (!options) {
+ options = {};
+ }
+
+ if (Array.isArray(name)) {
+ return name.reverse().reduce(function (callback, name) {
+ return register.bind(null, state, name, callback, options);
+ }, method)();
+ }
+
+ return Promise.resolve().then(function () {
+ if (!state.registry[name]) {
+ return method(options);
+ }
+
+ return state.registry[name].reduce(function (method, registered) {
+ return registered.hook.bind(null, method, options);
+ }, method)();
+ });
+}
+
+
+/***/ }),
+
+/***/ 6819:
+/***/ ((module) => {
+
+module.exports = removeHook;
+
+function removeHook(state, name, method) {
+ if (!state.registry[name]) {
+ return;
+ }
+
+ var index = state.registry[name]
+ .map(function (registered) {
+ return registered.orig;
+ })
+ .indexOf(method);
+
+ if (index === -1) {
+ return;
+ }
+
+ state.registry[name].splice(index, 1);
+}
+
+
+/***/ }),
+
+/***/ 3717:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var concatMap = __nccwpck_require__(6891);
+var balanced = __nccwpck_require__(9417);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
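+// For example, parseCommaParts('a,{b,c},d') keeps the nested braced section
+// intact and returns ['a', '{b,c}', 'd'] instead of splitting on every comma.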
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function identity(e) {
+ return e;
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m || /\$$/.test(m.pre)) return [str];
+
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = concatMap(n, function(el) { return expand(el, false) });
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+
+ return expansions;
+}
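+
+// Editor's note (illustrative sketch, not part of the original bundle): the
+// expandTop/expand helpers above implement Bash-style brace expansion, e.g.
+//
+//   expandTop('a{b,c}d')  //=> ['abd', 'acd']
+//   expandTop('{1..3}')   //=> ['1', '2', '3']
+//   expandTop('a{}b')     //=> ['a{}b']   (no comma and no range, so left as-is)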
+
+
+
+/***/ }),
+
+/***/ 610:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const stringify = __nccwpck_require__(8750);
+const compile = __nccwpck_require__(9434);
+const expand = __nccwpck_require__(5873);
+const parse = __nccwpck_require__(6477);
+
+/**
+ * Expand the given pattern or create a regex-compatible string.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces('{a,b,c}', { compile: true })); //=> ['(a|b|c)']
+ * console.log(braces('{a,b,c}')); //=> ['a', 'b', 'c']
+ * ```
+ * @param {String} `str`
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded or compiled strings.
+ * @api public
+ */
+
+const braces = (input, options = {}) => {
+ let output = [];
+
+ if (Array.isArray(input)) {
+ for (let pattern of input) {
+ let result = braces.create(pattern, options);
+ if (Array.isArray(result)) {
+ output.push(...result);
+ } else {
+ output.push(result);
+ }
+ }
+ } else {
+ output = [].concat(braces.create(input, options));
+ }
+
+ if (options && options.expand === true && options.nodupes === true) {
+ output = [...new Set(output)];
+ }
+ return output;
+};
+
+/**
+ * Parse the given `str` with the given `options`.
+ *
+ * ```js
+ * // braces.parse(pattern, [, options]);
+ * const ast = braces.parse('a/{b,c}/d');
+ * console.log(ast);
+ * ```
+ * @param {String} pattern Brace pattern to parse
+ * @param {Object} options
+ * @return {Object} Returns an AST
+ * @api public
+ */
+
+braces.parse = (input, options = {}) => parse(input, options);
+
+/**
+ * Creates a braces string from an AST, or an AST node.
+ *
+ * ```js
+ * const braces = require('braces');
+ * let ast = braces.parse('foo/{a,b}/bar');
+ * console.log(stringify(ast.nodes[2])); //=> '{a,b}'
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {String} Returns a brace pattern string created from the AST.
+ * @api public
+ */
+
+braces.stringify = (input, options = {}) => {
+ if (typeof input === 'string') {
+ return stringify(braces.parse(input, options), options);
+ }
+ return stringify(input, options);
+};
+
+/**
+ * Compiles a brace pattern into a regex-compatible, optimized string.
+ * This method is called by the main [braces](#braces) function by default.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.compile('a/{b,c}/d'));
+ * //=> ['a/(b|c)/d']
+ * ```
+ * @param {String} `input` Brace pattern or AST.
+ * @param {Object} `options`
+ * @return {String} Returns a regex-compatible string.
+ * @api public
+ */
+
+braces.compile = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+ return compile(input, options);
+};
+
+/**
+ * Expands a brace pattern into an array. This method is called by the
+ * main [braces](#braces) function when `options.expand` is true. Before
+ * using this method it's recommended that you read the [performance notes](#performance)
+ * and advantages of using [.compile](#compile) instead.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.expand('a/{b,c}/d'));
+ * //=> ['a/b/d', 'a/c/d'];
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.expand = (input, options = {}) => {
+ if (typeof input === 'string') {
+ input = braces.parse(input, options);
+ }
+
+ let result = expand(input, options);
+
+ // filter out empty strings if specified
+ if (options.noempty === true) {
+ result = result.filter(Boolean);
+ }
+
+ // filter out duplicates if specified
+ if (options.nodupes === true) {
+ result = [...new Set(result)];
+ }
+
+ return result;
+};
+
+/**
+ * Processes a brace pattern and returns either an expanded array
+ * (if `options.expand` is true) or a highly optimized regex-compatible string.
+ * This method is called by the main [braces](#braces) function.
+ *
+ * ```js
+ * const braces = require('braces');
+ * console.log(braces.create('user-{200..300}/project-{a,b,c}-{1..10}'))
+ * //=> 'user-(20[0-9]|2[1-9][0-9]|300)/project-(a|b|c)-([1-9]|10)'
+ * ```
+ * @param {String} `pattern` Brace pattern
+ * @param {Object} `options`
+ * @return {Array} Returns an array of expanded values.
+ * @api public
+ */
+
+braces.create = (input, options = {}) => {
+ if (input === '' || input.length < 3) {
+ return [input];
+ }
+
+ return options.expand !== true
+ ? braces.compile(input, options)
+ : braces.expand(input, options);
+};
+
+/**
+ * Expose "braces"
+ */
+
+module.exports = braces;
+
+
+/***/ }),
+
+/***/ 9434:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const fill = __nccwpck_require__(6330);
+const utils = __nccwpck_require__(5207);
+
+const compile = (ast, options = {}) => {
+ let walk = (node, parent = {}) => {
+ let invalidBlock = utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let invalid = invalidBlock === true || invalidNode === true;
+ let prefix = options.escapeInvalid === true ? '\\' : '';
+ let output = '';
+
+ if (node.isOpen === true) {
+ return prefix + node.value;
+ }
+ if (node.isClose === true) {
+ return prefix + node.value;
+ }
+
+ if (node.type === 'open') {
+ return invalid ? (prefix + node.value) : '(';
+ }
+
+ if (node.type === 'close') {
+ return invalid ? (prefix + node.value) : ')';
+ }
+
+ if (node.type === 'comma') {
+ return node.prev.type === 'comma' ? '' : (invalid ? node.value : '|');
+ }
+
+ if (node.value) {
+ return node.value;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+ let range = fill(...args, { ...options, wrap: false, toRegex: true });
+
+ if (range.length !== 0) {
+ return args.length > 1 && range.length > 1 ? `(${range})` : range;
+ }
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += walk(child, node);
+ }
+ }
+ return output;
+ };
+
+ return walk(ast);
+};
+
+module.exports = compile;
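+
+// Editor's note (illustrative sketch): compile() walks a braces AST and emits a
+// regex-compatible source string rather than expanding it, e.g.
+//
+//   compile(parse('a/{b,c}/d'))   //=> 'a/(b|c)/d'
+//   compile(parse('a/{1..3}/d'))  //=> 'a/([1-3])/d'
+//
+// (parse here refers to the sibling parser module bundled below.)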
+
+
+/***/ }),
+
+/***/ 8774:
+/***/ ((module) => {
+
+"use strict";
+
+
+module.exports = {
+ MAX_LENGTH: 1024 * 64,
+
+ // Digits
+ CHAR_0: '0', /* 0 */
+ CHAR_9: '9', /* 9 */
+
+ // Alphabet chars.
+ CHAR_UPPERCASE_A: 'A', /* A */
+ CHAR_LOWERCASE_A: 'a', /* a */
+ CHAR_UPPERCASE_Z: 'Z', /* Z */
+ CHAR_LOWERCASE_Z: 'z', /* z */
+
+ CHAR_LEFT_PARENTHESES: '(', /* ( */
+ CHAR_RIGHT_PARENTHESES: ')', /* ) */
+
+ CHAR_ASTERISK: '*', /* * */
+
+ // Non-alphabetic chars.
+ CHAR_AMPERSAND: '&', /* & */
+ CHAR_AT: '@', /* @ */
+ CHAR_BACKSLASH: '\\', /* \ */
+ CHAR_BACKTICK: '`', /* ` */
+ CHAR_CARRIAGE_RETURN: '\r', /* \r */
+ CHAR_CIRCUMFLEX_ACCENT: '^', /* ^ */
+ CHAR_COLON: ':', /* : */
+ CHAR_COMMA: ',', /* , */
+  CHAR_DOLLAR: '$', /* $ */
+ CHAR_DOT: '.', /* . */
+ CHAR_DOUBLE_QUOTE: '"', /* " */
+ CHAR_EQUAL: '=', /* = */
+ CHAR_EXCLAMATION_MARK: '!', /* ! */
+ CHAR_FORM_FEED: '\f', /* \f */
+ CHAR_FORWARD_SLASH: '/', /* / */
+ CHAR_HASH: '#', /* # */
+ CHAR_HYPHEN_MINUS: '-', /* - */
+ CHAR_LEFT_ANGLE_BRACKET: '<', /* < */
+ CHAR_LEFT_CURLY_BRACE: '{', /* { */
+ CHAR_LEFT_SQUARE_BRACKET: '[', /* [ */
+ CHAR_LINE_FEED: '\n', /* \n */
+ CHAR_NO_BREAK_SPACE: '\u00A0', /* \u00A0 */
+ CHAR_PERCENT: '%', /* % */
+ CHAR_PLUS: '+', /* + */
+ CHAR_QUESTION_MARK: '?', /* ? */
+ CHAR_RIGHT_ANGLE_BRACKET: '>', /* > */
+ CHAR_RIGHT_CURLY_BRACE: '}', /* } */
+ CHAR_RIGHT_SQUARE_BRACKET: ']', /* ] */
+ CHAR_SEMICOLON: ';', /* ; */
+ CHAR_SINGLE_QUOTE: '\'', /* ' */
+ CHAR_SPACE: ' ', /* */
+ CHAR_TAB: '\t', /* \t */
+ CHAR_UNDERSCORE: '_', /* _ */
+ CHAR_VERTICAL_LINE: '|', /* | */
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE: '\uFEFF' /* \uFEFF */
+};
+
+
+/***/ }),
+
+/***/ 5873:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const fill = __nccwpck_require__(6330);
+const stringify = __nccwpck_require__(8750);
+const utils = __nccwpck_require__(5207);
+
+const append = (queue = '', stash = '', enclose = false) => {
+ let result = [];
+
+ queue = [].concat(queue);
+ stash = [].concat(stash);
+
+ if (!stash.length) return queue;
+ if (!queue.length) {
+ return enclose ? utils.flatten(stash).map(ele => `{${ele}}`) : stash;
+ }
+
+ for (let item of queue) {
+ if (Array.isArray(item)) {
+ for (let value of item) {
+ result.push(append(value, stash, enclose));
+ }
+ } else {
+ for (let ele of stash) {
+ if (enclose === true && typeof ele === 'string') ele = `{${ele}}`;
+ result.push(Array.isArray(ele) ? append(item, ele, enclose) : (item + ele));
+ }
+ }
+ }
+ return utils.flatten(result);
+};
+
+const expand = (ast, options = {}) => {
+ let rangeLimit = options.rangeLimit === void 0 ? 1000 : options.rangeLimit;
+
+ let walk = (node, parent = {}) => {
+ node.queue = [];
+
+ let p = parent;
+ let q = parent.queue;
+
+ while (p.type !== 'brace' && p.type !== 'root' && p.parent) {
+ p = p.parent;
+ q = p.queue;
+ }
+
+ if (node.invalid || node.dollar) {
+ q.push(append(q.pop(), stringify(node, options)));
+ return;
+ }
+
+ if (node.type === 'brace' && node.invalid !== true && node.nodes.length === 2) {
+ q.push(append(q.pop(), ['{}']));
+ return;
+ }
+
+ if (node.nodes && node.ranges > 0) {
+ let args = utils.reduce(node.nodes);
+
+ if (utils.exceedsLimit(...args, options.step, rangeLimit)) {
+ throw new RangeError('expanded array length exceeds range limit. Use options.rangeLimit to increase or disable the limit.');
+ }
+
+ let range = fill(...args, options);
+ if (range.length === 0) {
+ range = stringify(node, options);
+ }
+
+ q.push(append(q.pop(), range));
+ node.nodes = [];
+ return;
+ }
+
+ let enclose = utils.encloseBrace(node);
+ let queue = node.queue;
+ let block = node;
+
+ while (block.type !== 'brace' && block.type !== 'root' && block.parent) {
+ block = block.parent;
+ queue = block.queue;
+ }
+
+ for (let i = 0; i < node.nodes.length; i++) {
+ let child = node.nodes[i];
+
+ if (child.type === 'comma' && node.type === 'brace') {
+ if (i === 1) queue.push('');
+ queue.push('');
+ continue;
+ }
+
+ if (child.type === 'close') {
+ q.push(append(q.pop(), queue, enclose));
+ continue;
+ }
+
+ if (child.value && child.type !== 'open') {
+ queue.push(append(queue.pop(), child.value));
+ continue;
+ }
+
+ if (child.nodes) {
+ walk(child, node);
+ }
+ }
+
+ return queue;
+ };
+
+ return utils.flatten(walk(ast));
+};
+
+module.exports = expand;
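+
+// Editor's note (illustrative sketch): expand() produces the fully expanded
+// strings instead of a regex source, e.g.
+//
+//   expand(parse('a/{b,c}/d'))  //=> ['a/b/d', 'a/c/d']
+//   expand(parse('{1..3}'))     //=> ['1', '2', '3']
+//
+// Ranges larger than options.rangeLimit (default 1000) throw a RangeError.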
+
+
+/***/ }),
+
+/***/ 6477:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const stringify = __nccwpck_require__(8750);
+
+/**
+ * Constants
+ */
+
+const {
+ MAX_LENGTH,
+ CHAR_BACKSLASH, /* \ */
+ CHAR_BACKTICK, /* ` */
+ CHAR_COMMA, /* , */
+ CHAR_DOT, /* . */
+ CHAR_LEFT_PARENTHESES, /* ( */
+ CHAR_RIGHT_PARENTHESES, /* ) */
+ CHAR_LEFT_CURLY_BRACE, /* { */
+ CHAR_RIGHT_CURLY_BRACE, /* } */
+ CHAR_LEFT_SQUARE_BRACKET, /* [ */
+ CHAR_RIGHT_SQUARE_BRACKET, /* ] */
+ CHAR_DOUBLE_QUOTE, /* " */
+ CHAR_SINGLE_QUOTE, /* ' */
+ CHAR_NO_BREAK_SPACE,
+ CHAR_ZERO_WIDTH_NOBREAK_SPACE
+} = __nccwpck_require__(8774);
+
+/**
+ * parse
+ */
+
+const parse = (input, options = {}) => {
+ if (typeof input !== 'string') {
+ throw new TypeError('Expected a string');
+ }
+
+ let opts = options || {};
+ let max = typeof opts.maxLength === 'number' ? Math.min(MAX_LENGTH, opts.maxLength) : MAX_LENGTH;
+ if (input.length > max) {
+ throw new SyntaxError(`Input length (${input.length}), exceeds max characters (${max})`);
+ }
+
+ let ast = { type: 'root', input, nodes: [] };
+ let stack = [ast];
+ let block = ast;
+ let prev = ast;
+ let brackets = 0;
+ let length = input.length;
+ let index = 0;
+ let depth = 0;
+ let value;
+ let memo = {};
+
+ /**
+ * Helpers
+ */
+
+ const advance = () => input[index++];
+ const push = node => {
+ if (node.type === 'text' && prev.type === 'dot') {
+ prev.type = 'text';
+ }
+
+ if (prev && prev.type === 'text' && node.type === 'text') {
+ prev.value += node.value;
+ return;
+ }
+
+ block.nodes.push(node);
+ node.parent = block;
+ node.prev = prev;
+ prev = node;
+ return node;
+ };
+
+ push({ type: 'bos' });
+
+ while (index < length) {
+ block = stack[stack.length - 1];
+ value = advance();
+
+ /**
+ * Invalid chars
+ */
+
+ if (value === CHAR_ZERO_WIDTH_NOBREAK_SPACE || value === CHAR_NO_BREAK_SPACE) {
+ continue;
+ }
+
+ /**
+ * Escaped chars
+ */
+
+ if (value === CHAR_BACKSLASH) {
+ push({ type: 'text', value: (options.keepEscaping ? value : '') + advance() });
+ continue;
+ }
+
+ /**
+ * Right square bracket (literal): ']'
+ */
+
+ if (value === CHAR_RIGHT_SQUARE_BRACKET) {
+ push({ type: 'text', value: '\\' + value });
+ continue;
+ }
+
+ /**
+ * Left square bracket: '['
+ */
+
+ if (value === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+
+ let closed = true;
+ let next;
+
+ while (index < length && (next = advance())) {
+ value += next;
+
+ if (next === CHAR_LEFT_SQUARE_BRACKET) {
+ brackets++;
+ continue;
+ }
+
+ if (next === CHAR_BACKSLASH) {
+ value += advance();
+ continue;
+ }
+
+ if (next === CHAR_RIGHT_SQUARE_BRACKET) {
+ brackets--;
+
+ if (brackets === 0) {
+ break;
+ }
+ }
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Parentheses
+ */
+
+ if (value === CHAR_LEFT_PARENTHESES) {
+ block = push({ type: 'paren', nodes: [] });
+ stack.push(block);
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (value === CHAR_RIGHT_PARENTHESES) {
+ if (block.type !== 'paren') {
+ push({ type: 'text', value });
+ continue;
+ }
+ block = stack.pop();
+ push({ type: 'text', value });
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Quotes: '|"|`
+ */
+
+ if (value === CHAR_DOUBLE_QUOTE || value === CHAR_SINGLE_QUOTE || value === CHAR_BACKTICK) {
+ let open = value;
+ let next;
+
+ if (options.keepQuotes !== true) {
+ value = '';
+ }
+
+ while (index < length && (next = advance())) {
+ if (next === CHAR_BACKSLASH) {
+ value += next + advance();
+ continue;
+ }
+
+ if (next === open) {
+ if (options.keepQuotes === true) value += next;
+ break;
+ }
+
+ value += next;
+ }
+
+ push({ type: 'text', value });
+ continue;
+ }
+
+ /**
+ * Left curly brace: '{'
+ */
+
+ if (value === CHAR_LEFT_CURLY_BRACE) {
+ depth++;
+
+ let dollar = prev.value && prev.value.slice(-1) === '$' || block.dollar === true;
+ let brace = {
+ type: 'brace',
+ open: true,
+ close: false,
+ dollar,
+ depth,
+ commas: 0,
+ ranges: 0,
+ nodes: []
+ };
+
+ block = push(brace);
+ stack.push(block);
+ push({ type: 'open', value });
+ continue;
+ }
+
+ /**
+ * Right curly brace: '}'
+ */
+
+ if (value === CHAR_RIGHT_CURLY_BRACE) {
+ if (block.type !== 'brace') {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ let type = 'close';
+ block = stack.pop();
+ block.close = true;
+
+ push({ type, value });
+ depth--;
+
+ block = stack[stack.length - 1];
+ continue;
+ }
+
+ /**
+ * Comma: ','
+ */
+
+ if (value === CHAR_COMMA && depth > 0) {
+ if (block.ranges > 0) {
+ block.ranges = 0;
+ let open = block.nodes.shift();
+ block.nodes = [open, { type: 'text', value: stringify(block) }];
+ }
+
+ push({ type: 'comma', value });
+ block.commas++;
+ continue;
+ }
+
+ /**
+ * Dot: '.'
+ */
+
+ if (value === CHAR_DOT && depth > 0 && block.commas === 0) {
+ let siblings = block.nodes;
+
+ if (depth === 0 || siblings.length === 0) {
+ push({ type: 'text', value });
+ continue;
+ }
+
+ if (prev.type === 'dot') {
+ block.range = [];
+ prev.value += value;
+ prev.type = 'range';
+
+ if (block.nodes.length !== 3 && block.nodes.length !== 5) {
+ block.invalid = true;
+ block.ranges = 0;
+ prev.type = 'text';
+ continue;
+ }
+
+ block.ranges++;
+ block.args = [];
+ continue;
+ }
+
+ if (prev.type === 'range') {
+ siblings.pop();
+
+ let before = siblings[siblings.length - 1];
+ before.value += prev.value + value;
+ prev = before;
+ block.ranges--;
+ continue;
+ }
+
+ push({ type: 'dot', value });
+ continue;
+ }
+
+ /**
+ * Text
+ */
+
+ push({ type: 'text', value });
+ }
+
+ // Mark imbalanced braces and brackets as invalid
+ do {
+ block = stack.pop();
+
+ if (block.type !== 'root') {
+ block.nodes.forEach(node => {
+ if (!node.nodes) {
+ if (node.type === 'open') node.isOpen = true;
+ if (node.type === 'close') node.isClose = true;
+ if (!node.nodes) node.type = 'text';
+ node.invalid = true;
+ }
+ });
+
+ // get the location of the block on parent.nodes (block's siblings)
+ let parent = stack[stack.length - 1];
+ let index = parent.nodes.indexOf(block);
+      // replace the (invalid) block with its nodes
+ parent.nodes.splice(index, 1, ...block.nodes);
+ }
+ } while (stack.length > 0);
+
+ push({ type: 'eos' });
+ return ast;
+};
+
+module.exports = parse;
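+
+// Editor's note (illustrative sketch): parse() returns a plain AST, e.g.
+// parse('a/{b,c}') yields a root node whose children include a text node
+// ('a/') and a 'brace' node containing open/text/comma/text/close children;
+// imbalanced braces are downgraded to text and flagged invalid above.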
+
+
+/***/ }),
+
+/***/ 8750:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+const utils = __nccwpck_require__(5207);
+
+module.exports = (ast, options = {}) => {
+ let stringify = (node, parent = {}) => {
+ let invalidBlock = options.escapeInvalid && utils.isInvalidBrace(parent);
+ let invalidNode = node.invalid === true && options.escapeInvalid === true;
+ let output = '';
+
+ if (node.value) {
+ if ((invalidBlock || invalidNode) && utils.isOpenOrClose(node)) {
+ return '\\' + node.value;
+ }
+ return node.value;
+ }
+
+ if (node.nodes) {
+ for (let child of node.nodes) {
+ output += stringify(child);
+ }
+ }
+ return output;
+ };
+
+ return stringify(ast);
+};
+
+
+
+/***/ }),
+
+/***/ 5207:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+exports.isInteger = num => {
+ if (typeof num === 'number') {
+ return Number.isInteger(num);
+ }
+ if (typeof num === 'string' && num.trim() !== '') {
+ return Number.isInteger(Number(num));
+ }
+ return false;
+};
+
+/**
+ * Find a node of the given type
+ */
+
+exports.find = (node, type) => node.nodes.find(node => node.type === type);
+
+/**
+ * Returns true if expanding the given range would exceed the limit
+ */
+
+exports.exceedsLimit = (min, max, step = 1, limit) => {
+ if (limit === false) return false;
+ if (!exports.isInteger(min) || !exports.isInteger(max)) return false;
+ return ((Number(max) - Number(min)) / Number(step)) >= limit;
+};
+
+/**
+ * Escape the given node with '\\' before node.value
+ */
+
+exports.escapeNode = (block, n = 0, type) => {
+ let node = block.nodes[n];
+ if (!node) return;
+
+ if ((type && node.type === type) || node.type === 'open' || node.type === 'close') {
+ if (node.escaped !== true) {
+ node.value = '\\' + node.value;
+ node.escaped = true;
+ }
+ }
+};
+
+/**
+ * Returns true if the given brace node should be enclosed in literal braces
+ */
+
+exports.encloseBrace = node => {
+ if (node.type !== 'brace') return false;
+ if ((node.commas >> 0 + node.ranges >> 0) === 0) {
+ node.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a brace node is invalid.
+ */
+
+exports.isInvalidBrace = block => {
+ if (block.type !== 'brace') return false;
+ if (block.invalid === true || block.dollar) return true;
+ if ((block.commas >> 0 + block.ranges >> 0) === 0) {
+ block.invalid = true;
+ return true;
+ }
+ if (block.open !== true || block.close !== true) {
+ block.invalid = true;
+ return true;
+ }
+ return false;
+};
+
+/**
+ * Returns true if a node is an open or close node
+ */
+
+exports.isOpenOrClose = node => {
+ if (node.type === 'open' || node.type === 'close') {
+ return true;
+ }
+ return node.open === true || node.close === true;
+};
+
+/**
+ * Reduce an array of text nodes.
+ */
+
+exports.reduce = nodes => nodes.reduce((acc, node) => {
+ if (node.type === 'text') acc.push(node.value);
+ if (node.type === 'range') node.type = 'text';
+ return acc;
+}, []);
+
+/**
+ * Flatten an array
+ */
+
+exports.flatten = (...args) => {
+ const result = [];
+ const flat = arr => {
+ for (let i = 0; i < arr.length; i++) {
+ let ele = arr[i];
+ Array.isArray(ele) ? flat(ele, result) : ele !== void 0 && result.push(ele);
+ }
+ return result;
+ };
+ flat(args);
+ return result;
+};
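+
+// Editor's note (illustrative sketch): small helpers used by compile/expand, e.g.
+//
+//   exports.flatten([1, [2, [3]]])          //=> [1, 2, 3]
+//   exports.exceedsLimit(1, 5000, 1, 1000)  //=> true
+//   exports.isInteger('42')                 //=> true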
+
+
+/***/ }),
+
+/***/ 4243:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.config = {
+ sort_key: false,
+};
+
+
+/***/ }),
+
+/***/ 5860:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const debug_1 = __nccwpck_require__(1633);
+const encode_1 = __nccwpck_require__(3142);
+const memory_1 = __nccwpck_require__(7020);
+function compress(o) {
+ const mem = memory_1.makeInMemoryMemory();
+ const root = memory_1.addValue(mem, o, undefined);
+ const values = memory_1.memToValues(mem);
+ return [values, root];
+}
+exports.compress = compress;
+function decodeObject(values, s) {
+ if (s === 'o|') {
+ return {};
+ }
+ const o = {};
+ const vs = s.split('|');
+ const key_id = vs[1];
+ let keys = decode(values, key_id);
+ const n = vs.length;
+ if (n - 2 === 1 && !Array.isArray(keys)) {
+ // single-key object using existing value as key
+ keys = [keys];
+ }
+ for (let i = 2; i < n; i++) {
+ const k = keys[i - 2];
+ let v = vs[i];
+ v = decode(values, v);
+ o[k] = v;
+ }
+ return o;
+}
+function decodeArray(values, s) {
+ if (s === 'a|') {
+ return [];
+ }
+ const vs = s.split('|');
+ const n = vs.length - 1;
+ const xs = new Array(n);
+ for (let i = 0; i < n; i++) {
+ let v = vs[i + 1];
+ v = decode(values, v);
+ xs[i] = v;
+ }
+ return xs;
+}
+function decode(values, key) {
+ if (key === '' || key === '_') {
+ return null;
+ }
+ const id = encode_1.decodeKey(key);
+ const v = values[id];
+ if (v === null) {
+ return v;
+ }
+ switch (typeof v) {
+ case 'undefined':
+ return v;
+ case 'number':
+ return v;
+ case 'string':
+ const prefix = v[0] + v[1];
+ switch (prefix) {
+ case 'b|':
+ return encode_1.decodeBool(v);
+ case 'o|':
+ return decodeObject(values, v);
+ case 'n|':
+ return encode_1.decodeNum(v);
+ case 'a|':
+ return decodeArray(values, v);
+ default:
+ return encode_1.decodeStr(v);
+ }
+ }
+ return debug_1.throwUnknownDataType(v);
+}
+exports.decode = decode;
+function decompress(c) {
+ const [values, root] = c;
+ return decode(values, root);
+}
+exports.decompress = decompress;
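+
+// Editor's note (illustrative sketch): compress/decompress are a lossless pair.
+// compress() flattens a JSON value into a deduplicated value table plus a root
+// key, and decompress() rebuilds the original value, e.g.
+//
+//   const packed = compress({ skip: true });
+//   decompress(packed);   //=> { skip: true }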
+
+
+/***/ }),
+
+/***/ 1633:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+function getType(o) {
+ return Object.prototype.toString.call(o);
+}
+exports.getType = getType;
+function throwUnknownDataType(o) {
+ throw new TypeError('unsupported data type: ' + getType(o));
+}
+exports.throwUnknownDataType = throwUnknownDataType;
+
+
+/***/ }),
+
+/***/ 3142:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const number_1 = __nccwpck_require__(4264);
+function encodeNum(num) {
+ const a = 'n|' + number_1.num_to_s(num);
+ return a;
+ // let b = num.toString()
+ // return a.length < b.length ? a : num
+}
+exports.encodeNum = encodeNum;
+function decodeNum(s) {
+ s = s.replace('n|', '');
+ return number_1.s_to_num(s);
+}
+exports.decodeNum = decodeNum;
+function decodeKey(key) {
+ return typeof key === 'number' ? key : number_1.s_to_int(key);
+}
+exports.decodeKey = decodeKey;
+function encodeBool(b) {
+ // return 'b|' + bool_to_s(b)
+ return b ? 'b|T' : 'b|F';
+}
+exports.encodeBool = encodeBool;
+function decodeBool(s) {
+ switch (s) {
+ case 'b|T':
+ return true;
+ case 'b|F':
+ return false;
+ }
+ return !!s;
+}
+exports.decodeBool = decodeBool;
+function encodeStr(str) {
+ const prefix = str[0] + str[1];
+ switch (prefix) {
+ case 'b|':
+ case 'o|':
+ case 'n|':
+ case 'a|':
+ case 's|':
+ str = 's|' + str;
+ }
+ return str;
+}
+exports.encodeStr = encodeStr;
+function decodeStr(s) {
+ const prefix = s[0] + s[1];
+ return prefix === 's|' ? s.substr(2) : s;
+}
+exports.decodeStr = decodeStr;
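+
+// Editor's note (illustrative sketch): every stored value carries a two-char
+// type prefix so decode() can dispatch on it, e.g.
+//
+//   encodeBool(true)    //=> 'b|T'
+//   encodeNum(42)       //=> 'n|' + num_to_s(42)
+//   encodeStr('n|x')    //=> 's|n|x'   (escapes a literal string that looks like a prefix)
+//   decodeStr('s|n|x')  //=> 'n|x'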
+
+
+/***/ }),
+
+/***/ 5135:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+function trimUndefined(object) {
+ for (const key in object) {
+ if (object[key] === undefined) {
+ delete object[key];
+ }
+ }
+}
+exports.trimUndefined = trimUndefined;
+function trimUndefinedRecursively(object) {
+ trimUndefinedRecursivelyLoop(object, new Set());
+}
+exports.trimUndefinedRecursively = trimUndefinedRecursively;
+function trimUndefinedRecursivelyLoop(object, tracks) {
+ tracks.add(object);
+ for (const key in object) {
+ if (object[key] === undefined) {
+ delete object[key];
+ }
+ else {
+ const value = object[key];
+ if (value && typeof value === 'object' && !tracks.has(value)) {
+ trimUndefinedRecursivelyLoop(value, tracks);
+ }
+ }
+ }
+}
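+
+// Editor's note (illustrative sketch): trimUndefined strips own keys whose value
+// is undefined (the recursive variant also walks nested objects and guards
+// against cycles), e.g.
+//
+//   const o = { a: 1, b: undefined };
+//   trimUndefined(o);   // o is now { a: 1 }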
+
+
+/***/ }),
+
+/***/ 558:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+/* for direct usage */
+var core_1 = __nccwpck_require__(5860);
+exports.compress = core_1.compress;
+exports.decompress = core_1.decompress;
+/* for custom wrapper */
+var core_2 = __nccwpck_require__(5860);
+exports.decode = core_2.decode;
+var memory_1 = __nccwpck_require__(7020);
+exports.addValue = memory_1.addValue;
+/* to remove undefined object fields */
+var helpers_1 = __nccwpck_require__(5135);
+exports.trimUndefined = helpers_1.trimUndefined;
+exports.trimUndefinedRecursively = helpers_1.trimUndefinedRecursively;
+
+
+/***/ }),
+
+/***/ 7020:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const config_1 = __nccwpck_require__(4243);
+const debug_1 = __nccwpck_require__(1633);
+const encode_1 = __nccwpck_require__(3142);
+const number_1 = __nccwpck_require__(4264);
+function memToValues(mem) {
+ return mem.store.toArray();
+}
+exports.memToValues = memToValues;
+function makeInMemoryStore() {
+ const mem = [];
+ return {
+ forEach(cb) {
+ for (let i = 0; i < mem.length; i++) {
+ if (cb(mem[i]) === 'break') {
+ return;
+ }
+ }
+ },
+ add(value) {
+ mem.push(value);
+ },
+ toArray() {
+ return mem;
+ },
+ };
+}
+exports.makeInMemoryStore = makeInMemoryStore;
+function makeInMemoryCache() {
+ const valueMem = Object.create(null);
+ const schemaMem = Object.create(null);
+ return {
+ getValue(key) {
+ return valueMem[key];
+ },
+ getSchema(key) {
+ return schemaMem[key];
+ },
+ forEachValue(cb) {
+ for (const [key, value] of Object.entries(valueMem)) {
+ if (cb(key, value) === 'break') {
+ return;
+ }
+ }
+ },
+ forEachSchema(cb) {
+ for (const [key, value] of Object.entries(schemaMem)) {
+ if (cb(key, value) === 'break') {
+ return;
+ }
+ }
+ },
+ setValue(key, value) {
+ valueMem[key] = value;
+ },
+ setSchema(key, value) {
+ schemaMem[key] = value;
+ },
+ hasValue(key) {
+ return key in valueMem;
+ },
+ hasSchema(key) {
+ return key in schemaMem;
+ },
+ };
+}
+exports.makeInMemoryCache = makeInMemoryCache;
+function makeInMemoryMemory() {
+ return {
+ store: makeInMemoryStore(),
+ cache: makeInMemoryCache(),
+ keyCount: 0,
+ };
+}
+exports.makeInMemoryMemory = makeInMemoryMemory;
+function getValueKey(mem, value) {
+ if (mem.cache.hasValue(value)) {
+ return mem.cache.getValue(value);
+ }
+ const id = mem.keyCount++;
+ const key = number_1.num_to_s(id);
+ mem.store.add(value);
+ mem.cache.setValue(value, key);
+ return key;
+}
+/** @remark in-place sort the keys */
+function getSchema(mem, keys) {
+ if (config_1.config.sort_key) {
+ keys.sort();
+ }
+ const schema = keys.join(',');
+ if (mem.cache.hasSchema(schema)) {
+ return mem.cache.getSchema(schema);
+ }
+ const key_id = addValue(mem, keys, undefined);
+ mem.cache.setSchema(schema, key_id);
+ return key_id;
+}
+function addValue(mem, o, parent) {
+ if (o === null) {
+ return '';
+ }
+ switch (typeof o) {
+ case 'undefined':
+ if (Array.isArray(parent)) {
+ return addValue(mem, null, parent);
+ }
+ break;
+ case 'object':
+ if (o === null) {
+ return getValueKey(mem, null);
+ }
+ if (Array.isArray(o)) {
+ let acc = 'a';
+ for (let i = 0; i < o.length; i++) {
+ const v = o[i];
+ const key = v === null ? '_' : addValue(mem, v, o);
+ acc += '|' + key;
+ }
+ if (acc === 'a') {
+ acc = 'a|';
+ }
+ return getValueKey(mem, acc);
+ }
+ else {
+ const keys = Object.keys(o);
+ if (keys.length === 0) {
+ return getValueKey(mem, 'o|');
+ }
+ let acc = 'o';
+ const key_id = getSchema(mem, keys);
+ acc += '|' + key_id;
+ for (const key of keys) {
+ const value = o[key];
+ const v = addValue(mem, value, o);
+ acc += '|' + v;
+ }
+ return getValueKey(mem, acc);
+ }
+ case 'boolean':
+ return getValueKey(mem, encode_1.encodeBool(o));
+ case 'number':
+ return getValueKey(mem, encode_1.encodeNum(o));
+ case 'string':
+ return getValueKey(mem, encode_1.encodeStr(o));
+ }
+ return debug_1.throwUnknownDataType(o);
+}
+exports.addValue = addValue;
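+
+// Editor's note (illustrative sketch): getValueKey/getSchema deduplicate
+// repeated values and key lists through the in-memory cache, so e.g.
+//
+//   compress([1, 1, 1])   //=> [['n|1', 'a|0|0|0'], '1']
+//
+// stores the encoded number once and references it three times.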
+
+
+/***/ }),
+
+/***/ 4264:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+let i_to_s = '';
+for (let i = 0; i < 10; i++) {
+ const c = String.fromCharCode(48 + i);
+ i_to_s += c;
+}
+for (let i = 0; i < 26; i++) {
+ const c = String.fromCharCode(65 + i);
+ i_to_s += c;
+}
+for (let i = 0; i < 26; i++) {
+ const c = String.fromCharCode(65 + 32 + i);
+ i_to_s += c;
+}
+const N = i_to_s.length;
+const s_to_i = {};
+for (let i = 0; i < N; i++) {
+ const s = i_to_s[i];
+ s_to_i[s] = i;
+}
+function s_to_int(s) {
+ let acc = 0;
+ let pow = 1;
+ for (let i = s.length - 1; i >= 0; i--) {
+ const c = s[i];
+ let x = s_to_i[c];
+ x *= pow;
+ acc += x;
+ pow *= N;
+ }
+ return acc;
+}
+exports.s_to_int = s_to_int;
+function s_to_big_int(s) {
+ let acc = BigInt(0);
+ let pow = BigInt(1);
+ const n = BigInt(N);
+ for (let i = s.length - 1; i >= 0; i--) {
+ const c = s[i];
+ let x = BigInt(s_to_i[c]);
+ x *= pow;
+ acc += x;
+ pow *= n;
+ }
+ return acc;
+}
+exports.s_to_big_int = s_to_big_int;
+function int_to_s(int) {
+ if (int === 0) {
+ return i_to_s[0];
+ }
+ const acc = [];
+ while (int !== 0) {
+ const i = int % N;
+ const c = i_to_s[i];
+ acc.push(c);
+ int -= i;
+ int /= N;
+ }
+ return acc.reverse().join('');
+}
+exports.int_to_s = int_to_s;
+function big_int_to_s(int) {
+ const zero = BigInt(0);
+ const n = BigInt(N);
+ if (int === zero) {
+ return i_to_s[0];
+ }
+ const acc = [];
+ while (int !== zero) {
+ const i = int % n;
+ const c = i_to_s[Number(i)];
+ acc.push(c);
+ int -= i;
+ int /= n;
+ }
+ return acc.reverse().join('');
+}
+exports.big_int_to_s = big_int_to_s;
+function reverse(s) {
+ return s.split('').reverse().join('');
+}
+function num_to_s(num) {
+ if (num < 0) {
+ return '-' + num_to_s(-num);
+ }
+ let [a, b] = num.toString().split('.');
+ if (!b) {
+ return int_to_s(num);
+ }
+ let c;
+ if (b) {
+ [b, c] = b.split('e');
+ }
+ a = int_str_to_s(a);
+ b = reverse(b);
+ b = int_str_to_s(b);
+ let str = a + '.' + b;
+ if (c) {
+ str += '.';
+ switch (c[0]) {
+ case '+':
+ c = c.slice(1);
+ break;
+ case '-':
+ str += '-';
+ c = c.slice(1);
+ break;
+ }
+ c = reverse(c);
+ c = int_str_to_s(c);
+ str += c;
+ }
+ return str;
+}
+exports.num_to_s = num_to_s;
+function int_str_to_s(int_str) {
+ const num = +int_str;
+ if (num.toString() === int_str) {
+ return int_to_s(num);
+ }
+ return ':' + big_int_to_s(BigInt(int_str));
+}
+exports.int_str_to_s = int_str_to_s;
+function s_to_int_str(s) {
+ if (s[0] === ':') {
+ return s_to_big_int(s.substring(1)).toString();
+ }
+ return s_to_int(s).toString();
+}
+function s_to_num(s) {
+ if (s[0] === '-') {
+ return -s_to_num(s.substr(1));
+ }
+ let [a, b, c] = s.split('.');
+ if (!b) {
+ return s_to_int(a);
+ }
+ a = s_to_int_str(a);
+ b = s_to_int_str(b);
+ b = reverse(b);
+ let str = a + '.' + b;
+ if (c) {
+ str += 'e';
+ let neg = false;
+ if (c[0] === '-') {
+ neg = true;
+ c = c.slice(1);
+ }
+ c = s_to_int_str(c);
+ c = reverse(c);
+ str += neg ? -c : +c;
+ }
+ return +str;
+}
+exports.s_to_num = s_to_num;
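+
+// Editor's note (illustrative sketch): integers are encoded in base 62 using the
+// digit/uppercase/lowercase alphabet built above, e.g.
+//
+//   int_to_s(61)    //=> 'z'
+//   int_to_s(62)    //=> '10'
+//   s_to_int('10')  //=> 62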
+
+
+/***/ }),
+
+/***/ 6891:
+/***/ ((module) => {
+
+module.exports = function (xs, fn) {
+ var res = [];
+ for (var i = 0; i < xs.length; i++) {
+ var x = fn(xs[i], i);
+ if (isArray(x)) res.push.apply(res, x);
+ else res.push(x);
+ }
+ return res;
+};
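+
+// Editor's note (illustrative sketch):
+//   concatMap([1, 2, 3], function (x) { return [x, -x]; })  //=> [1, -1, 2, -2, 3, -3]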
+
+var isArray = Array.isArray || function (xs) {
+ return Object.prototype.toString.call(xs) === '[object Array]';
+};
+
+
+/***/ }),
+
+/***/ 5446:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+// NOTE: These type checking functions intentionally don't use `instanceof`
+// because it is fragile and can be easily faked with `Object.create()`.
+
+function isArray(arg) {
+ if (Array.isArray) {
+ return Array.isArray(arg);
+ }
+ return objectToString(arg) === '[object Array]';
+}
+exports.isArray = isArray;
+
+function isBoolean(arg) {
+ return typeof arg === 'boolean';
+}
+exports.isBoolean = isBoolean;
+
+function isNull(arg) {
+ return arg === null;
+}
+exports.isNull = isNull;
+
+function isNullOrUndefined(arg) {
+ return arg == null;
+}
+exports.isNullOrUndefined = isNullOrUndefined;
+
+function isNumber(arg) {
+ return typeof arg === 'number';
+}
+exports.isNumber = isNumber;
+
+function isString(arg) {
+ return typeof arg === 'string';
+}
+exports.isString = isString;
+
+function isSymbol(arg) {
+ return typeof arg === 'symbol';
+}
+exports.isSymbol = isSymbol;
+
+function isUndefined(arg) {
+ return arg === void 0;
+}
+exports.isUndefined = isUndefined;
+
+function isRegExp(re) {
+ return objectToString(re) === '[object RegExp]';
+}
+exports.isRegExp = isRegExp;
+
+function isObject(arg) {
+ return typeof arg === 'object' && arg !== null;
+}
+exports.isObject = isObject;
+
+function isDate(d) {
+ return objectToString(d) === '[object Date]';
+}
+exports.isDate = isDate;
+
+function isError(e) {
+ return (objectToString(e) === '[object Error]' || e instanceof Error);
+}
+exports.isError = isError;
+
+function isFunction(arg) {
+ return typeof arg === 'function';
+}
+exports.isFunction = isFunction;
+
+function isPrimitive(arg) {
+ return arg === null ||
+ typeof arg === 'boolean' ||
+ typeof arg === 'number' ||
+ typeof arg === 'string' ||
+ typeof arg === 'symbol' || // ES6 symbol
+ typeof arg === 'undefined';
+}
+exports.isPrimitive = isPrimitive;
+
+exports.isBuffer = __nccwpck_require__(4300).Buffer.isBuffer;
+
+function objectToString(o) {
+ return Object.prototype.toString.call(o);
+}
+
+
+/***/ }),
+
+/***/ 8932:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+class Deprecation extends Error {
+ constructor(message) {
+ super(message); // Maintains proper stack trace (only available on V8)
+
+ /* istanbul ignore next */
+
+ if (Error.captureStackTrace) {
+ Error.captureStackTrace(this, this.constructor);
+ }
+
+ this.name = 'Deprecation';
+ }
+
+}
+
+exports.Deprecation = Deprecation;
+
+
+/***/ }),
+
+/***/ 3505:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+var util = __nccwpck_require__(3837);
+var isArrayish = __nccwpck_require__(7604);
+
+var errorEx = function errorEx(name, properties) {
+ if (!name || name.constructor !== String) {
+ properties = name || {};
+ name = Error.name;
+ }
+
+ var errorExError = function ErrorEXError(message) {
+ if (!this) {
+ return new ErrorEXError(message);
+ }
+
+ message = message instanceof Error
+ ? message.message
+ : (message || this.message);
+
+ Error.call(this, message);
+ Error.captureStackTrace(this, errorExError);
+
+ this.name = name;
+
+ Object.defineProperty(this, 'message', {
+ configurable: true,
+ enumerable: false,
+ get: function () {
+ var newMessage = message.split(/\r?\n/g);
+
+ for (var key in properties) {
+ if (!properties.hasOwnProperty(key)) {
+ continue;
+ }
+
+ var modifier = properties[key];
+
+ if ('message' in modifier) {
+ newMessage = modifier.message(this[key], newMessage) || newMessage;
+ if (!isArrayish(newMessage)) {
+ newMessage = [newMessage];
+ }
+ }
+ }
+
+ return newMessage.join('\n');
+ },
+ set: function (v) {
+ message = v;
+ }
+ });
+
+ var overwrittenStack = null;
+
+ var stackDescriptor = Object.getOwnPropertyDescriptor(this, 'stack');
+ var stackGetter = stackDescriptor.get;
+ var stackValue = stackDescriptor.value;
+ delete stackDescriptor.value;
+ delete stackDescriptor.writable;
+
+ stackDescriptor.set = function (newstack) {
+ overwrittenStack = newstack;
+ };
+
+ stackDescriptor.get = function () {
+ var stack = (overwrittenStack || ((stackGetter)
+ ? stackGetter.call(this)
+ : stackValue)).split(/\r?\n+/g);
+
+ // starting in Node 7, the stack builder caches the message.
+ // just replace it.
+ if (!overwrittenStack) {
+ stack[0] = this.name + ': ' + this.message;
+ }
+
+ var lineCount = 1;
+ for (var key in properties) {
+ if (!properties.hasOwnProperty(key)) {
+ continue;
+ }
+
+ var modifier = properties[key];
+
+ if ('line' in modifier) {
+ var line = modifier.line(this[key]);
+ if (line) {
+ stack.splice(lineCount++, 0, ' ' + line);
+ }
+ }
+
+ if ('stack' in modifier) {
+ modifier.stack(this[key], stack);
+ }
+ }
+
+ return stack.join('\n');
+ };
+
+ Object.defineProperty(this, 'stack', stackDescriptor);
+ };
+
+ if (Object.setPrototypeOf) {
+ Object.setPrototypeOf(errorExError.prototype, Error.prototype);
+ Object.setPrototypeOf(errorExError, Error);
+ } else {
+ util.inherits(errorExError, Error);
+ }
+
+ return errorExError;
+};
+
+errorEx.append = function (str, def) {
+ return {
+ message: function (v, message) {
+ v = v || def;
+
+ if (v) {
+ message[0] += ' ' + str.replace('%s', v.toString());
+ }
+
+ return message;
+ }
+ };
+};
+
+errorEx.line = function (str, def) {
+ return {
+ line: function (v) {
+ v = v || def;
+
+ if (v) {
+ return str.replace('%s', v.toString());
+ }
+
+ return null;
+ }
+ };
+};
+
+module.exports = errorEx;
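+
+// Editor's note (illustrative sketch, hypothetical usage): errorEx builds Error
+// subclasses whose message is augmented from instance properties, e.g.
+//
+//   var JSONError = errorEx('JSONError', { fileName: errorEx.append('in %s') });
+//   var err = new JSONError('Unexpected token');
+//   err.fileName = 'foo.json';
+//   err.message   //=> 'Unexpected token in foo.json'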
+
+
+/***/ }),
+
+/***/ 6330:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+/*!
+ * fill-range
+ *
+ * Copyright (c) 2014-present, Jon Schlinkert.
+ * Licensed under the MIT License.
+ */
+
+
+
+const util = __nccwpck_require__(3837);
+const toRegexRange = __nccwpck_require__(1861);
+
+const isObject = val => val !== null && typeof val === 'object' && !Array.isArray(val);
+
+const transform = toNumber => {
+ return value => toNumber === true ? Number(value) : String(value);
+};
+
+const isValidValue = value => {
+ return typeof value === 'number' || (typeof value === 'string' && value !== '');
+};
+
+const isNumber = num => Number.isInteger(+num);
+
+const zeros = input => {
+ let value = `${input}`;
+ let index = -1;
+ if (value[0] === '-') value = value.slice(1);
+ if (value === '0') return false;
+ while (value[++index] === '0');
+ return index > 0;
+};
+
+const stringify = (start, end, options) => {
+ if (typeof start === 'string' || typeof end === 'string') {
+ return true;
+ }
+ return options.stringify === true;
+};
+
+const pad = (input, maxLength, toNumber) => {
+ if (maxLength > 0) {
+ let dash = input[0] === '-' ? '-' : '';
+ if (dash) input = input.slice(1);
+ input = (dash + input.padStart(dash ? maxLength - 1 : maxLength, '0'));
+ }
+ if (toNumber === false) {
+ return String(input);
+ }
+ return input;
+};
+
+const toMaxLen = (input, maxLength) => {
+ let negative = input[0] === '-' ? '-' : '';
+ if (negative) {
+ input = input.slice(1);
+ maxLength--;
+ }
+ while (input.length < maxLength) input = '0' + input;
+ return negative ? ('-' + input) : input;
+};
+
+const toSequence = (parts, options) => {
+ parts.negatives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
+ parts.positives.sort((a, b) => a < b ? -1 : a > b ? 1 : 0);
+
+ let prefix = options.capture ? '' : '?:';
+ let positives = '';
+ let negatives = '';
+ let result;
+
+ if (parts.positives.length) {
+ positives = parts.positives.join('|');
+ }
+
+ if (parts.negatives.length) {
+ negatives = `-(${prefix}${parts.negatives.join('|')})`;
+ }
+
+ if (positives && negatives) {
+ result = `${positives}|${negatives}`;
+ } else {
+ result = positives || negatives;
+ }
+
+ if (options.wrap) {
+ return `(${prefix}${result})`;
+ }
+
+ return result;
+};
+
+const toRange = (a, b, isNumbers, options) => {
+ if (isNumbers) {
+ return toRegexRange(a, b, { wrap: false, ...options });
+ }
+
+ let start = String.fromCharCode(a);
+ if (a === b) return start;
+
+ let stop = String.fromCharCode(b);
+ return `[${start}-${stop}]`;
+};
+
+const toRegex = (start, end, options) => {
+ if (Array.isArray(start)) {
+ let wrap = options.wrap === true;
+ let prefix = options.capture ? '' : '?:';
+ return wrap ? `(${prefix}${start.join('|')})` : start.join('|');
+ }
+ return toRegexRange(start, end, options);
+};
+
+const rangeError = (...args) => {
+ return new RangeError('Invalid range arguments: ' + util.inspect(...args));
+};
+
+const invalidRange = (start, end, options) => {
+ if (options.strictRanges === true) throw rangeError([start, end]);
+ return [];
+};
+
+const invalidStep = (step, options) => {
+ if (options.strictRanges === true) {
+ throw new TypeError(`Expected step "${step}" to be a number`);
+ }
+ return [];
+};
+
+const fillNumbers = (start, end, step = 1, options = {}) => {
+ let a = Number(start);
+ let b = Number(end);
+
+ if (!Number.isInteger(a) || !Number.isInteger(b)) {
+ if (options.strictRanges === true) throw rangeError([start, end]);
+ return [];
+ }
+
+ // fix negative zero
+ if (a === 0) a = 0;
+ if (b === 0) b = 0;
+
+ let descending = a > b;
+ let startString = String(start);
+ let endString = String(end);
+ let stepString = String(step);
+ step = Math.max(Math.abs(step), 1);
+
+ let padded = zeros(startString) || zeros(endString) || zeros(stepString);
+ let maxLen = padded ? Math.max(startString.length, endString.length, stepString.length) : 0;
+ let toNumber = padded === false && stringify(start, end, options) === false;
+ let format = options.transform || transform(toNumber);
+
+ if (options.toRegex && step === 1) {
+ return toRange(toMaxLen(start, maxLen), toMaxLen(end, maxLen), true, options);
+ }
+
+ let parts = { negatives: [], positives: [] };
+ let push = num => parts[num < 0 ? 'negatives' : 'positives'].push(Math.abs(num));
+ let range = [];
+ let index = 0;
+
+ while (descending ? a >= b : a <= b) {
+ if (options.toRegex === true && step > 1) {
+ push(a);
+ } else {
+ range.push(pad(format(a, index), maxLen, toNumber));
+ }
+ a = descending ? a - step : a + step;
+ index++;
+ }
+
+ if (options.toRegex === true) {
+ return step > 1
+ ? toSequence(parts, options)
+ : toRegex(range, null, { wrap: false, ...options });
+ }
+
+ return range;
+};
+
+const fillLetters = (start, end, step = 1, options = {}) => {
+ if ((!isNumber(start) && start.length > 1) || (!isNumber(end) && end.length > 1)) {
+ return invalidRange(start, end, options);
+ }
+
+
+ let format = options.transform || (val => String.fromCharCode(val));
+ let a = `${start}`.charCodeAt(0);
+ let b = `${end}`.charCodeAt(0);
+
+ let descending = a > b;
+ let min = Math.min(a, b);
+ let max = Math.max(a, b);
+
+ if (options.toRegex && step === 1) {
+ return toRange(min, max, false, options);
+ }
+
+ let range = [];
+ let index = 0;
+
+ while (descending ? a >= b : a <= b) {
+ range.push(format(a, index));
+ a = descending ? a - step : a + step;
+ index++;
+ }
+
+ if (options.toRegex === true) {
+    return toRegex(range, null, { wrap: false, ...options });
+ }
+
+ return range;
+};
+
+const fill = (start, end, step, options = {}) => {
+ if (end == null && isValidValue(start)) {
+ return [start];
+ }
+
+ if (!isValidValue(start) || !isValidValue(end)) {
+ return invalidRange(start, end, options);
+ }
+
+ if (typeof step === 'function') {
+ return fill(start, end, 1, { transform: step });
+ }
+
+ if (isObject(step)) {
+ return fill(start, end, 0, step);
+ }
+
+ let opts = { ...options };
+ if (opts.capture === true) opts.wrap = true;
+ step = step || opts.step || 1;
+
+ if (!isNumber(step)) {
+ if (step != null && !isObject(step)) return invalidStep(step, opts);
+ return fill(start, end, 1, step);
+ }
+
+ if (isNumber(start) && isNumber(end)) {
+ return fillNumbers(start, end, step, opts);
+ }
+
+ return fillLetters(start, end, Math.max(Math.abs(step), 1), opts);
+};
+
+module.exports = fill;
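+
+// Editor's note (illustrative sketch): fill() powers the {x..y} ranges above, e.g.
+//
+//   fill('a', 'e')    //=> ['a', 'b', 'c', 'd', 'e']
+//   fill(1, 5, 2)     //=> [1, 3, 5]
+//   fill('01', '03')  //=> ['01', '02', '03']   (zero-padding is preserved)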
+
+
+/***/ }),
+
+/***/ 6863:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = realpath
+realpath.realpath = realpath
+realpath.sync = realpathSync
+realpath.realpathSync = realpathSync
+realpath.monkeypatch = monkeypatch
+realpath.unmonkeypatch = unmonkeypatch
+
+var fs = __nccwpck_require__(7147)
+var origRealpath = fs.realpath
+var origRealpathSync = fs.realpathSync
+
+var version = process.version
+var ok = /^v[0-5]\./.test(version)
+var old = __nccwpck_require__(1734)
+
+function newError (er) {
+ return er && er.syscall === 'realpath' && (
+ er.code === 'ELOOP' ||
+ er.code === 'ENOMEM' ||
+ er.code === 'ENAMETOOLONG'
+ )
+}
+
+function realpath (p, cache, cb) {
+ if (ok) {
+ return origRealpath(p, cache, cb)
+ }
+
+ if (typeof cache === 'function') {
+ cb = cache
+ cache = null
+ }
+ origRealpath(p, cache, function (er, result) {
+ if (newError(er)) {
+ old.realpath(p, cache, cb)
+ } else {
+ cb(er, result)
+ }
+ })
+}
+
+function realpathSync (p, cache) {
+ if (ok) {
+ return origRealpathSync(p, cache)
+ }
+
+ try {
+ return origRealpathSync(p, cache)
+ } catch (er) {
+ if (newError(er)) {
+ return old.realpathSync(p, cache)
+ } else {
+ throw er
+ }
+ }
+}
+
+function monkeypatch () {
+ fs.realpath = realpath
+ fs.realpathSync = realpathSync
+}
+
+function unmonkeypatch () {
+ fs.realpath = origRealpath
+ fs.realpathSync = origRealpathSync
+}
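+
+// Editor's note (illustrative sketch): on Node >= 6 the native fs.realpath can
+// fail with ELOOP/ENOMEM/ENAMETOOLONG where the old JS implementation did not;
+// realpath()/realpathSync() above retry those failures with the ported pre-v6
+// implementation, and monkeypatch()/unmonkeypatch() swap fs.realpath globally.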
+
+
+/***/ }),
+
+/***/ 1734:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+var pathModule = __nccwpck_require__(1017);
+var isWindows = process.platform === 'win32';
+var fs = __nccwpck_require__(7147);
+
+// JavaScript implementation of realpath, ported from node pre-v6
+
+var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
+
+function rethrow() {
+ // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
+ // is fairly slow to generate.
+ var callback;
+ if (DEBUG) {
+ var backtrace = new Error;
+ callback = debugCallback;
+ } else
+ callback = missingCallback;
+
+ return callback;
+
+ function debugCallback(err) {
+ if (err) {
+ backtrace.message = err.message;
+ err = backtrace;
+ missingCallback(err);
+ }
+ }
+
+ function missingCallback(err) {
+ if (err) {
+ if (process.throwDeprecation)
+ throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
+ else if (!process.noDeprecation) {
+ var msg = 'fs: missing callback ' + (err.stack || err.message);
+ if (process.traceDeprecation)
+ console.trace(msg);
+ else
+ console.error(msg);
+ }
+ }
+ }
+}
+
+function maybeCallback(cb) {
+ return typeof cb === 'function' ? cb : rethrow();
+}
+
+var normalize = pathModule.normalize;
+
+// Regexp that finds the next portion of a (partial) path
+// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
+if (isWindows) {
+ var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
+} else {
+ var nextPartRe = /(.*?)(?:[\/]+|$)/g;
+}
+
+// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
+if (isWindows) {
+ var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
+} else {
+ var splitRootRe = /^[\/]*/;
+}
+
+exports.realpathSync = function realpathSync(p, cache) {
+  // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return cache[p];
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstatSync(base);
+ knownHard[base] = true;
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ // NB: p.length changes.
+ while (pos < p.length) {
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ continue;
+ }
+
+ var resolvedLink;
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // some known symbolic link. no need to stat again.
+ resolvedLink = cache[base];
+ } else {
+ var stat = fs.lstatSync(base);
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ continue;
+ }
+
+ // read the link if it wasn't read before
+ // dev/ino always return 0 on windows, so skip the check.
+ var linkTarget = null;
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ linkTarget = seenLinks[id];
+ }
+ }
+ if (linkTarget === null) {
+ fs.statSync(base);
+ linkTarget = fs.readlinkSync(base);
+ }
+ resolvedLink = pathModule.resolve(previous, linkTarget);
+ // track this, if given a cache.
+ if (cache) cache[base] = resolvedLink;
+ if (!isWindows) seenLinks[id] = linkTarget;
+ }
+
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+
+ if (cache) cache[original] = p;
+
+ return p;
+};
+
+
+exports.realpath = function realpath(p, cache, cb) {
+ if (typeof cb !== 'function') {
+ cb = maybeCallback(cache);
+ cache = null;
+ }
+
+  // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return process.nextTick(cb.bind(null, null, cache[p]));
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstat(base, function(err) {
+ if (err) return cb(err);
+ knownHard[base] = true;
+ LOOP();
+ });
+ } else {
+ process.nextTick(LOOP);
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ function LOOP() {
+ // stop if scanned past end of path
+ if (pos >= p.length) {
+ if (cache) cache[original] = p;
+ return cb(null, p);
+ }
+
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ return process.nextTick(LOOP);
+ }
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // known symbolic link. no need to stat again.
+ return gotResolvedLink(cache[base]);
+ }
+
+ return fs.lstat(base, gotStat);
+ }
+
+ function gotStat(err, stat) {
+ if (err) return cb(err);
+
+ // if not a symlink, skip to the next path part
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ return process.nextTick(LOOP);
+ }
+
+ // stat & read the link if not read before
+ // call gotTarget as soon as the link target is known
+ // dev/ino always return 0 on windows, so skip the check.
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ return gotTarget(null, seenLinks[id], base);
+ }
+ }
+ fs.stat(base, function(err) {
+ if (err) return cb(err);
+
+ fs.readlink(base, function(err, target) {
+ if (!isWindows) seenLinks[id] = target;
+ gotTarget(err, target);
+ });
+ });
+ }
+
+ function gotTarget(err, target, base) {
+ if (err) return cb(err);
+
+ var resolvedLink = pathModule.resolve(previous, target);
+ if (cache) cache[base] = resolvedLink;
+ gotResolvedLink(resolvedLink);
+ }
+
+ function gotResolvedLink(resolvedLink) {
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+};
+
+
+/***/ }),
+
+/***/ 7625:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+exports.setopts = setopts
+exports.ownProp = ownProp
+exports.makeAbs = makeAbs
+exports.finish = finish
+exports.mark = mark
+exports.isIgnored = isIgnored
+exports.childrenIgnored = childrenIgnored
+
+function ownProp (obj, field) {
+ return Object.prototype.hasOwnProperty.call(obj, field)
+}
+
+var fs = __nccwpck_require__(7147)
+var path = __nccwpck_require__(1017)
+var minimatch = __nccwpck_require__(3973)
+var isAbsolute = __nccwpck_require__(8714)
+var Minimatch = minimatch.Minimatch
+
+function alphasort (a, b) {
+ return a.localeCompare(b, 'en')
+}
+
+function setupIgnores (self, options) {
+ self.ignore = options.ignore || []
+
+ if (!Array.isArray(self.ignore))
+ self.ignore = [self.ignore]
+
+ if (self.ignore.length) {
+ self.ignore = self.ignore.map(ignoreMap)
+ }
+}
+
+// ignore patterns are always in dot:true mode.
+function ignoreMap (pattern) {
+ var gmatcher = null
+ if (pattern.slice(-3) === '/**') {
+ var gpattern = pattern.replace(/(\/\*\*)+$/, '')
+ gmatcher = new Minimatch(gpattern, { dot: true })
+ }
+
+ return {
+ matcher: new Minimatch(pattern, { dot: true }),
+ gmatcher: gmatcher
+ }
+}
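+
+// For example, ignore: ['node_modules/**'] yields, roughly,
+//   { matcher:  new Minimatch('node_modules/**', { dot: true }),
+//     gmatcher: new Minimatch('node_modules',    { dot: true }) }
+// so the parent directory itself is treated as ignored, not just its contents.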
+
+function setopts (self, pattern, options) {
+ if (!options)
+ options = {}
+
+ // base-matching: just use globstar for that.
+ if (options.matchBase && -1 === pattern.indexOf("/")) {
+ if (options.noglobstar) {
+ throw new Error("base matching requires globstar")
+ }
+ pattern = "**/" + pattern
+ }
+
+ self.silent = !!options.silent
+ self.pattern = pattern
+ self.strict = options.strict !== false
+ self.realpath = !!options.realpath
+ self.realpathCache = options.realpathCache || Object.create(null)
+ self.follow = !!options.follow
+ self.dot = !!options.dot
+ self.mark = !!options.mark
+ self.nodir = !!options.nodir
+ if (self.nodir)
+ self.mark = true
+ self.sync = !!options.sync
+ self.nounique = !!options.nounique
+ self.nonull = !!options.nonull
+ self.nosort = !!options.nosort
+ self.nocase = !!options.nocase
+ self.stat = !!options.stat
+ self.noprocess = !!options.noprocess
+ self.absolute = !!options.absolute
+ self.fs = options.fs || fs
+
+ self.maxLength = options.maxLength || Infinity
+ self.cache = options.cache || Object.create(null)
+ self.statCache = options.statCache || Object.create(null)
+ self.symlinks = options.symlinks || Object.create(null)
+
+ setupIgnores(self, options)
+
+ self.changedCwd = false
+ var cwd = process.cwd()
+ if (!ownProp(options, "cwd"))
+ self.cwd = cwd
+ else {
+ self.cwd = path.resolve(options.cwd)
+ self.changedCwd = self.cwd !== cwd
+ }
+
+ self.root = options.root || path.resolve(self.cwd, "/")
+ self.root = path.resolve(self.root)
+ if (process.platform === "win32")
+ self.root = self.root.replace(/\\/g, "/")
+
+ // TODO: is an absolute `cwd` supposed to be resolved against `root`?
+ // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
+ self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
+ if (process.platform === "win32")
+ self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
+ self.nomount = !!options.nomount
+
+ // disable comments and negation in Minimatch.
+ // Note that they are not supported in Glob itself anyway.
+ options.nonegate = true
+ options.nocomment = true
+ // always treat \ in patterns as escapes, not path separators
+ options.allowWindowsEscape = false
+
+ self.minimatch = new Minimatch(pattern, options)
+ self.options = self.minimatch.options
+}
+
+function finish (self) {
+ var nou = self.nounique
+ var all = nou ? [] : Object.create(null)
+
+ for (var i = 0, l = self.matches.length; i < l; i ++) {
+ var matches = self.matches[i]
+ if (!matches || Object.keys(matches).length === 0) {
+ if (self.nonull) {
+ // do like the shell, and spit out the literal glob
+ var literal = self.minimatch.globSet[i]
+ if (nou)
+ all.push(literal)
+ else
+ all[literal] = true
+ }
+ } else {
+ // had matches
+ var m = Object.keys(matches)
+ if (nou)
+ all.push.apply(all, m)
+ else
+ m.forEach(function (m) {
+ all[m] = true
+ })
+ }
+ }
+
+ if (!nou)
+ all = Object.keys(all)
+
+ if (!self.nosort)
+ all = all.sort(alphasort)
+
+ // at *some* point we statted all of these
+ if (self.mark) {
+ for (var i = 0; i < all.length; i++) {
+ all[i] = self._mark(all[i])
+ }
+ if (self.nodir) {
+ all = all.filter(function (e) {
+ var notDir = !(/\/$/.test(e))
+ var c = self.cache[e] || self.cache[makeAbs(self, e)]
+ if (notDir && c)
+ notDir = c !== 'DIR' && !Array.isArray(c)
+ return notDir
+ })
+ }
+ }
+
+ if (self.ignore.length)
+ all = all.filter(function(m) {
+ return !isIgnored(self, m)
+ })
+
+ self.found = all
+}
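+
+// For instance, with nonull set and no matches for 'no/such/*.js' (hypothetical
+// pattern), finish() falls back to the literal glob, so self.found becomes
+// ['no/such/*.js']; without nonull it would be [].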
+
+function mark (self, p) {
+ var abs = makeAbs(self, p)
+ var c = self.cache[abs]
+ var m = p
+ if (c) {
+ var isDir = c === 'DIR' || Array.isArray(c)
+ var slash = p.slice(-1) === '/'
+
+ if (isDir && !slash)
+ m += '/'
+ else if (!isDir && slash)
+ m = m.slice(0, -1)
+
+ if (m !== p) {
+ var mabs = makeAbs(self, m)
+ self.statCache[mabs] = self.statCache[abs]
+ self.cache[mabs] = self.cache[abs]
+ }
+ }
+
+ return m
+}
+
+// lotta situps...
+function makeAbs (self, f) {
+ var abs = f
+ if (f.charAt(0) === '/') {
+ abs = path.join(self.root, f)
+ } else if (isAbsolute(f) || f === '') {
+ abs = f
+ } else if (self.changedCwd) {
+ abs = path.resolve(self.cwd, f)
+ } else {
+ abs = path.resolve(f)
+ }
+
+ if (process.platform === 'win32')
+ abs = abs.replace(/\\/g, '/')
+
+ return abs
+}
+
+
+// Returns true if the given path matches an ignore pattern. A pattern that ends
+// with a globstar '**' also ignores the accompanying parent directory:
+// e.g. for the pattern 'node_modules/**', 'node_modules' itself is added to the
+// ignore list along with its contents.
+function isIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+function childrenIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
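+
+// Sketch of how the two checks differ, again assuming ignore: ['node_modules/**']:
+//   isIgnored(self, 'node_modules/a.js')  -> true  (matcher)
+//   isIgnored(self, 'node_modules')       -> true  (gmatcher)
+//   childrenIgnored(self, 'node_modules') -> true, which lets the walker skip
+// reading that directory entirely instead of filtering each entry afterwards.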
+
+
+/***/ }),
+
+/***/ 1957:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+// Approach:
+//
+// 1. Get the minimatch set
+// 2. For each pattern in the set, PROCESS(pattern, false)
+// 3. Store matches per-set, then uniq them
+//
+// PROCESS(pattern, inGlobStar)
+// Get the first [n] items from pattern that are all strings
+// Join these together. This is PREFIX.
+// If there is no more remaining, then stat(PREFIX) and
+// add to matches if it succeeds. END.
+//
+// If inGlobStar and PREFIX is symlink and points to dir
+// set ENTRIES = []
+// else readdir(PREFIX) as ENTRIES
+// If fail, END
+//
+// with ENTRIES
+// If pattern[n] is GLOBSTAR
+// // handle the case where the globstar match is empty
+// // by pruning it out, and testing the resulting pattern
+// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
+// // handle other cases.
+// for ENTRY in ENTRIES (not dotfiles)
+// // attach globstar + tail onto the entry
+// // Mark that this entry is a globstar match
+// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
+//
+// else // not globstar
+// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
+// Test ENTRY against pattern[n]
+// If fails, continue
+// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
+//
+// Caveat:
+// Cache all stats and readdirs results to minimize syscall. Since all
+// we ever care about is existence and directory-ness, we can just keep
+// `true` for files, and [children,...] for directories, or `false` for
+// things that don't exist.
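+//
+// Rough worked example: for the pattern 'src/**/*.js' the minimatch set holds
+// one item, ['src', GLOBSTAR, <matcher for '*.js'>]. PROCESS takes the leading
+// string parts as PREFIX ('src'), readdirs it, then recurses with the globstar
+// both pruned out and re-attached to every child entry. Afterwards the cache
+// might look like (paths hypothetical):
+//   cache['/repo/src']        = ['a.js', 'lib']  // directory: its entries
+//   cache['/repo/src/a.js']   = true             // exists, type not needed
+//   cache['/repo/missing.js'] = false            // known not to exist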
+
+module.exports = glob
+
+var rp = __nccwpck_require__(6863)
+var minimatch = __nccwpck_require__(3973)
+var Minimatch = minimatch.Minimatch
+var inherits = __nccwpck_require__(4124)
+var EE = (__nccwpck_require__(2361).EventEmitter)
+var path = __nccwpck_require__(1017)
+var assert = __nccwpck_require__(9491)
+var isAbsolute = __nccwpck_require__(8714)
+var globSync = __nccwpck_require__(9010)
+var common = __nccwpck_require__(7625)
+var setopts = common.setopts
+var ownProp = common.ownProp
+var inflight = __nccwpck_require__(2492)
+var util = __nccwpck_require__(3837)
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+var once = __nccwpck_require__(1223)
+
+function glob (pattern, options, cb) {
+ if (typeof options === 'function') cb = options, options = {}
+ if (!options) options = {}
+
+ if (options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return globSync(pattern, options)
+ }
+
+ return new Glob(pattern, options, cb)
+}
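+
+// Minimal usage sketch of the API defined above (pattern and options are
+// illustrative):
+//   glob('**/*.js', { ignore: 'node_modules/**' }, function (er, files) { /* ... */ })
+//   var files = glob.sync('**/*.js', { nodir: true })
+// Passing { sync: true } together with a callback throws, as enforced above.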
+
+glob.sync = globSync
+var GlobSync = glob.GlobSync = globSync.GlobSync
+
+// old api surface
+glob.glob = glob
+
+function extend (origin, add) {
+ if (add === null || typeof add !== 'object') {
+ return origin
+ }
+
+ var keys = Object.keys(add)
+ var i = keys.length
+ while (i--) {
+ origin[keys[i]] = add[keys[i]]
+ }
+ return origin
+}
+
+glob.hasMagic = function (pattern, options_) {
+ var options = extend({}, options_)
+ options.noprocess = true
+
+ var g = new Glob(pattern, options)
+ var set = g.minimatch.set
+
+ if (!pattern)
+ return false
+
+ if (set.length > 1)
+ return true
+
+ for (var j = 0; j < set[0].length; j++) {
+ if (typeof set[0][j] !== 'string')
+ return true
+ }
+
+ return false
+}
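+
+// Roughly: hasMagic('a/{b,c}.js') and hasMagic('a/*.js') are true (a set with
+// more than one pattern, resp. a non-string set item), while hasMagic('a/b.js')
+// and hasMagic('') are false.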
+
+glob.Glob = Glob
+inherits(Glob, EE)
+function Glob (pattern, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = null
+ }
+
+ if (options && options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return new GlobSync(pattern, options)
+ }
+
+ if (!(this instanceof Glob))
+ return new Glob(pattern, options, cb)
+
+ setopts(this, pattern, options)
+  this._didRealpath = false
+
+ // process each pattern in the minimatch set
+ var n = this.minimatch.set.length
+
+  // The matches are stored as {<filename>: true, ...} so that
+ // duplicates are automagically pruned.
+ // Later, we do an Object.keys() on these.
+ // Keep them as a list so we can fill in when nonull is set.
+ this.matches = new Array(n)
+
+ if (typeof cb === 'function') {
+ cb = once(cb)
+ this.on('error', cb)
+ this.on('end', function (matches) {
+ cb(null, matches)
+ })
+ }
+
+ var self = this
+ this._processing = 0
+
+ this._emitQueue = []
+ this._processQueue = []
+ this.paused = false
+
+ if (this.noprocess)
+ return this
+
+ if (n === 0)
+ return done()
+
+ var sync = true
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false, done)
+ }
+ sync = false
+
+ function done () {
+ --self._processing
+ if (self._processing <= 0) {
+ if (sync) {
+ process.nextTick(function () {
+ self._finish()
+ })
+ } else {
+ self._finish()
+ }
+ }
+ }
+}
+
+Glob.prototype._finish = function () {
+ assert(this instanceof Glob)
+ if (this.aborted)
+ return
+
+ if (this.realpath && !this._didRealpath)
+ return this._realpath()
+
+ common.finish(this)
+ this.emit('end', this.found)
+}
+
+Glob.prototype._realpath = function () {
+ if (this._didRealpath)
+ return
+
+ this._didRealpath = true
+
+ var n = this.matches.length
+ if (n === 0)
+ return this._finish()
+
+ var self = this
+ for (var i = 0; i < this.matches.length; i++)
+ this._realpathSet(i, next)
+
+ function next () {
+ if (--n === 0)
+ self._finish()
+ }
+}
+
+Glob.prototype._realpathSet = function (index, cb) {
+ var matchset = this.matches[index]
+ if (!matchset)
+ return cb()
+
+ var found = Object.keys(matchset)
+ var self = this
+ var n = found.length
+
+ if (n === 0)
+ return cb()
+
+ var set = this.matches[index] = Object.create(null)
+ found.forEach(function (p, i) {
+ // If there's a problem with the stat, then it means that
+ // one or more of the links in the realpath couldn't be
+ // resolved. just return the abs value in that case.
+ p = self._makeAbs(p)
+ rp.realpath(p, self.realpathCache, function (er, real) {
+ if (!er)
+ set[real] = true
+ else if (er.syscall === 'stat')
+ set[p] = true
+ else
+        self.emit('error', er) // unexpected error from realpath; surface it
+
+ if (--n === 0) {
+ self.matches[index] = set
+ cb()
+ }
+ })
+ })
+}
+
+Glob.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+Glob.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+Glob.prototype.abort = function () {
+ this.aborted = true
+ this.emit('abort')
+}
+
+Glob.prototype.pause = function () {
+ if (!this.paused) {
+ this.paused = true
+ this.emit('pause')
+ }
+}
+
+Glob.prototype.resume = function () {
+ if (this.paused) {
+ this.emit('resume')
+ this.paused = false
+ if (this._emitQueue.length) {
+ var eq = this._emitQueue.slice(0)
+ this._emitQueue.length = 0
+ for (var i = 0; i < eq.length; i ++) {
+ var e = eq[i]
+ this._emitMatch(e[0], e[1])
+ }
+ }
+ if (this._processQueue.length) {
+ var pq = this._processQueue.slice(0)
+ this._processQueue.length = 0
+ for (var i = 0; i < pq.length; i ++) {
+ var p = pq[i]
+ this._processing--
+ this._process(p[0], p[1], p[2], p[3])
+ }
+ }
+ }
+}
+
+Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
+ assert(this instanceof Glob)
+ assert(typeof cb === 'function')
+
+ if (this.aborted)
+ return
+
+ this._processing++
+ if (this.paused) {
+ this._processQueue.push([pattern, index, inGlobStar, cb])
+ return
+ }
+
+ //console.error('PROCESS %d', this._processing, pattern)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // see if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index, cb)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) ||
+ isAbsolute(pattern.map(function (p) {
+ return typeof p === 'string' ? p : '[*]'
+ }).join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip _processing
+ if (childrenIgnored(this, read))
+ return cb()
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
+}
+
+Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return cb()
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
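+  // e.g. with dot unset, an entry like '.git' only passes the filter below when
+  // the raw glob for this part itself starts with a dot, such as '.*'.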
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return cb()
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return cb()
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+ this._process([e].concat(remain), index, inGlobStar, cb)
+ }
+ cb()
+}
+
+Glob.prototype._emitMatch = function (index, e) {
+ if (this.aborted)
+ return
+
+ if (isIgnored(this, e))
+ return
+
+ if (this.paused) {
+ this._emitQueue.push([index, e])
+ return
+ }
+
+ var abs = isAbsolute(e) ? e : this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute)
+ e = abs
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ var st = this.statCache[abs]
+ if (st)
+ this.emit('stat', e, st)
+
+ this.emit('match', e)
+}
+
+Glob.prototype._readdirInGlobStar = function (abs, cb) {
+ if (this.aborted)
+ return
+
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false, cb)
+
+ var lstatkey = 'lstat\0' + abs
+ var self = this
+ var lstatcb = inflight(lstatkey, lstatcb_)
+
+ if (lstatcb)
+ self.fs.lstat(abs, lstatcb)
+
+ function lstatcb_ (er, lstat) {
+ if (er && er.code === 'ENOENT')
+ return cb()
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ self.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory()) {
+ self.cache[abs] = 'FILE'
+ cb()
+ } else
+ self._readdir(abs, false, cb)
+ }
+}
+
+Glob.prototype._readdir = function (abs, inGlobStar, cb) {
+ if (this.aborted)
+ return
+
+ cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
+ if (!cb)
+ return
+
+ //console.error('RD %j %j', +inGlobStar, abs)
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs, cb)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return cb()
+
+ if (Array.isArray(c))
+ return cb(null, c)
+ }
+
+ var self = this
+ self.fs.readdir(abs, readdirCb(this, abs, cb))
+}
+
+function readdirCb (self, abs, cb) {
+ return function (er, entries) {
+ if (er)
+ self._readdirError(abs, er, cb)
+ else
+ self._readdirEntries(abs, entries, cb)
+ }
+}
+
+Glob.prototype._readdirEntries = function (abs, entries, cb) {
+ if (this.aborted)
+ return
+
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+ return cb(null, entries)
+}
+
+Glob.prototype._readdirError = function (f, er, cb) {
+ if (this.aborted)
+ return
+
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ this.emit('error', error)
+ this.abort()
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict) {
+ this.emit('error', er)
+ // If the error is handled, then we abort
+ // if not, we threw out of here
+ this.abort()
+ }
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+
+ return cb()
+}
+
+Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+
+Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+ //console.error('pgs2', prefix, remain[0], entries)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return cb()
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false, cb)
+
+ var isSym = this.symlinks[abs]
+ var len = entries.length
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return cb()
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true, cb)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true, cb)
+ }
+
+ cb()
+}
+
+Glob.prototype._processSimple = function (prefix, index, cb) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var self = this
+ this._stat(prefix, function (er, exists) {
+ self._processSimple2(prefix, index, er, exists, cb)
+ })
+}
+Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
+
+ //console.error('ps2', prefix, exists)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return cb()
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+ cb()
+}
+
+// Returns either 'DIR', 'FILE', or false
+Glob.prototype._stat = function (f, cb) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return cb()
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return cb(null, c)
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (stat !== undefined) {
+ if (stat === false)
+ return cb(null, stat)
+ else {
+ var type = stat.isDirectory() ? 'DIR' : 'FILE'
+ if (needDir && type === 'FILE')
+ return cb()
+ else
+ return cb(null, type, stat)
+ }
+ }
+
+ var self = this
+ var statcb = inflight('stat\0' + abs, lstatcb_)
+ if (statcb)
+ self.fs.lstat(abs, statcb)
+
+ function lstatcb_ (er, lstat) {
+ if (lstat && lstat.isSymbolicLink()) {
+ // If it's a symlink, then treat it as the target, unless
+ // the target does not exist, then treat it as a file.
+ return self.fs.stat(abs, function (er, stat) {
+ if (er)
+ self._stat2(f, abs, null, lstat, cb)
+ else
+ self._stat2(f, abs, er, stat, cb)
+ })
+ } else {
+ self._stat2(f, abs, er, lstat, cb)
+ }
+ }
+}
+
+Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return cb()
+ }
+
+ var needDir = f.slice(-1) === '/'
+ this.statCache[abs] = stat
+
+ if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
+ return cb(null, false, stat)
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ return cb(null, c, stat)
+}
+
+
+/***/ }),
+
+/***/ 9010:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = globSync
+globSync.GlobSync = GlobSync
+
+var rp = __nccwpck_require__(6863)
+var minimatch = __nccwpck_require__(3973)
+var Minimatch = minimatch.Minimatch
+var Glob = (__nccwpck_require__(1957).Glob)
+var util = __nccwpck_require__(3837)
+var path = __nccwpck_require__(1017)
+var assert = __nccwpck_require__(9491)
+var isAbsolute = __nccwpck_require__(8714)
+var common = __nccwpck_require__(7625)
+var setopts = common.setopts
+var ownProp = common.ownProp
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+function globSync (pattern, options) {
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ return new GlobSync(pattern, options).found
+}
+
+function GlobSync (pattern, options) {
+ if (!pattern)
+ throw new Error('must provide pattern')
+
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ if (!(this instanceof GlobSync))
+ return new GlobSync(pattern, options)
+
+ setopts(this, pattern, options)
+
+ if (this.noprocess)
+ return this
+
+ var n = this.minimatch.set.length
+ this.matches = new Array(n)
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false)
+ }
+ this._finish()
+}
+
+GlobSync.prototype._finish = function () {
+ assert.ok(this instanceof GlobSync)
+ if (this.realpath) {
+ var self = this
+ this.matches.forEach(function (matchset, index) {
+ var set = self.matches[index] = Object.create(null)
+ for (var p in matchset) {
+ try {
+ p = self._makeAbs(p)
+ var real = rp.realpathSync(p, self.realpathCache)
+ set[real] = true
+ } catch (er) {
+ if (er.syscall === 'stat')
+ set[self._makeAbs(p)] = true
+ else
+ throw er
+ }
+ }
+ })
+ }
+ common.finish(this)
+}
+
+
+GlobSync.prototype._process = function (pattern, index, inGlobStar) {
+ assert.ok(this instanceof GlobSync)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // See if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) ||
+ isAbsolute(pattern.map(function (p) {
+ return typeof p === 'string' ? p : '[*]'
+ }).join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip processing
+ if (childrenIgnored(this, read))
+ return
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
+}
+
+
+GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
+ var entries = this._readdir(abs, inGlobStar)
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix.slice(-1) !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix)
+ newPattern = [prefix, e]
+ else
+ newPattern = [e]
+ this._process(newPattern.concat(remain), index, inGlobStar)
+ }
+}
+
+
+GlobSync.prototype._emitMatch = function (index, e) {
+ if (isIgnored(this, e))
+ return
+
+ var abs = this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute) {
+ e = abs
+ }
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ if (this.stat)
+ this._stat(e)
+}
+
+
+GlobSync.prototype._readdirInGlobStar = function (abs) {
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false)
+
+ var entries
+ var lstat
+ var stat
+ try {
+ lstat = this.fs.lstatSync(abs)
+ } catch (er) {
+ if (er.code === 'ENOENT') {
+ // lstat failed, doesn't exist
+ return null
+ }
+ }
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ this.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory())
+ this.cache[abs] = 'FILE'
+ else
+ entries = this._readdir(abs, false)
+
+ return entries
+}
+
+GlobSync.prototype._readdir = function (abs, inGlobStar) {
+ var entries
+
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return null
+
+ if (Array.isArray(c))
+ return c
+ }
+
+ try {
+ return this._readdirEntries(abs, this.fs.readdirSync(abs))
+ } catch (er) {
+ this._readdirError(abs, er)
+ return null
+ }
+}
+
+GlobSync.prototype._readdirEntries = function (abs, entries) {
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+
+ // mark and cache dir-ness
+ return entries
+}
+
+GlobSync.prototype._readdirError = function (f, er) {
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ throw error
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict)
+ throw er
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+}
+
+GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
+
+ var entries = this._readdir(abs, inGlobStar)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false)
+
+ var len = entries.length
+ var isSym = this.symlinks[abs]
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true)
+ }
+}
+
+GlobSync.prototype._processSimple = function (prefix, index) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var exists = this._stat(prefix)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+}
+
+// Returns either 'DIR', 'FILE', or false
+GlobSync.prototype._stat = function (f) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return false
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (!stat) {
+ var lstat
+ try {
+ lstat = this.fs.lstatSync(abs)
+ } catch (er) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return false
+ }
+ }
+
+ if (lstat && lstat.isSymbolicLink()) {
+ try {
+ stat = this.fs.statSync(abs)
+ } catch (er) {
+ stat = lstat
+ }
+ } else {
+ stat = lstat
+ }
+ }
+
+ this.statCache[abs] = stat
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ return c
+}
+
+GlobSync.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+GlobSync.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+
+/***/ }),
+
+/***/ 2927:
+/***/ ((module) => {
+
+"use strict";
+
+var Mutation = global.MutationObserver || global.WebKitMutationObserver;
+
+var scheduleDrain;
+
+if (process.browser) {
+ if (Mutation) {
+ var called = 0;
+ var observer = new Mutation(nextTick);
+ var element = global.document.createTextNode('');
+ observer.observe(element, {
+ characterData: true
+ });
+ scheduleDrain = function () {
+ element.data = (called = ++called % 2);
+ };
+ } else if (!global.setImmediate && typeof global.MessageChannel !== 'undefined') {
+ var channel = new global.MessageChannel();
+ channel.port1.onmessage = nextTick;
+ scheduleDrain = function () {
+ channel.port2.postMessage(0);
+ };
+ } else if ('document' in global && 'onreadystatechange' in global.document.createElement('script')) {
+ scheduleDrain = function () {
+
+ // Create a