Unverified Commit 542963cf authored by Samruddhi Khandale, committed by GitHub

Prep: republish features to ghcr.io (#78)

* syncing action

* modify release.yaml

* version update; remove "install"

* add 'latest'

* add workflow condition
parent 69d3df5f
@@ -22,15 +22,33 @@ inputs:
# 'features' options
base-path-to-features:
required: false
default: './features/src'
default: ''
description: "Relative path to the 'src' folder containing dev container 'feature(s)'"
# 'template' options
base-path-to-templates:
required: false
default: './templates/src'
default: ''
description: "Relative path to the folder containing dev container 'template(s)'"
# EXPERIMENTAL
tag-individual-features:
required: false
default: "false"
description: "Tag individual features"
publish-to-npm:
required: false
default: "false"
description: "Should publish features to NPM?"
publish-release-artifacts:
required: false
default: "false"
description: "Publish release artifacts (classic)"
publish-to-oci:
required: false
default: "false"
description: "Publish to OCI?"
runs:
using: 'node16'
main: 'dist/index.js'
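For context, a consuming release workflow would wire up the new experimental inputs roughly as follows. This is a hedged sketch: the `devcontainers/action@v1` reference, trigger, and paths are illustrative assumptions rather than part of this diff, and `GITHUB_TOKEN` is exposed because the OCI path logs in to ghcr.io with it (see `loginToGHCR` below).

```yaml
# Hypothetical caller workflow; all names and versions are illustrative.
name: Release dev container features
on:
  push:
    tags: ['*']
jobs:
  publish:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v3
      - name: Publish features to ghcr.io
        uses: devcontainers/action@v1   # assumed action reference
        with:
          publish-features: 'true'
          base-path-to-features: './src'
          generate-docs: 'true'
          # experimental inputs added in this change
          publish-to-oci: 'true'
          publish-release-artifacts: 'false'
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
```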
@@ -39,95 +39,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.generateFeaturesDocumentation = void 0;
exports.generateTemplateDocumentation = exports.generateFeaturesDocumentation = void 0;
const fs = __importStar(__nccwpck_require__(7147));
const github = __importStar(__nccwpck_require__(5438));
const core = __importStar(__nccwpck_require__(2186));
const path = __importStar(__nccwpck_require__(1017));
function generateFeaturesDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
fs.readdir(basePath, (err, files) => {
if (err) {
core.error(err.message);
core.setFailed(`failed to generate 'features' documentation ${err.message}`);
return;
}
files.forEach(f => {
core.info(`Generating docs for feature '${f}'`);
if (f !== '.' && f !== '..') {
const readmePath = path.join(basePath, f, 'README.md');
// Reads in feature.json
const featureJsonPath = path.join(basePath, f, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) {
core.error(`devcontainer-feature.json not found at path '${featureJsonPath}'`);
return;
}
let featureJson = undefined;
try {
featureJson = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
}
catch (err) {
core.error(`Failed to parse ${featureJsonPath}: ${err}`);
return;
}
if (!featureJson || !(featureJson === null || featureJson === void 0 ? void 0 : featureJson.id)) {
core.error(`devcontainer-feature.json for feature '${f}' does not contain an 'id'`);
return;
}
const ref = github.context.ref;
const owner = github.context.repo.owner;
const repo = github.context.repo.repo;
// Add tag if parseable
let versionTag = 'latest';
if (ref.includes('refs/tags/')) {
versionTag = ref.replace('refs/tags/', '');
}
const generateOptionsMarkdown = () => {
const options = featureJson === null || featureJson === void 0 ? void 0 : featureJson.options;
if (!options) {
return '';
}
const keys = Object.keys(options);
const contents = keys
.map(k => {
const val = options[k];
return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
})
.join('\n');
return ('| Options Id | Description | Type | Default Value |\n' +
'|-----|-----|-----|-----|\n' +
contents);
};
const newReadme = README_TEMPLATE.replace('#{nwo}', `${owner}/${repo}`)
.replace('#{versionTag}', versionTag)
.replace('#{featureId}', featureJson.id)
.replace('#{featureName}', featureJson.name
? `${featureJson.name} (${featureJson.id})`
: `${featureJson.id}`)
.replace('#{featureDescription}', featureJson.description ? featureJson.description : '')
.replace('#{optionsTable}', generateOptionsMarkdown());
// Remove previous readme
if (fs.existsSync(readmePath)) {
fs.unlinkSync(readmePath);
}
// Write new readme
fs.writeFileSync(readmePath, newReadme);
}
});
});
});
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
const README_TEMPLATE = `
# #{featureName}
const utils_1 = __nccwpck_require__(918);
const FEATURES_README_TEMPLATE = `
# #{Name}
#{featureDescription}
#{Description}
## Example Usage
\`\`\`json
"features": {
"#{nwo}/#{featureId}@#{versionTag}": {
"#{Nwo}/#{Id}@#{VersionTag}": {
"version": "latest"
}
}
@@ -135,12 +61,108 @@ const README_TEMPLATE = `
## Options
#{optionsTable}
#{OptionsTable}
---
_Note: This file was auto-generated from the [devcontainer-feature.json](./devcontainer-feature.json)._
_Note: This file was auto-generated from the [devcontainer-feature.json](#{RepoUrl})._
`;
const TEMPLATE_README_TEMPLATE = `
# #{Name}
#{Description}
## Options
#{OptionsTable}
`;
function generateFeaturesDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
yield _generateDocumentation(basePath, FEATURES_README_TEMPLATE, 'devcontainer-feature.json');
});
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
function generateTemplateDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
yield _generateDocumentation(basePath, TEMPLATE_README_TEMPLATE, 'devcontainer-template.json');
});
}
exports.generateTemplateDocumentation = generateTemplateDocumentation;
function _generateDocumentation(basePath, readmeTemplate, metadataFile) {
return __awaiter(this, void 0, void 0, function* () {
const directories = fs.readdirSync(basePath);
yield Promise.all(directories.map((f) => __awaiter(this, void 0, void 0, function* () {
var _a, _b, _c;
if (!f.startsWith('.')) {
const readmePath = path.join(basePath, f, 'README.md');
// Reads in feature.json
const jsonPath = path.join(basePath, f, metadataFile);
if (!fs.existsSync(jsonPath)) {
core.error(`${metadataFile} not found at path '${jsonPath}'`);
return;
}
let parsedJson = undefined;
try {
parsedJson = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
}
catch (err) {
core.error(`Failed to parse ${jsonPath}: ${err}`);
return;
}
if (!parsedJson || !(parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.id)) {
core.error(`${metadataFile} for '${f}' does not contain an 'id'`);
return;
}
const srcInfo = (0, utils_1.getGitHubMetadata)();
const ref = srcInfo.ref;
const owner = srcInfo.owner;
const repo = srcInfo.repo;
// Add tag if parseable
let versionTag = 'latest';
if (ref && ref.includes('refs/tags/')) {
versionTag = ref.replace('refs/tags/', '');
}
const generateOptionsMarkdown = () => {
const options = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.options;
if (!options) {
return '';
}
const keys = Object.keys(options);
const contents = keys
.map(k => {
const val = options[k];
return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
})
.join('\n');
return '| Options Id | Description | Type | Default Value |\n' + '|-----|-----|-----|-----|\n' + contents;
};
let urlToConfig = './devcontainer-feature.json';
const basePathTrimmed = basePath.startsWith('./') ? basePath.substring(2) : basePath;
if (srcInfo.owner && srcInfo.repo) {
urlToConfig = `https://github.com/${srcInfo.owner}/${srcInfo.repo}/blob/main/${basePathTrimmed}/${f}/devcontainer-feature.json`;
}
const newReadme = readmeTemplate
// Templates & Features
.replace('#{Id}', parsedJson.id)
.replace('#{Name}', parsedJson.name ? `${parsedJson.name} (${parsedJson.id})` : `${parsedJson.id}`)
.replace('#{Description}', (_a = parsedJson.description) !== null && _a !== void 0 ? _a : '')
.replace('#{OptionsTable}', generateOptionsMarkdown())
// Features Only
.replace('#{Nwo}', `${owner}/${repo}`)
.replace('#{VersionTag}', versionTag)
// Templates Only
.replace('#{ManifestName}', (_c = (_b = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.image) === null || _b === void 0 ? void 0 : _b.manifest) !== null && _c !== void 0 ? _c : '')
.replace('#{RepoUrl}', urlToConfig);
// Remove previous readme
if (fs.existsSync(readmePath)) {
fs.unlinkSync(readmePath);
}
// Write new readme
fs.writeFileSync(readmePath, newReadme);
}
})));
});
}
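To make the substitution above concrete: `_generateDocumentation` reads the metadata file, builds a Markdown options table, and fills the `#{...}` tokens with single-occurrence `String.prototype.replace` calls (each token appears once per template, so that is sufficient). A minimal, self-contained sketch with hypothetical sample data:

```js
// Hypothetical feature metadata; mirrors the fields read above.
const featureJson = {
    id: 'go',
    name: 'Go',
    description: 'Installs Go and common tooling.',
    options: {
        version: { type: 'string', default: 'latest', description: 'Go version to install' }
    }
};

// Same table construction as generateOptionsMarkdown above.
const optionsTable =
    '| Options Id | Description | Type | Default Value |\n' +
    '|-----|-----|-----|-----|\n' +
    Object.entries(featureJson.options)
        .map(([k, v]) => `| ${k} | ${v.description || '-'} | ${v.type || '-'} | ${v.default || '-'} |`)
        .join('\n');

// Cut-down template standing in for FEATURES_README_TEMPLATE.
const template = '# #{Name}\n\n#{Description}\n\n## Options\n\n#{OptionsTable}\n';

const readme = template
    .replace('#{Name}', `${featureJson.name} (${featureJson.id})`)
    .replace('#{Description}', featureJson.description)
    .replace('#{OptionsTable}', optionsTable);

console.log(readme); // the body that would be written to README.md
```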
/***/ }),
@@ -195,42 +217,51 @@ function run() {
core.debug('Reading input parameters...');
// Read inputs
const shouldPublishFeatures = core.getInput('publish-features').toLowerCase() === 'true';
const shouldPublishTemplate = core.getInput('publish-templates').toLowerCase() === 'true';
const shouldPublishTemplates = core.getInput('publish-templates').toLowerCase() === 'true';
const shouldGenerateDocumentation = core.getInput('generate-docs').toLowerCase() === 'true';
// Experimental
const shouldTagIndividualFeatures = core.getInput('tag-individual-features').toLowerCase() === 'true';
const shouldPublishToNPM = core.getInput('publish-to-npm').toLowerCase() === 'true';
const shouldPublishReleaseArtifacts = core.getInput('publish-release-artifacts').toLowerCase() === 'true';
const shouldPublishToOCI = core.getInput('publish-to-oci').toLowerCase() === 'true';
const opts = {
shouldTagIndividualFeatures,
shouldPublishToNPM,
shouldPublishReleaseArtifacts,
shouldPublishToOCI
};
const featuresBasePath = core.getInput('base-path-to-features');
const templatesBasePath = core.getInput('base-path-to-templates');
let featuresMetadata = undefined;
let templatesMetadata = undefined;
// -- Package Release Artifacts
if (shouldPublishFeatures) {
core.info('Publishing features...');
const featuresBasePath = core.getInput('base-path-to-features');
featuresMetadata = yield packageFeatures(featuresBasePath);
featuresMetadata = yield packageFeatures(featuresBasePath, opts);
}
if (shouldPublishTemplate) {
if (shouldPublishTemplates) {
core.info('Publishing template...');
const basePathToDefinitions = core.getInput('base-path-to-templates');
templatesMetadata = undefined; // TODO
yield packageTemplates(basePathToDefinitions);
templatesMetadata = yield packageTemplates(templatesBasePath);
}
if (shouldGenerateDocumentation) {
core.info('Generating documentation...');
const featuresBasePath = core.getInput('base-path-to-features');
if (featuresBasePath) {
yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath);
}
else {
core.error("'base-path-to-features' input is required to generate documentation");
}
// TODO: base-path-to-templates
// -- Generate Documentation
if (shouldGenerateDocumentation && featuresBasePath) {
core.info('Generating documentation for features...');
yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath);
}
if (shouldGenerateDocumentation && templatesBasePath) {
core.info('Generating documentation for templates...');
yield (0, generateDocs_1.generateTemplateDocumentation)(templatesBasePath);
}
// TODO: Programatically add feature/template fino with relevant metadata for UX clients.
core.info('Generation metadata file: devcontainer-collection.json');
yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata);
// -- Programmatically add feature/template metadata to collections file.
core.info('Generating metadata file: devcontainer-collection.json');
yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata, opts);
});
}
function packageFeatures(basePath) {
function packageFeatures(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () {
try {
core.info(`Archiving all features in ${basePath}`);
const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath);
const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath, opts);
core.info('Packaging features has finished.');
return metadata;
}
@@ -245,14 +276,17 @@ function packageFeatures(basePath) {
function packageTemplates(basePath) {
return __awaiter(this, void 0, void 0, function* () {
try {
core.info(`Archiving all templated in ${basePath}`);
yield (0, utils_1.getTemplatesAndPackage)(basePath);
core.info(`Archiving all templates in ${basePath}`);
const metadata = yield (0, utils_1.getTemplatesAndPackage)(basePath);
core.info('Packaging templates has finished.');
return metadata;
}
catch (error) {
if (error instanceof Error)
if (error instanceof Error) {
core.setFailed(error.message);
}
}
return;
});
}
run();
@@ -301,11 +335,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.addCollectionsMetadataFile = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0;
exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.pushCollectionsMetadataToOCI = exports.addCollectionsMetadataFile = exports.getGitHubMetadata = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0;
const github = __importStar(__nccwpck_require__(5438));
const tar = __importStar(__nccwpck_require__(4674));
const fs = __importStar(__nccwpck_require__(7147));
const core = __importStar(__nccwpck_require__(2186));
const child_process = __importStar(__nccwpck_require__(2081));
const util_1 = __nccwpck_require__(3837);
const path_1 = __importDefault(__nccwpck_require__(1017));
exports.readLocalFile = (0, util_1.promisify)(fs.readFile);
@@ -328,23 +363,83 @@ function tarDirectory(path, tgzName) {
});
}
exports.tarDirectory = tarDirectory;
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) {
function getGitHubMetadata() {
// Insert github repo metadata
const ref = github.context.ref;
let sourceInformation = {
owner: github.context.repo.owner,
repo: github.context.repo.repo,
ref,
sha: github.context.sha
};
// Add tag if parseable
if (ref.includes('refs/tags/')) {
const tag = ref.replace('refs/tags/', '');
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag });
}
return sourceInformation;
}
exports.getGitHubMetadata = getGitHubMetadata;
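For reference, on a run triggered by pushing a tag the context `ref` begins with `refs/tags/`, so the returned metadata carries a `tag` field. The values below are purely illustrative:

```js
// Illustrative shape of getGitHubMetadata() for a push of tag 'v1.2.3':
// {
//     owner: 'octocat',          // github.context.repo.owner
//     repo: 'features',          // github.context.repo.repo
//     ref: 'refs/tags/v1.2.3',   // github.context.ref
//     sha: '0123abc...',         // github.context.sha
//     tag: 'v1.2.3'              // set only when ref is a tag ref
// }
```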
function tagFeatureAtVersion(featureMetaData) {
return __awaiter(this, void 0, void 0, function* () {
const p = path_1.default.join('.', 'devcontainer-collection.json');
// Insert github repo metadata
const ref = github.context.ref;
let sourceInformation = {
source: 'github',
const featureId = featureMetaData.id;
const featureVersion = featureMetaData.version;
const tagName = `${featureId}_v${featureVersion}`;
// Get GITHUB_TOKEN from environment
const githubToken = process.env.GITHUB_TOKEN;
if (!githubToken) {
core.setFailed('GITHUB_TOKEN environment variable is not set.');
return;
}
// Setup Octokit client
const octokit = github.getOctokit(githubToken);
// Use octokit to get all tags for this repo
const tags = yield octokit.rest.repos.listTags({
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
// See if a tag for this release was already created.
const tagExists = tags.data.some(tag => tag.name === tagName);
if (tagExists) {
core.info(`Tag ${tagName} already exists. Skipping...`);
return;
}
// Create tag
const createdTag = yield octokit.rest.git.createTag({
tag: tagName,
message: `Feature ${featureId} version ${featureVersion}`,
object: github.context.sha,
type: 'commit',
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
if (createdTag.status === 201) {
core.info(`Tagged '${tagName}'`);
}
else {
core.setFailed(`Failed to tag '${tagName}'`);
return;
}
// Create reference to tag
const createdRef = yield octokit.rest.git.createRef({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
ref,
sha: github.context.sha
};
// Add tag if parseable
if (ref.includes('refs/tags/')) {
const tag = ref.replace('refs/tags/', '');
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag });
ref: `refs/tags/${tagName}`,
sha: createdTag.data.sha
});
if (createdRef.status === 201) {
core.info(`Created reference for '${tagName}'`);
}
else {
core.setFailed(`Failed to create reference for tag '${tagName}'`);
return;
}
});
}
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata, opts) {
return __awaiter(this, void 0, void 0, function* () {
const p = path_1.default.join('.', 'devcontainer-collection.json');
const sourceInformation = getGitHubMetadata();
const metadata = {
sourceInformation,
features: featuresMetadata || [],
@@ -352,27 +447,148 @@ function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) {
};
// Write to the file
yield (0, exports.writeLocalFile)(p, JSON.stringify(metadata, undefined, 4));
if (opts.shouldPublishToOCI) {
yield pushCollectionsMetadataToOCI(p);
}
});
}
exports.addCollectionsMetadataFile = addCollectionsMetadataFile;
function getFeaturesAndPackage(basePath) {
function pushArtifactToOCI(version, featureName, artifactPath) {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
const versions = [version, '1.0', '1', 'latest']; // TODO: Generate semantic versions from 'version'
const sourceInfo = getGitHubMetadata();
yield Promise.all(versions.map((v) => __awaiter(this, void 0, void 0, function* () {
const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}/${featureName}:${v}`;
try {
const cmd = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${artifactPath}:application/vnd.devcontainers.layer.v1+tar`;
yield exec(cmd);
core.info(`Pushed artifact to '${ociRepo}'`);
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Failed to push '${ociRepo}': ${error.message}`);
}
})));
});
}
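To show what that command template expands to, here is the first iteration of the loop for a hypothetical feature `go` at version `1.2.3` in a repo `octocat/features` (all names illustrative):

```js
// Expansion of the oras push template above for one version tag.
const ociRepo = 'octocat/features/go:1.2.3';
const cmd = `oras push ghcr.io/${ociRepo} \
    --manifest-config /dev/null:application/vnd.devcontainers \
    ./go.tgz:application/vnd.devcontainers.layer.v1+tar`;
console.log(cmd);
// The same artifact is re-pushed under the hard-coded tags '1.0', '1' and
// 'latest' as well (see the TODO about deriving tags from the version).
```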
function pushCollectionsMetadataToOCI(collectionJsonPath) {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
const sourceInfo = getGitHubMetadata();
const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}:latest`;
try {
const cmd = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${collectionJsonPath}:application/vnd.devcontainers.collection.layer.v1+json`;
yield exec(cmd);
core.info(`Pushed collection metadata to '${ociRepo}'`);
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Failed to push collection metadata '${ociRepo}': ${error.message}`);
}
});
}
exports.pushCollectionsMetadataToOCI = pushCollectionsMetadataToOCI;
function loginToGHCR() {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
// Get GITHUB_TOKEN from environment
const githubToken = process.env.GITHUB_TOKEN;
if (!githubToken) {
core.setFailed('GITHUB_TOKEN environment variable is not set.');
return;
}
try {
yield exec(`oras login ghcr.io -u USERNAME -p ${githubToken}`);
core.info('Oras logged in successfully!');
}
catch (error) {
if (error instanceof Error)
core.setFailed('Oras login failed!');
}
});
}
function getFeaturesAndPackage(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () {
const { shouldPublishToNPM, shouldTagIndividualFeatures, shouldPublishReleaseArtifacts, shouldPublishToOCI } = opts;
const featureDirs = fs.readdirSync(basePath);
let metadatas = [];
const exec = (0, util_1.promisify)(child_process.exec);
if (shouldPublishToOCI) {
yield loginToGHCR();
}
yield Promise.all(featureDirs.map((f) => __awaiter(this, void 0, void 0, function* () {
var _a;
core.info(`feature ==> ${f}`);
if (f !== '.' && f !== '..') {
if (!f.startsWith('.')) {
const featureFolder = path_1.default.join(basePath, f);
const archiveName = `${f}.tgz`;
yield tarDirectory(`${basePath}/${f}`, archiveName);
const featureJsonPath = path_1.default.join(featureFolder, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) {
core.error(`Feature ${f} is missing a devcontainer-feature.json`);
core.error(`Feature '${f}' is missing a devcontainer-feature.json`);
core.setFailed('All features must have a devcontainer-feature.json');
return;
}
const featureMetadata = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
if (!featureMetadata.id || !featureMetadata.version) {
core.error(`Feature '${f}' must define an id and version`);
core.setFailed('Incomplete devcontainer-feature.json');
return;
}
metadatas.push(featureMetadata);
const sourceInfo = getGitHubMetadata();
if (!sourceInfo.owner) {
core.setFailed('Could not determine repository owner.');
return;
}
const archiveName = `${f}.tgz`;
// ---- PUBLISH RELEASE ARTIFACTS (classic method) ----
if (shouldPublishReleaseArtifacts || shouldPublishToOCI) {
core.info(`** Tar'ing feature`);
yield tarDirectory(featureFolder, archiveName);
}
// ---- PUBLISH TO OCI ----
if (shouldPublishToOCI) {
core.info(`** Publishing to OCI`);
// TODO: CHECK IF THE FEATURE IS ALREADY PUBLISHED UNDER GIVEN TAG
yield pushArtifactToOCI(featureMetadata.version, f, archiveName);
}
// ---- TAG INDIVIDUAL FEATURES ----
if (shouldTagIndividualFeatures) {
core.info(`** Tagging individual feature`);
yield tagFeatureAtVersion(featureMetadata);
}
// ---- PUBLISH TO NPM ----
if (shouldPublishToNPM) {
core.info(`** Publishing to NPM`);
// Adds a package.json file to the feature folder
const packageJsonPath = path_1.default.join(featureFolder, 'package.json');
// if (!sourceInfo.tag) {
// core.error(`Feature ${f} is missing a tag! Cannot publish to NPM.`);
// core.setFailed('All features published to NPM must be tagged with a version');
// }
const packageJsonObject = {
name: `@${sourceInfo.owner}/${f}`,
version: featureMetadata.version,
description: `${(_a = featureMetadata.description) !== null && _a !== void 0 ? _a : 'My cool feature'}`,
author: `${sourceInfo.owner}`,
keywords: ['devcontainer-features']
};
yield (0, exports.writeLocalFile)(packageJsonPath, JSON.stringify(packageJsonObject, undefined, 4));
core.info(`Feature Folder is: ${featureFolder}`);
// Run npm pack, which 'tars' the folder
const packageName = yield exec(`npm pack ./${featureFolder}`);
if (packageName.stderr) {
core.error(`${packageName.stderr.toString()}`);
}
const publishOutput = yield exec(`npm publish --access public "${packageName.stdout.trim()}"`);
core.info(publishOutput.stdout);
if (publishOutput.stderr) {
core.error(`${publishOutput.stderr}`);
}
}
}
})));
if (metadatas.length === 0) {
@@ -385,23 +601,29 @@ function getFeaturesAndPackage(basePath) {
exports.getFeaturesAndPackage = getFeaturesAndPackage;
function getTemplatesAndPackage(basePath) {
return __awaiter(this, void 0, void 0, function* () {
let archives = [];
fs.readdir(basePath, (err, files) => {
if (err) {
core.error(err.message);
core.setFailed(`failed to get list of templates: ${err.message}`);
return;
}
files.forEach(file => {
core.info(`template ==> ${file}`);
if (file !== '.' && file !== '..') {
const archiveName = `devcontainer-definition-${file}.tgz`;
tarDirectory(`${basePath}/${file}`, archiveName);
archives.push(archiveName);
const templateDirs = fs.readdirSync(basePath);
let metadatas = [];
yield Promise.all(templateDirs.map((t) => __awaiter(this, void 0, void 0, function* () {
core.info(`template ==> ${t}`);
if (!t.startsWith('.')) {
const templateFolder = path_1.default.join(basePath, t);
const archiveName = `devcontainer-template-${t}.tgz`;
// await tarDirectory(templateFolder, archiveName);
const templateJsonPath = path_1.default.join(templateFolder, 'devcontainer-template.json');
if (!fs.existsSync(templateJsonPath)) {
core.error(`Template '${t}' is missing a devcontainer-template.json`);
core.setFailed('All templates must have a devcontainer-template.json');
return;
}
});
});
return archives;
const templateMetadata = JSON.parse(fs.readFileSync(templateJsonPath, 'utf8'));
metadatas.push(templateMetadata);
}
})));
if (metadatas.length === 0) {
core.setFailed('No templates found');
return;
}
return metadatas;
});
}
exports.getTemplatesAndPackage = getTemplatesAndPackage;
@@ -823,6 +1045,23 @@ function getIDToken(aud) {
});
}
exports.getIDToken = getIDToken;
/**
* Summary exports
*/
var summary_1 = __nccwpck_require__(1327);
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
/**
* @deprecated use core.summary
*/
var summary_2 = __nccwpck_require__(1327);
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
/**
* Path exports
*/
var path_utils_1 = __nccwpck_require__(2981);
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
//# sourceMappingURL=core.js.map
/***/ }),
@@ -892,8 +1131,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OidcClient = void 0;
const http_client_1 = __nccwpck_require__(9925);
const auth_1 = __nccwpck_require__(3702);
const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(5526);
const core_1 = __nccwpck_require__(2186);
class OidcClient {
static createHttpClient(allowRetry = true, maxRetry = 10) {
@@ -960,97 +1199,452 @@ exports.OidcClient = OidcClient;
/***/ }),
/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {
/***/ 2981:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandProperties = exports.toCommandValue = void 0;
exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(__nccwpck_require__(1017));
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
* toPosixPath converts the given path to the posix form. On Windows, \\ will be
* replaced with /.
*
* @param pth. Path to transform.
* @return string Posix path.
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
function toPosixPath(pth) {
return pth.replace(/[\\]/g, '/');
}
exports.toCommandValue = toCommandValue;
exports.toPosixPath = toPosixPath;
/**
* toWin32Path converts the given path to the win32 form. On Linux, / will be
* replaced with \\.
*
* @param annotationProperties
* @returns The command properties to send with the actual annotation command
* See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
* @param pth. Path to transform.
* @return string Win32 path.
*/
function toCommandProperties(annotationProperties) {
if (!Object.keys(annotationProperties).length) {
return {};
}
return {
title: annotationProperties.title,
file: annotationProperties.file,
line: annotationProperties.startLine,
endLine: annotationProperties.endLine,
col: annotationProperties.startColumn,
endColumn: annotationProperties.endColumn
};
function toWin32Path(pth) {
return pth.replace(/[/]/g, '\\');
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map
exports.toWin32Path = toWin32Path;
/**
* toPlatformPath converts the given path to a platform-specific path. It does
* this by replacing instances of / and \ with the platform-specific path
* separator.
*
* @param pth The path to platformize.
* @return string The platform-specific path.
*/
function toPlatformPath(pth) {
return pth.replace(/[/\\]/g, path.sep);
}
exports.toPlatformPath = toPlatformPath;
//# sourceMappingURL=path-utils.js.map
/***/ }),
/***/ 4087:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
/***/ 1327:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Context = void 0;
const fs_1 = __nccwpck_require__(7147);
exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const os_1 = __nccwpck_require__(2037);
class Context {
const fs_1 = __nccwpck_require__(7147);
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
class Summary {
constructor() {
this._buffer = '';
}
/**
* Hydrate the context from the environment
* Finds the summary file path from the environment, rejects if env var is not found or file does not exist
* Also checks r/w permissions.
*
* @returns step summary file path
*/
constructor() {
var _a, _b, _c;
this.payload = {};
if (process.env.GITHUB_EVENT_PATH) {
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
filePath() {
return __awaiter(this, void 0, void 0, function* () {
if (this._filePath) {
return this._filePath;
}
else {
const path = process.env.GITHUB_EVENT_PATH;
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
if (!pathFromEnv) {
throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
}
}
this.eventName = process.env.GITHUB_EVENT_NAME;
this.sha = process.env.GITHUB_SHA;
this.ref = process.env.GITHUB_REF;
this.workflow = process.env.GITHUB_WORKFLOW;
this.action = process.env.GITHUB_ACTION;
this.actor = process.env.GITHUB_ACTOR;
this.job = process.env.GITHUB_JOB;
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
try {
yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
}
catch (_a) {
throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
}
this._filePath = pathFromEnv;
return this._filePath;
});
}
get issue() {
const payload = this.payload;
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
/**
* Wraps content in an HTML tag, adding any HTML attributes
*
* @param {string} tag HTML tag to wrap
* @param {string | null} content content within the tag
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
*
* @returns {string} content wrapped in HTML element
*/
wrap(tag, content, attrs = {}) {
const htmlAttrs = Object.entries(attrs)
.map(([key, value]) => ` ${key}="${value}"`)
.join('');
if (!content) {
return `<${tag}${htmlAttrs}>`;
}
return `<${tag}${htmlAttrs}>${content}</${tag}>`;
}
get repo() {
if (process.env.GITHUB_REPOSITORY) {
/**
* Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
*
* @param {SummaryWriteOptions} [options] (optional) options for write operation
*
* @returns {Promise<Summary>} summary instance
*/
write(options) {
return __awaiter(this, void 0, void 0, function* () {
const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
const filePath = yield this.filePath();
const writeFunc = overwrite ? writeFile : appendFile;
yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
return this.emptyBuffer();
});
}
/**
* Clears the summary buffer and wipes the summary file
*
* @returns {Summary} summary instance
*/
clear() {
return __awaiter(this, void 0, void 0, function* () {
return this.emptyBuffer().write({ overwrite: true });
});
}
/**
* Returns the current summary buffer as a string
*
* @returns {string} string of summary buffer
*/
stringify() {
return this._buffer;
}
/**
* If the summary buffer is empty
*
* @returns {boolean} true if the buffer is empty
*/
isEmptyBuffer() {
return this._buffer.length === 0;
}
/**
* Resets the summary buffer without writing to summary file
*
* @returns {Summary} summary instance
*/
emptyBuffer() {
this._buffer = '';
return this;
}
/**
* Adds raw text to the summary buffer
*
* @param {string} text content to add
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
*
* @returns {Summary} summary instance
*/
addRaw(text, addEOL = false) {
this._buffer += text;
return addEOL ? this.addEOL() : this;
}
/**
* Adds the operating system-specific end-of-line marker to the buffer
*
* @returns {Summary} summary instance
*/
addEOL() {
return this.addRaw(os_1.EOL);
}
/**
* Adds an HTML codeblock to the summary buffer
*
* @param {string} code content to render within fenced code block
* @param {string} lang (optional) language to syntax highlight code
*
* @returns {Summary} summary instance
*/
addCodeBlock(code, lang) {
const attrs = Object.assign({}, (lang && { lang }));
const element = this.wrap('pre', this.wrap('code', code), attrs);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML list to the summary buffer
*
* @param {string[]} items list of items to render
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
*
* @returns {Summary} summary instance
*/
addList(items, ordered = false) {
const tag = ordered ? 'ol' : 'ul';
const listItems = items.map(item => this.wrap('li', item)).join('');
const element = this.wrap(tag, listItems);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML table to the summary buffer
*
* @param {SummaryTableCell[]} rows table rows
*
* @returns {Summary} summary instance
*/
addTable(rows) {
const tableBody = rows
.map(row => {
const cells = row
.map(cell => {
if (typeof cell === 'string') {
return this.wrap('td', cell);
}
const { header, data, colspan, rowspan } = cell;
const tag = header ? 'th' : 'td';
const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
return this.wrap(tag, data, attrs);
})
.join('');
return this.wrap('tr', cells);
})
.join('');
const element = this.wrap('table', tableBody);
return this.addRaw(element).addEOL();
}
/**
* Adds a collapsable HTML details element to the summary buffer
*
* @param {string} label text for the closed state
* @param {string} content collapsable content
*
* @returns {Summary} summary instance
*/
addDetails(label, content) {
const element = this.wrap('details', this.wrap('summary', label) + content);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML image tag to the summary buffer
*
* @param {string} src path to the image you want to embed
* @param {string} alt text description of the image
* @param {SummaryImageOptions} options (optional) additional image attributes
*
* @returns {Summary} summary instance
*/
addImage(src, alt, options) {
const { width, height } = options || {};
const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML section heading element
*
* @param {string} text heading text
* @param {number | string} [level=1] (optional) the heading level, default: 1
*
* @returns {Summary} summary instance
*/
addHeading(text, level) {
const tag = `h${level}`;
const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
? tag
: 'h1';
const element = this.wrap(allowedTag, text);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML thematic break (<hr>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addSeparator() {
const element = this.wrap('hr', null);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML line break (<br>) to the summary buffer
*
* @returns {Summary} summary instance
*/
addBreak() {
const element = this.wrap('br', null);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML blockquote to the summary buffer
*
* @param {string} text quote text
* @param {string} cite (optional) citation url
*
* @returns {Summary} summary instance
*/
addQuote(text, cite) {
const attrs = Object.assign({}, (cite && { cite }));
const element = this.wrap('blockquote', text, attrs);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML anchor tag to the summary buffer
*
* @param {string} text link text/content
* @param {string} href hyperlink
*
* @returns {Summary} summary instance
*/
addLink(text, href) {
const element = this.wrap('a', text, { href });
return this.addRaw(element).addEOL();
}
}
const _summary = new Summary();
/**
* @deprecated use `core.summary`
*/
exports.markdownSummary = _summary;
exports.summary = _summary;
//# sourceMappingURL=summary.js.map
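The vendored summary API is fluent and buffered: builder calls accumulate HTML in `_buffer`, and `write()` flushes it to the file named by `GITHUB_STEP_SUMMARY`. A sketch of consumer usage (not part of this action's code; shown only to illustrate the API surface above):

```js
const core = require('@actions/core');

async function reportPublishedFeatures() {
    await core.summary
        .addHeading('Published features')
        .addTable([
            [{ data: 'Feature', header: true }, { data: 'Version', header: true }],
            ['go', '1.2.3'] // illustrative row
        ])
        .addLink('Browse on ghcr.io', 'https://ghcr.io')
        .write(); // appends the buffer to $GITHUB_STEP_SUMMARY and clears it
}
```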
/***/ }),
/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandProperties = exports.toCommandValue = void 0;
/**
* Sanitizes an input into a string so it can be passed into issueCommand safely
* @param input input to sanitize into a string
*/
function toCommandValue(input) {
if (input === null || input === undefined) {
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
/**
*
* @param annotationProperties
* @returns The command properties to send with the actual annotation command
* See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
*/
function toCommandProperties(annotationProperties) {
if (!Object.keys(annotationProperties).length) {
return {};
}
return {
title: annotationProperties.title,
file: annotationProperties.file,
line: annotationProperties.startLine,
endLine: annotationProperties.endLine,
col: annotationProperties.startColumn,
endColumn: annotationProperties.endColumn
};
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 4087:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Context = void 0;
const fs_1 = __nccwpck_require__(7147);
const os_1 = __nccwpck_require__(2037);
class Context {
/**
* Hydrate the context from the environment
*/
constructor() {
var _a, _b, _c;
this.payload = {};
if (process.env.GITHUB_EVENT_PATH) {
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
}
else {
const path = process.env.GITHUB_EVENT_PATH;
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
}
}
this.eventName = process.env.GITHUB_EVENT_NAME;
this.sha = process.env.GITHUB_SHA;
this.ref = process.env.GITHUB_REF;
this.workflow = process.env.GITHUB_WORKFLOW;
this.action = process.env.GITHUB_ACTION;
this.actor = process.env.GITHUB_ACTOR;
this.job = process.env.GITHUB_JOB;
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
}
get issue() {
const payload = this.payload;
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
}
get repo() {
if (process.env.GITHUB_REPOSITORY) {
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
return { owner, repo };
}
@@ -1137,7 +1731,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__nccwpck_require__(6341));
const httpClient = __importStar(__nccwpck_require__(6255));
function getAuthString(token, options) {
if (!token && !options.auth) {
throw new Error('Parameter token or opts.auth is required');
@@ -1222,7 +1816,95 @@ exports.getOctokitOptions = getOctokitOptions;
/***/ }),
/***/ 6341:
/***/ 5526:
/***/ (function(__unused_webpack_module, exports) {
"use strict";
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
return new (P || (P = Promise))(function (resolve, reject) {
function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
step((generator = generator.apply(thisArg, _arguments || [])).next());
});
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
class BasicCredentialHandler {
constructor(username, password) {
this.username = username;
this.password = password;
}
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Basic ${Buffer.from(`${this.username}:${this.password}`).toString('base64')}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Bearer ${this.token}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
if (!options.headers) {
throw Error('The request has no headers');
}
options.headers['Authorization'] = `Basic ${Buffer.from(`PAT:${this.token}`).toString('base64')}`;
}
// This handler cannot handle 401
canHandleAuthentication() {
return false;
}
handleAuthentication() {
return __awaiter(this, void 0, void 0, function* () {
throw new Error('not implemented');
});
}
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
//# sourceMappingURL=auth.js.map
/***/ }),
/***/ 6255:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
@@ -1260,7 +1942,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(__nccwpck_require__(3685));
const https = __importStar(__nccwpck_require__(5687));
const pm = __importStar(__nccwpck_require__(3466));
const pm = __importStar(__nccwpck_require__(9835));
const tunnel = __importStar(__nccwpck_require__(4294));
var HttpCodes;
(function (HttpCodes) {
@@ -1834,7 +2516,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa
/***/ }),
/***/ 3466:
/***/ 9835:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
@@ -1902,860 +2584,184 @@ exports.checkBypass = checkBypass;
/***/ }),
/***/ 3702:
/***/ 334:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
class BasicCredentialHandler {
constructor(username, password) {
this.username = username;
this.password = password;
}
prepareRequest(options) {
options.headers['Authorization'] =
'Basic ' +
Buffer.from(this.username + ':' + this.password).toString('base64');
}
// This handler cannot handle 401
canHandleAuthentication(response) {
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
const REGEX_IS_INSTALLATION = /^ghs_/;
const REGEX_IS_USER_TO_SERVER = /^ghu_/;
async function auth(token) {
const isApp = token.split(/\./).length === 3;
const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
return {
type: "token",
token: token,
tokenType
};
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
options.headers['Authorization'] = 'Bearer ' + this.token;
}
// This handler cannot handle 401
canHandleAuthentication(response) {
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
/**
* Prefix token for usage in the Authorization header
*
* @param token OAuth token or JSON Web Token
*/
function withAuthorizationPrefix(token) {
if (token.split(/\./).length === 3) {
return `bearer ${token}`;
}
return `token ${token}`;
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
constructor(token) {
this.token = token;
}
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
options.headers['Authorization'] =
'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
}
// This handler cannot handle 401
canHandleAuthentication(response) {
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
async function hook(token, request, route, parameters) {
const endpoint = request.endpoint.merge(route, parameters);
endpoint.headers.authorization = withAuthorizationPrefix(token);
return request(endpoint);
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
const createTokenAuth = function createTokenAuth(token) {
if (!token) {
throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
}
if (typeof token !== "string") {
throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
}
token = token.replace(/^(token|bearer) +/i, "");
return Object.assign(auth.bind(null, token), {
hook: hook.bind(null, token)
});
};
exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 9925:
/***/ 6762:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const http = __nccwpck_require__(3685);
const https = __nccwpck_require__(5687);
const pm = __nccwpck_require__(6443);
let tunnel;
var HttpCodes;
(function (HttpCodes) {
HttpCodes[HttpCodes["OK"] = 200] = "OK";
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices";
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently";
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved";
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther";
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified";
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy";
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect";
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect";
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest";
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized";
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden";
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
function getProxyUrl(serverUrl) {
let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
constructor(message, statusCode) {
super(message);
this.name = 'HttpClientError';
this.statusCode = statusCode;
Object.setPrototypeOf(this, HttpClientError.prototype);
}
var universalUserAgent = __nccwpck_require__(5030);
var beforeAfterHook = __nccwpck_require__(3682);
var request = __nccwpck_require__(6234);
var graphql = __nccwpck_require__(8467);
var authToken = __nccwpck_require__(334);
function _objectWithoutPropertiesLoose(source, excluded) {
if (source == null) return {};
var target = {};
var sourceKeys = Object.keys(source);
var key, i;
for (i = 0; i < sourceKeys.length; i++) {
key = sourceKeys[i];
if (excluded.indexOf(key) >= 0) continue;
target[key] = source[key];
}
return target;
}
exports.HttpClientError = HttpClientError;
class HttpClientResponse {
constructor(message) {
this.message = message;
}
readBody() {
return new Promise(async (resolve, reject) => {
let output = Buffer.alloc(0);
this.message.on('data', (chunk) => {
output = Buffer.concat([output, chunk]);
});
this.message.on('end', () => {
resolve(output.toString());
});
});
function _objectWithoutProperties(source, excluded) {
if (source == null) return {};
var target = _objectWithoutPropertiesLoose(source, excluded);
var key, i;
if (Object.getOwnPropertySymbols) {
var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
for (i = 0; i < sourceSymbolKeys.length; i++) {
key = sourceSymbolKeys[i];
if (excluded.indexOf(key) >= 0) continue;
if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
target[key] = source[key];
}
}
return target;
}
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) {
let parsedUrl = new URL(requestUrl);
return parsedUrl.protocol === 'https:';
}
exports.isHttps = isHttps;
class HttpClient {
constructor(userAgent, handlers, requestOptions) {
this._ignoreSslError = false;
this._allowRedirects = true;
this._allowRedirectDowngrade = false;
this._maxRedirects = 50;
this._allowRetries = false;
this._maxRetries = 1;
this._keepAlive = false;
this._disposed = false;
this.userAgent = userAgent;
this.handlers = handlers || [];
this.requestOptions = requestOptions;
if (requestOptions) {
if (requestOptions.ignoreSslError != null) {
this._ignoreSslError = requestOptions.ignoreSslError;
}
this._socketTimeout = requestOptions.socketTimeout;
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
options(requestUrl, additionalHeaders) {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
}
get(requestUrl, additionalHeaders) {
return this.request('GET', requestUrl, null, additionalHeaders || {});
}
del(requestUrl, additionalHeaders) {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
}
post(requestUrl, data, additionalHeaders) {
return this.request('POST', requestUrl, data, additionalHeaders || {});
}
patch(requestUrl, data, additionalHeaders) {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
const VERSION = "3.6.0";
const _excluded = ["authStrategy"];
class Octokit {
constructor(options = {}) {
const hook = new beforeAfterHook.Collection();
const requestDefaults = {
baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
headers: {},
request: Object.assign({}, options.request, {
// @ts-ignore internal usage only, no need to type
hook: hook.bind(null, "request")
}),
mediaType: {
previews: [],
format: ""
}
}; // prepend default user agent with `options.userAgent` if set
requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
if (options.baseUrl) {
requestDefaults.baseUrl = options.baseUrl;
}
put(requestUrl, data, additionalHeaders) {
return this.request('PUT', requestUrl, data, additionalHeaders || {});
if (options.previews) {
requestDefaults.mediaType.previews = options.previews;
}
head(requestUrl, additionalHeaders) {
return this.request('HEAD', requestUrl, null, additionalHeaders || {});
if (options.timeZone) {
requestDefaults.headers["time-zone"] = options.timeZone;
}
sendStream(verb, requestUrl, stream, additionalHeaders) {
return this.request(verb, requestUrl, stream, additionalHeaders);
}
/**
 * Gets a typed object from an endpoint.
 * Be aware that 'Not Found' (404) resolves with a null result; other errors (4xx, 5xx) reject the promise.
 */
async getJson(requestUrl, additionalHeaders = {}) {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
let res = await this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
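// Example (editor's sketch): a 404 resolves with `result === null`, while other
// 4xx/5xx responses reject with an HttpClientError. The URL is a placeholder.
//
//   const res = await client.getJson('https://example.com/api/items/1');
//   if (res.result === null) {
//       // the endpoint returned 404 Not Found
//   }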
async postJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async putJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async patchJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
/**
 * Makes a raw http request.
 * All other verb helpers (get, post, patch, etc.) ultimately call this.
 * Prefer the typed helpers get, del, post, and patch where possible.
 */
async request(verb, requestUrl, data, headers) {
if (this._disposed) {
throw new Error('Client has already been disposed.');
}
let parsedUrl = new URL(requestUrl);
let info = this._prepareRequest(verb, parsedUrl, headers);
// Only perform retries on reads since writes may not be idempotent.
let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
? this._maxRetries + 1
: 1;
let numTries = 0;
let response;
while (numTries < maxTries) {
response = await this.requestRaw(info, data);
// Check if it's an authentication challenge
if (response &&
response.message &&
response.message.statusCode === HttpCodes.Unauthorized) {
let authenticationHandler;
for (let i = 0; i < this.handlers.length; i++) {
if (this.handlers[i].canHandleAuthentication(response)) {
authenticationHandler = this.handlers[i];
break;
}
}
if (authenticationHandler) {
return authenticationHandler.handleAuthentication(this, info, data);
}
else {
// We have received an unauthorized response but have no handlers to handle it.
// Let the response return to the caller.
return response;
}
}
let redirectsRemaining = this._maxRedirects;
while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
this._allowRedirects &&
redirectsRemaining > 0) {
const redirectUrl = response.message.headers['location'];
if (!redirectUrl) {
// if there's no location to redirect to, we won't
break;
}
let parsedRedirectUrl = new URL(redirectUrl);
if (parsedUrl.protocol == 'https:' &&
parsedUrl.protocol != parsedRedirectUrl.protocol &&
!this._allowRedirectDowngrade) {
throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
}
// we need to finish reading the response before reassigning it;
// otherwise the open socket would leak.
await response.readBody();
// strip authorization header if redirected to a different hostname
if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
for (let header in headers) {
// header names are case insensitive
if (header.toLowerCase() === 'authorization') {
delete headers[header];
}
}
}
// let's make the request with the new redirectUrl
info = this._prepareRequest(verb, parsedRedirectUrl, headers);
response = await this.requestRaw(info, data);
redirectsRemaining--;
}
if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
// If not a retry code, return immediately instead of retrying
return response;
}
numTries += 1;
if (numTries < maxTries) {
await response.readBody();
await this._performExponentialBackoff(numTries);
}
}
return response;
}
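// Example (editor's sketch): because only verbs listed in RetryableHttpVerbs
// are retried, a POST is attempted exactly once even with allowRetries set,
// while a GET may be retried up to maxRetries additional times.
//
//   const retrying = new HttpClient('agent', [], { allowRetries: true, maxRetries: 2 });
//   await retrying.get('https://example.com/flaky'); // up to 3 attempts
//   await retrying.post('https://example.com/submit', '{}'); // exactly 1 attempt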
/**
* Needs to be called if keepAlive is set to true in request options.
*/
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
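// Example (editor's sketch): with keepAlive enabled the agent holds sockets
// open, so the process may not exit until dispose() is called.
//
//   const keptAlive = new HttpClient('agent', [], { keepAlive: true });
//   try {
//       await keptAlive.get('https://example.com');
//   } finally {
//       keptAlive.dispose();
//   }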
/**
* Raw request.
* @param info
* @param data
*/
requestRaw(info, data) {
return new Promise((resolve, reject) => {
let callbackForResult = function (err, res) {
if (err) {
reject(err);
}
resolve(res);
};
this.requestRawWithCallback(info, data, callbackForResult);
});
}
/**
* Raw request with callback.
* @param info
* @param data
* @param onResult
*/
requestRawWithCallback(info, data, onResult) {
let socket;
if (typeof data === 'string') {
info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
}
let callbackCalled = false;
let handleResult = (err, res) => {
if (!callbackCalled) {
callbackCalled = true;
onResult(err, res);
}
};
let req = info.httpModule.request(info.options, (msg) => {
let res = new HttpClientResponse(msg);
handleResult(null, res);
});
req.on('socket', sock => {
socket = sock;
});
// If we ever get disconnected, we want the socket to timeout eventually
req.setTimeout(this._socketTimeout || 3 * 60000, () => {
if (socket) {
socket.end();
}
handleResult(new Error('Request timeout: ' + info.options.path), null);
});
req.on('error', function (err) {
// err has statusCode property
// res should have headers
handleResult(err, null);
});
if (data && typeof data === 'string') {
req.write(data, 'utf8');
}
if (data && typeof data !== 'string') {
data.on('close', function () {
req.end();
});
data.pipe(req);
}
else {
req.end();
}
}
/**
* Gets an http agent. This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
getAgent(serverUrl) {
let parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
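// Example (editor's sketch): getAgent() returns a tunneling proxy agent when a
// matching proxy environment variable is set, and a plain agent otherwise.
// The proxy URL below is a hypothetical value.
//
//   process.env['HTTPS_PROXY'] = 'http://proxy.internal:8080';
//   const agent = client.getAgent('https://api.github.com'); // tunnel via proxy.internal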
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
this.handlers.forEach(handler => {
handler.prepareRequest(info.options);
});
}
return info;
}
_mergeHeaders(headers) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
}
return lowercaseKeys(headers || {});
}
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
_getAgent(parsedUrl) {
let agent;
let proxyUrl = pm.getProxyUrl(parsedUrl);
let useProxy = proxyUrl && proxyUrl.hostname;
if (this._keepAlive && useProxy) {
agent = this._proxyAgent;
}
if (this._keepAlive && !useProxy) {
agent = this._agent;
}
// if agent is already assigned use that agent.
if (!!agent) {
return agent;
}
const usingSsl = parsedUrl.protocol === 'https:';
let maxSockets = 100;
if (!!this.requestOptions) {
maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
}
if (useProxy) {
// If using proxy, need tunnel
if (!tunnel) {
tunnel = __nccwpck_require__(4294);
}
const agentOptions = {
maxSockets: maxSockets,
keepAlive: this._keepAlive,
proxy: {
...((proxyUrl.username || proxyUrl.password) && {
proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
}),
host: proxyUrl.hostname,
port: proxyUrl.port
}
};
let tunnelAgent;
const overHttps = proxyUrl.protocol === 'https:';
if (usingSsl) {
tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
}
else {
tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
}
agent = tunnelAgent(agentOptions);
this._proxyAgent = agent;
}
// if reusing the agent across requests and a tunneling agent isn't assigned, create a new agent
if (this._keepAlive && !agent) {
const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
this._agent = agent;
}
// if not using a private agent and a tunnel agent isn't set up, use the global agent
if (!agent) {
agent = usingSsl ? https.globalAgent : http.globalAgent;
}
if (usingSsl && this._ignoreSslError) {
// we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that would affect requests for the entire process
// http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
// we have to cast it to any and change it directly
agent.options = Object.assign(agent.options || {}, {
rejectUnauthorized: false
});
}
return agent;
}
_performExponentialBackoff(retryNumber) {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
}
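// Example (editor's sketch): the delay grows as timeSlice * 2^retryNumber,
// capped by the ceiling. Assuming the bundle's usual constants (5 ms time
// slice, ceiling of 10), retries wait 10 ms, 20 ms, 40 ms, ... up to ~5.1 s.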
static dateTimeDeserializer(key, value) {
if (typeof value === 'string') {
let a = new Date(value);
if (!isNaN(a.valueOf())) {
return a;
}
}
return value;
}
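// Example (editor's sketch): with the `deserializeDates` request option, ISO
// date strings in JSON bodies are revived as Date objects during parsing.
//
//   JSON.parse('{"created":"2022-01-01T00:00:00Z"}', HttpClient.dateTimeDeserializer);
//   // => { created: 2022-01-01T00:00:00.000Z (a Date instance) }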
async _processResponse(res, options) {
return new Promise(async (resolve, reject) => {
const statusCode = res.message.statusCode;
const response = {
statusCode: statusCode,
result: null,
headers: {}
};
// not found leads to null obj returned
if (statusCode == HttpCodes.NotFound) {
resolve(response);
}
let obj;
let contents;
// get the result from the body
try {
contents = await res.readBody();
if (contents && contents.length > 0) {
if (options && options.deserializeDates) {
obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
}
else {
obj = JSON.parse(contents);
}
response.result = obj;
}
response.headers = res.message.headers;
}
catch (err) {
// Invalid resource (contents not json); leaving result obj null
}
// note that 3xx redirects are handled by the http layer.
if (statusCode > 299) {
let msg;
// if exception/error in body, attempt to get better error
if (obj && obj.message) {
msg = obj.message;
}
else if (contents && contents.length > 0) {
// it may be the case that the exception is in the body message as string
msg = contents;
}
else {
msg = 'Failed request: (' + statusCode + ')';
}
let err = new HttpClientError(msg, statusCode);
err.result = response.result;
reject(err);
}
else {
resolve(response);
}
});
}
}
exports.HttpClient = HttpClient;
/***/ }),
/***/ 6443:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function getProxyUrl(reqUrl) {
let usingSsl = reqUrl.protocol === 'https:';
let proxyUrl;
if (checkBypass(reqUrl)) {
return proxyUrl;
}
let proxyVar;
if (usingSsl) {
proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
}
else {
proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
}
if (proxyVar) {
proxyUrl = new URL(proxyVar);
}
return proxyUrl;
}
exports.getProxyUrl = getProxyUrl;
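// Example (editor's sketch): https requests consult https_proxy/HTTPS_PROXY
// and http requests consult http_proxy/HTTP_PROXY. Values are hypothetical.
//
//   process.env['https_proxy'] = 'http://proxy.internal:8080';
//   getProxyUrl(new URL('https://api.github.com')); // URL for proxy.internal:8080
//   getProxyUrl(new URL('http://example.com')); // undefined unless http_proxy is set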
function checkBypass(reqUrl) {
if (!reqUrl.hostname) {
return false;
}
let noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
if (!noProxy) {
return false;
}
// Determine the request port
let reqPort;
if (reqUrl.port) {
reqPort = Number(reqUrl.port);
}
else if (reqUrl.protocol === 'http:') {
reqPort = 80;
}
else if (reqUrl.protocol === 'https:') {
reqPort = 443;
}
// Format the request hostname and hostname with port
let upperReqHosts = [reqUrl.hostname.toUpperCase()];
if (typeof reqPort === 'number') {
upperReqHosts.push(`${upperReqHosts[0]}:${reqPort}`);
}
// Compare request host against no_proxy entries
for (let upperNoProxyItem of noProxy
.split(',')
.map(x => x.trim().toUpperCase())
.filter(x => x)) {
if (upperReqHosts.some(x => x === upperNoProxyItem)) {
return true;
}
}
return false;
}
exports.checkBypass = checkBypass;
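// Example (editor's sketch): no_proxy entries are matched case-insensitively
// against both the bare hostname and hostname:port.
//
//   process.env['no_proxy'] = 'example.com,internal.test:8080';
//   checkBypass(new URL('https://EXAMPLE.com')); // true
//   checkBypass(new URL('http://internal.test:8080')); // true
//   checkBypass(new URL('http://internal.test')); // false (port 80 not listed)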
/***/ }),
/***/ 334:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
const REGEX_IS_INSTALLATION = /^ghs_/;
const REGEX_IS_USER_TO_SERVER = /^ghu_/;
async function auth(token) {
const isApp = token.split(/\./).length === 3;
const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
return {
type: "token",
token: token,
tokenType
};
}
/**
* Prefix token for usage in the Authorization header
*
* @param token OAuth token or JSON Web Token
*/
function withAuthorizationPrefix(token) {
if (token.split(/\./).length === 3) {
return `bearer ${token}`;
}
return `token ${token}`;
}
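// Example (editor's sketch): a three-part token (header.payload.signature) is
// treated as a JWT and prefixed with `bearer`; anything else gets `token`.
// Both tokens below are fabricated placeholders.
//
//   withAuthorizationPrefix('eyJhbGc.eyJpc3M.c2ln'); // "bearer eyJhbGc.eyJpc3M.c2ln"
//   withAuthorizationPrefix('ghp_example123'); // "token ghp_example123"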
async function hook(token, request, route, parameters) {
const endpoint = request.endpoint.merge(route, parameters);
endpoint.headers.authorization = withAuthorizationPrefix(token);
return request(endpoint);
}
const createTokenAuth = function createTokenAuth(token) {
if (!token) {
throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
}
if (typeof token !== "string") {
throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
}
token = token.replace(/^(token|bearer) +/i, "");
return Object.assign(auth.bind(null, token), {
hook: hook.bind(null, token)
});
};
exports.createTokenAuth = createTokenAuth;
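// Example (editor's sketch): the returned function resolves with token
// metadata, and its `hook` injects the Authorization header into requests.
// The token is a fabricated placeholder.
//
//   const auth = createTokenAuth('ghp_exampleToken123');
//   const { type, tokenType } = await auth(); // { type: "token", tokenType: "oauth" }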
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 6762:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var universalUserAgent = __nccwpck_require__(5030);
var beforeAfterHook = __nccwpck_require__(3682);
var request = __nccwpck_require__(6234);
var graphql = __nccwpck_require__(8467);
var authToken = __nccwpck_require__(334);
function _objectWithoutPropertiesLoose(source, excluded) {
if (source == null) return {};
var target = {};
var sourceKeys = Object.keys(source);
var key, i;
for (i = 0; i < sourceKeys.length; i++) {
key = sourceKeys[i];
if (excluded.indexOf(key) >= 0) continue;
target[key] = source[key];
}
return target;
}
function _objectWithoutProperties(source, excluded) {
if (source == null) return {};
var target = _objectWithoutPropertiesLoose(source, excluded);
var key, i;
if (Object.getOwnPropertySymbols) {
var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
for (i = 0; i < sourceSymbolKeys.length; i++) {
key = sourceSymbolKeys[i];
if (excluded.indexOf(key) >= 0) continue;
if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
target[key] = source[key];
}
}
return target;
}
const VERSION = "3.6.0";
const _excluded = ["authStrategy"];
class Octokit {
constructor(options = {}) {
const hook = new beforeAfterHook.Collection();
const requestDefaults = {
baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
headers: {},
request: Object.assign({}, options.request, {
// @ts-ignore internal usage only, no need to type
hook: hook.bind(null, "request")
}),
mediaType: {
previews: [],
format: ""
}
}; // prepend default user agent with `options.userAgent` if set
requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
if (options.baseUrl) {
requestDefaults.baseUrl = options.baseUrl;
}
if (options.previews) {
requestDefaults.mediaType.previews = options.previews;
}
if (options.timeZone) {
requestDefaults.headers["time-zone"] = options.timeZone;
}
this.request = request.request.defaults(requestDefaults);
this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
this.log = Object.assign({
debug: () => {},
info: () => {},
warn: console.warn.bind(console),
error: console.error.bind(console)
}, options.log);
this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
// is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
// (2) If only `options.auth` is set, use the default token authentication strategy.
// (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
// TODO: type `options.auth` based on `options.authStrategy`.
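// Example (editor's sketch): the three auth paths above from a caller's view;
// tokens and the app strategy are illustrative (createAppAuth is from
// @octokit/auth-app, which this bundle does not necessarily include).
//
//   new Octokit(); // (1) unauthenticated; auth() is a no-op
//   new Octokit({ auth: 'ghp_exampleToken' }); // (2) token strategy wraps every request
//   new Octokit({ authStrategy: createAppAuth, auth: { appId: 1, privateKey } }); // (3)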
if (!options.authStrategy) {
if (!options.auth) {
// (1)
this.auth = async () => ({
type: "unauthenticated"
});
} else {
// (2)
const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
hook.wrap("request", auth.hook);
this.auth = auth;
}
} else {
const {
authStrategy
} = options,
otherOptions = _objectWithoutProperties(options, _excluded);
hook.wrap("request", auth.hook);
this.auth = auth;
}
} else {
const {
authStrategy
} = options,
otherOptions = _objectWithoutProperties(options, _excluded);
const auth = authStrategy(Object.assign({
request: this.request,
......@@ -3231,7 +3237,7 @@ exports.endpoint = endpoint;
Object.defineProperty(exports, "__esModule", ({ value: true }));
var request = __nccwpck_require__(3758);
var request = __nccwpck_require__(6234);
var universalUserAgent = __nccwpck_require__(5030);
const VERSION = "4.8.0";
......@@ -3349,234 +3355,37 @@ exports.withCustomRequest = withCustomRequest;
/***/ }),
/***/ 3758:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var endpoint = __nccwpck_require__(9440);
var universalUserAgent = __nccwpck_require__(5030);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);
const VERSION = "5.6.2";
function getBufferResponse(response) {
return response.arrayBuffer();
}
function fetchWrapper(requestOptions) {
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
requestOptions.body = JSON.stringify(requestOptions.body);
}
let headers = {};
let status;
let url;
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
return fetch(requestOptions.url, Object.assign({
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
redirect: requestOptions.redirect
}, // `requestOptions.request.agent` type is incompatible
// see https://github.com/octokit/types.ts/pull/264
requestOptions.request)).then(async response => {
url = response.url;
status = response.status;
for (const keyAndValue of response.headers) {
headers[keyAndValue[0]] = keyAndValue[1];
}
if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop();
log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
}
if (status === 204 || status === 205) {
return;
} // GitHub API returns 200 for HEAD requests
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
throw new requestError.RequestError(response.statusText, status, {
response: {
url,
status,
headers,
data: undefined
},
request: requestOptions
});
}
if (status === 304) {
throw new requestError.RequestError("Not modified", status, {
response: {
url,
status,
headers,
data: await getResponseData(response)
},
request: requestOptions
});
}
if (status >= 400) {
const data = await getResponseData(response);
const error = new requestError.RequestError(toErrorMessage(data), status, {
response: {
url,
status,
headers,
data
},
request: requestOptions
});
throw error;
}
return getResponseData(response);
}).then(data => {
return {
status,
url,
headers,
data
};
}).catch(error => {
if (error instanceof requestError.RequestError) throw error;
throw new requestError.RequestError(error.message, 500, {
request: requestOptions
});
});
}
async function getResponseData(response) {
const contentType = response.headers.get("content-type");
if (/application\/json/.test(contentType)) {
return response.json();
}
if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
return response.text();
}
return getBufferResponse(response);
}
function toErrorMessage(data) {
if (typeof data === "string") return data; // istanbul ignore else - just in case
if ("message" in data) {
if (Array.isArray(data.errors)) {
return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
}
return data.message;
} // istanbul ignore next - just in case
return `Unknown error: ${JSON.stringify(data)}`;
}
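// Example (editor's sketch): error bodies that carry an `errors` array are
// flattened into a single message string.
//
//   toErrorMessage({ message: 'Validation Failed', errors: [{ field: 'name' }] });
//   // => 'Validation Failed: {"field":"name"}'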
function withDefaults(oldEndpoint, newDefaults) {
const endpoint = oldEndpoint.defaults(newDefaults);
const newApi = function (route, parameters) {
const endpointOptions = endpoint.merge(route, parameters);
if (!endpointOptions.request || !endpointOptions.request.hook) {
return fetchWrapper(endpoint.parse(endpointOptions));
}
const request = (route, parameters) => {
return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
};
Object.assign(request, {
endpoint,
defaults: withDefaults.bind(null, endpoint)
});
return endpointOptions.request.hook(request, endpointOptions);
};
return Object.assign(newApi, {
endpoint,
defaults: withDefaults.bind(null, endpoint)
});
}
const request = withDefaults(endpoint.endpoint, {
headers: {
"user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
}
});
exports.request = request;
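// Example (editor's sketch): the exported `request` routes through
// fetchWrapper above, with route placeholders expanded by the endpoint
// module. The owner/repo values are hypothetical.
//
//   const { status, data } = await request('GET /repos/{owner}/{repo}', {
//       owner: 'devcontainers',
//       repo: 'action'
//   });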
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 4193:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const VERSION = "2.17.0";
function ownKeys(object, enumerableOnly) {
var keys = Object.keys(object);
if (Object.getOwnPropertySymbols) {
var symbols = Object.getOwnPropertySymbols(object);
if (enumerableOnly) {
symbols = symbols.filter(function (sym) {
return Object.getOwnPropertyDescriptor(object, sym).enumerable;
});
}
keys.push.apply(keys, symbols);
}
return keys;
}
function _objectSpread2(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i] != null ? arguments[i] : {};
if (i % 2) {
ownKeys(Object(source), true).forEach(function (key) {
_defineProperty(target, key, source[key]);
});
} else if (Object.getOwnPropertyDescriptors) {
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
} else {
ownKeys(Object(source)).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
});
}
function _objectSpread2(target) {
for (var i = 1; i < arguments.length; i++) {
var source = null != arguments[i] ? arguments[i] : {};
i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
_defineProperty(target, key, source[key]);
}) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
});
}
return target;
......@@ -3726,7 +3535,7 @@ const composePaginateRest = Object.assign(paginate, {
iterator
});
const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/autolinks", "GET /repos/{owner}/{repo}/branches", "GET 
/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups", "GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET 
/teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET 
/repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
function isPaginatingEndpoint(arg) {
if (typeof arg === "string") {
......@@ -3822,6 +3631,8 @@ function _defineProperty(obj, key, value) {
const Endpoints = {
actions: {
addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"],
addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
......@@ -3833,6 +3644,8 @@ const Endpoints = {
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"],
deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"],
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
......@@ -3849,11 +3662,19 @@ const Endpoints = {
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"],
getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"],
getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"],
getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"],
getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"],
getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"],
getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
......@@ -3869,6 +3690,7 @@ const Endpoints = {
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"],
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"],
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
......@@ -3877,6 +3699,8 @@ const Endpoints = {
listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"],
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"],
listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"],
listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
......@@ -3889,14 +3713,27 @@ const Endpoints = {
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"],
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"],
removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"],
removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"],
removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"],
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"],
setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"],
setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"],
setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"],
setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"]
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"],
setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"]
},
activity: {
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
......@@ -3937,16 +3774,6 @@ const Endpoints = {
}],
addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
checkToken: ["POST /applications/{client_id}/token"],
createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
mediaType: {
previews: ["corsair"]
}
}],
createContentAttachmentForRepo: ["POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", {
mediaType: {
previews: ["corsair"]
}
}],
createFromManifest: ["POST /app-manifests/{code}/conversions"],
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
......@@ -3988,6 +3815,8 @@ const Endpoints = {
billing: {
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"],
getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"],
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
......@@ -4017,6 +3846,7 @@ const Endpoints = {
getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, {
renamed: ["codeScanning", "listAlertInstances"]
......@@ -4029,16 +3859,80 @@ const Endpoints = {
getAllCodesOfConduct: ["GET /codes_of_conduct"],
getConductCode: ["GET /codes_of_conduct/{key}"]
},
codespaces: {
addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"],
createForAuthenticatedUser: ["POST /user/codespaces"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"],
createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"],
createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"],
deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"],
exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"],
getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"],
getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"],
listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"],
listForAuthenticatedUser: ["GET /user/codespaces"],
listInOrganization: ["GET /orgs/{org}/codespaces", {}, {
renamedParameters: {
org_id: "org"
}
}],
listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"],
listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"],
setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"],
startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"],
updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
},
dependabot: {
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"],
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]
},
dependencyGraph: {
createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"],
diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"]
},
emojis: {
get: ["GET /emojis"]
},
enterpriseAdmin: {
addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"],
listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"],
setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
},
@@ -4209,6 +4103,7 @@ const Endpoints = {
list: ["GET /organizations"],
listAppInstallations: ["GET /orgs/{org}/installations"],
listBlockedUsers: ["GET /orgs/{org}/blocks"],
listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"],
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
listForAuthenticatedUser: ["GET /user/orgs"],
listForUser: ["GET /users/{username}/orgs"],
@@ -4337,12 +4232,14 @@ const Endpoints = {
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"],
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"],
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"],
deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"],
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"],
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"],
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"],
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]
},
@@ -4366,6 +4263,7 @@ const Endpoints = {
}],
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"],
codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"],
createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
@@ -4383,6 +4281,7 @@ const Endpoints = {
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
createRelease: ["POST /repos/{owner}/{repo}/releases"],
createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"],
createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, {
@@ -4405,6 +4304,7 @@ const Endpoints = {
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"],
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"],
disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"],
@@ -4423,11 +4323,7 @@ const Endpoints = {
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
getAllTopics: ["GET /repos/{owner}/{repo}/topics", {
mediaType: {
previews: ["mercy"]
}
}],
getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
@@ -4493,6 +4389,7 @@ const Endpoints = {
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"],
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
listReleases: ["GET /repos/{owner}/{repo}/releases"],
listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
listTags: ["GET /repos/{owner}/{repo}/tags"],
listTeams: ["GET /repos/{owner}/{repo}/teams"],
listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"],
@@ -4516,11 +4413,7 @@ const Endpoints = {
mapToData: "users"
}],
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", {
mediaType: {
previews: ["mercy"]
}
}],
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
@@ -4561,17 +4454,15 @@ const Endpoints = {
issuesAndPullRequests: ["GET /search/issues"],
labels: ["GET /search/labels"],
repos: ["GET /search/repositories"],
topics: ["GET /search/topics", {
mediaType: {
previews: ["mercy"]
}
}],
topics: ["GET /search/topics"],
users: ["GET /search/users"]
},
secretScanning: {
getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"],
listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"],
updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
},
teams: {
@@ -4687,1185 +4578,1842 @@ const Endpoints = {
}
};
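// Each entry above maps a scope/method pair to a route tuple of the form
// [route, defaults, decorations]: `route` is "METHOD /path" with {placeholders},
// `defaults` are extra request defaults baked into the generated method, and
// `decorations` describe renames/deprecations handled by decorate() below.
// For example, "listInOrganization" above keeps accepting the deprecated
// `org_id` parameter by renaming it to `org` at call time.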
const VERSION = "5.13.0";
const VERSION = "5.16.2";
function endpointsToMethods(octokit, endpointsMap) {
const newMethods = {};
for (const [scope, endpoints] of Object.entries(endpointsMap)) {
for (const [methodName, endpoint] of Object.entries(endpoints)) {
const [route, defaults, decorations] = endpoint;
const [method, url] = route.split(/ /);
const endpointDefaults = Object.assign({
method,
url
}, defaults);
if (!newMethods[scope]) {
newMethods[scope] = {};
}
const scopeMethods = newMethods[scope];
if (decorations) {
scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations);
continue;
}
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults);
}
}
return newMethods;
}
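// The generated methods are plain `octokit.request.defaults(...)` bindings,
// so a call such as `octokit.rest.repos.listTags({ owner, repo })` resolves
// to `octokit.request("GET /repos/{owner}/{repo}/tags", { owner, repo })`.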
function decorate(octokit, scope, methodName, defaults, decorations) {
const requestWithDefaults = octokit.request.defaults(defaults);
/* istanbul ignore next */
function withDecorations(...args) {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
if (decorations.mapToData) {
options = Object.assign({}, options, {
data: options[decorations.mapToData],
[decorations.mapToData]: undefined
});
return requestWithDefaults(options);
}
if (decorations.renamed) {
const [newScope, newMethodName] = decorations.renamed;
octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
}
if (decorations.deprecated) {
octokit.log.warn(decorations.deprecated);
}
if (decorations.renamedParameters) {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
const options = requestWithDefaults.endpoint.merge(...args);
for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
if (name in options) {
octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
if (!(alias in options)) {
options[alias] = options[name];
}
delete options[name];
}
}
return requestWithDefaults(options);
} // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
return requestWithDefaults(...args);
}
return Object.assign(withDecorations, requestWithDefaults);
}
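// decorate() wraps a generated method only when the endpoint carries extra
// metadata: `mapToData` moves a named parameter into the request body,
// `renamed`/`deprecated` emit log warnings, and `renamedParameters` copies a
// legacy parameter onto its new name before the request is sent.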
function restEndpointMethods(octokit) {
const api = endpointsToMethods(octokit, Endpoints);
return {
rest: api
};
}
restEndpointMethods.VERSION = VERSION;
function legacyRestEndpointMethods(octokit) {
const api = endpointsToMethods(octokit, Endpoints);
return _objectSpread2(_objectSpread2({}, api), {}, {
rest: api
});
}
legacyRestEndpointMethods.VERSION = VERSION;
exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
exports.restEndpointMethods = restEndpointMethods;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 537:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var deprecation = __nccwpck_require__(8932);
var once = _interopDefault(__nccwpck_require__(1223));
const logOnceCode = once(deprecation => console.warn(deprecation));
const logOnceHeaders = once(deprecation => console.warn(deprecation));
/**
* Error with extra properties to help with debugging
*/
class RequestError extends Error {
constructor(message, statusCode, options) {
super(message); // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
this.name = "HttpError";
this.status = statusCode;
let headers;
if ("headers" in options && typeof options.headers !== "undefined") {
headers = options.headers;
}
if ("response" in options) {
this.response = options.response;
headers = options.response.headers;
} // redact request credentials without mutating original request options
const requestCopy = Object.assign({}, options.request);
if (options.request.headers.authorization) {
requestCopy.headers = Object.assign({}, options.request.headers, {
authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
});
}
requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
.replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
this.request = requestCopy; // deprecations
Object.defineProperty(this, "code", {
get() {
logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
return statusCode;
}
});
Object.defineProperty(this, "headers", {
get() {
logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
return headers || {};
}
});
}
}
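// Note: the constructor above redacts the Authorization header and any
// client_secret/access_token query parameters from the copied request before
// exposing it on `error.request`, and the legacy `error.code`/`error.headers`
// getters each log only a single deprecation warning per process.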
exports.RequestError = RequestError;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 6234:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var endpoint = __nccwpck_require__(9440);
var universalUserAgent = __nccwpck_require__(5030);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);
const VERSION = "5.6.3";
function getBufferResponse(response) {
return response.arrayBuffer();
}
function fetchWrapper(requestOptions) {
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
requestOptions.body = JSON.stringify(requestOptions.body);
}
let headers = {};
let status;
let url;
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
return fetch(requestOptions.url, Object.assign({
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
redirect: requestOptions.redirect
}, // `requestOptions.request.agent` type is incompatible
// see https://github.com/octokit/types.ts/pull/264
requestOptions.request)).then(async response => {
url = response.url;
status = response.status;
for (const keyAndValue of response.headers) {
headers[keyAndValue[0]] = keyAndValue[1];
}
if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop();
log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
}
if (status === 204 || status === 205) {
return;
} // GitHub API returns 200 for HEAD requests
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
throw new requestError.RequestError(response.statusText, status, {
response: {
url,
status,
headers,
data: undefined
},
request: requestOptions
});
}
if (status === 304) {
throw new requestError.RequestError("Not modified", status, {
response: {
url,
status,
headers,
data: await getResponseData(response)
},
request: requestOptions
});
}
if (status >= 400) {
const data = await getResponseData(response);
const error = new requestError.RequestError(toErrorMessage(data), status, {
response: {
url,
status,
headers,
data
},
request: requestOptions
});
throw error;
}
return getResponseData(response);
}).then(data => {
return {
status,
url,
headers,
data
};
}).catch(error => {
if (error instanceof requestError.RequestError) throw error;
throw new requestError.RequestError(error.message, 500, {
request: requestOptions
});
});
}
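// fetchWrapper() normalizes fetch results into octokit's {status, url,
// headers, data} shape: 204/205 and successful HEAD responses resolve with no
// data, 304 and >=400 statuses become RequestError instances, and any
// non-RequestError failure is re-thrown as a RequestError with status 500.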
async function getResponseData(response) {
const contentType = response.headers.get("content-type");
if (/application\/json/.test(contentType)) {
return response.json();
}
if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) {
return response.text();
}
return getBufferResponse(response);
}
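// getResponseData() picks the body representation by content-type: JSON is
// parsed, text-ish payloads are returned as strings, and anything else falls
// back to a raw ArrayBuffer via getBufferResponse().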
function toErrorMessage(data) {
  if (typeof data === "string") return data; // istanbul ignore else - just in case

  if ("message" in data) {
    if (Array.isArray(data.errors)) {
      return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
    }

    return data.message;
  } // istanbul ignore next - just in case

  return `Unknown error: ${JSON.stringify(data)}`;
}

function withDefaults(oldEndpoint, newDefaults) {
  const endpoint = oldEndpoint.defaults(newDefaults);

  const newApi = function (route, parameters) {
    const endpointOptions = endpoint.merge(route, parameters);

    if (!endpointOptions.request || !endpointOptions.request.hook) {
      return fetchWrapper(endpoint.parse(endpointOptions));
    }

    const request = (route, parameters) => {
      return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
    };

    Object.assign(request, {
      endpoint,
      defaults: withDefaults.bind(null, endpoint)
    });
    return endpointOptions.request.hook(request, endpointOptions);
  };

  return Object.assign(newApi, {
    endpoint,
    defaults: withDefaults.bind(null, endpoint)
  });
}

const request = withDefaults(endpoint.endpoint, {
  headers: {
    "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
  }
});
exports.request = request;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 3682:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
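// Bundled copy of the `before-after-hook` package: octokit uses it to let
// plugins register before/error/after/wrap hooks around each request.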
var register = __nccwpck_require__(4670)
var addHook = __nccwpck_require__(5549)
var removeHook = __nccwpck_require__(6819)

// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function.bind
var bindable = bind.bind(bind)

function bindApi (hook, state, name) {
  var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
  hook.api = { remove: removeHookRef }
  hook.remove = removeHookRef
  ;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
    var args = name ? [state, kind, name] : [state, kind]
    hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
  })
}

function HookSingular () {
  var singularHookName = 'h'
  var singularHookState = {
    registry: {}
  }
  var singularHook = register.bind(null, singularHookState, singularHookName)
  bindApi(singularHook, singularHookState, singularHookName)
  return singularHook
}

function HookCollection () {
  var state = {
    registry: {}
  }

  var hook = register.bind(null, state)
  bindApi(hook, state)

  return hook
}

var collectionHookDeprecationMessageDisplayed = false
function Hook () {
  if (!collectionHookDeprecationMessageDisplayed) {
    console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
    collectionHookDeprecationMessageDisplayed = true
  }
  return HookCollection()
}

Hook.Singular = HookSingular.bind()
Hook.Collection = HookCollection.bind()

module.exports = Hook
// expose constructors as a named property for TypeScript
module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection

/***/ }),

/***/ 5549:
/***/ ((module) => {

module.exports = addHook;

function addHook(state, kind, name, hook) {
  var orig = hook;
  if (!state.registry[name]) {
    state.registry[name] = [];
  }

  if (kind === "before") {
    hook = function (method, options) {
      return Promise.resolve()
        .then(orig.bind(null, options))
        .then(method.bind(null, options));
    };
  }

  if (kind === "after") {
    hook = function (method, options) {
      var result;
      return Promise.resolve()
        .then(method.bind(null, options))
        .then(function (result_) {
          result = result_;
          return orig(result, options);
        })
        .then(function () {
          return result;
        });
    };
  }

  if (kind === "error") {
    hook = function (method, options) {
      return Promise.resolve()
        .then(method.bind(null, options))
        .catch(function (error) {
          return orig(error, options);
        });
    };
  }

  state.registry[name].push({
    hook: hook,
    orig: orig,
  });
}

/***/ }),

/***/ 4670:
/***/ ((module) => {

module.exports = register;

function register(state, name, method, options) {
  if (typeof method !== "function") {
    throw new Error("method for before hook must be a function");
  }

  if (!options) {
    options = {};
  }

  if (Array.isArray(name)) {
    return name.reverse().reduce(function (callback, name) {
      return register.bind(null, state, name, callback, options);
    }, method)();
  }

  return Promise.resolve().then(function () {
    if (!state.registry[name]) {
      return method(options);
    }

    return state.registry[name].reduce(function (method, registered) {
      return registered.hook.bind(null, method, options);
    }, method)();
  });
}

/***/ }),

/***/ 6819:
/***/ ((module) => {

module.exports = removeHook;

function removeHook(state, name, method) {
  if (!state.registry[name]) {
    return;
  }

  var index = state.registry[name]
    .map(function (registered) {
      return registered.orig;
    })
    .indexOf(method);

  if (index === -1) {
    return;
  }

  state.registry[name].splice(index, 1);
}
/***/ }),
/***/ 9051:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
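// Bundled copy of the `chownr` package: recursively chowns a directory tree,
// tolerating ENOENT races and falling back from lchown to chown where needed.
// It is likely pulled in here as a dependency of `tar` (assumption, not
// confirmed by this diff).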
"use strict";
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
/* istanbul ignore next */
const LCHOWN = fs.lchown ? 'lchown' : 'chown'
/* istanbul ignore next */
const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
var deprecation = __nccwpck_require__(8932);
var once = _interopDefault(__nccwpck_require__(1223));
/* istanbul ignore next */
const needEISDIRHandled = fs.lchown &&
!process.version.match(/v1[1-9]+\./) &&
!process.version.match(/v10\.[6-9]/)
const logOnceCode = once(deprecation => console.warn(deprecation));
const logOnceHeaders = once(deprecation => console.warn(deprecation));
/**
* Error with extra properties to help with debugging
*/
const lchownSync = (path, uid, gid) => {
try {
return fs[LCHOWNSYNC](path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
}
}
class RequestError extends Error {
constructor(message, statusCode, options) {
super(message); // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */
const chownSync = (path, uid, gid) => {
try {
return fs.chownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
}
}
/* istanbul ignore next */
/* istanbul ignore next */
const handleEISDIR =
needEISDIRHandled ? (path, uid, gid, cb) => er => {
// Node prior to v10 had a very questionable implementation of
// fs.lchown, which would always try to call fs.open on a directory
// Fall back to fs.chown in those cases.
if (!er || er.code !== 'EISDIR')
cb(er)
else
fs.chown(path, uid, gid, cb)
}
: (_, __, ___, cb) => cb
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
/* istanbul ignore next */
const handleEISDirSync =
needEISDIRHandled ? (path, uid, gid) => {
try {
return lchownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'EISDIR')
throw er
chownSync(path, uid, gid)
}
}
: (path, uid, gid) => lchownSync(path, uid, gid)
this.name = "HttpError";
this.status = statusCode;
let headers;
// fs.readdir could only accept an options object as of node v6
const nodeVersion = process.version
let readdir = (path, options, cb) => fs.readdir(path, options, cb)
let readdirSync = (path, options) => fs.readdirSync(path, options)
/* istanbul ignore next */
if (/^v4\./.test(nodeVersion))
readdir = (path, options, cb) => fs.readdir(path, cb)
if ("headers" in options && typeof options.headers !== "undefined") {
headers = options.headers;
}
const chown = (cpath, uid, gid, cb) => {
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
// Skip ENOENT error
cb(er && er.code !== 'ENOENT' ? er : null)
}))
}
if ("response" in options) {
this.response = options.response;
headers = options.response.headers;
} // redact request credentials without mutating original request options
const chownrKid = (p, child, uid, gid, cb) => {
if (typeof child === 'string')
return fs.lstat(path.resolve(p, child), (er, stats) => {
// Skip ENOENT error
if (er)
return cb(er.code !== 'ENOENT' ? er : null)
stats.name = child
chownrKid(p, stats, uid, gid, cb)
})
if (child.isDirectory()) {
chownr(path.resolve(p, child.name), uid, gid, er => {
if (er)
return cb(er)
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
})
} else {
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
}
}
const requestCopy = Object.assign({}, options.request);
if (options.request.headers.authorization) {
requestCopy.headers = Object.assign({}, options.request.headers, {
authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
});
const chownr = (p, uid, gid, cb) => {
readdir(p, { withFileTypes: true }, (er, children) => {
// any error other than ENOTDIR or ENOTSUP means it's not readable,
// or doesn't exist. give up.
if (er) {
if (er.code === 'ENOENT')
return cb()
else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
return cb(er)
}
if (er || !children.length)
return chown(p, uid, gid, cb)
requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
.replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
this.request = requestCopy; // deprecations
let len = children.length
let errState = null
const then = er => {
if (errState)
return
if (er)
return cb(errState = er)
if (-- len === 0)
return chown(p, uid, gid, cb)
}
Object.defineProperty(this, "code", {
get() {
logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
return statusCode;
}
children.forEach(child => chownrKid(p, child, uid, gid, then))
})
}
});
Object.defineProperty(this, "headers", {
get() {
logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
return headers || {};
}
const chownrKidSync = (p, child, uid, gid) => {
if (typeof child === 'string') {
try {
const stats = fs.lstatSync(path.resolve(p, child))
stats.name = child
child = stats
} catch (er) {
if (er.code === 'ENOENT')
return
else
throw er
}
}
});
if (child.isDirectory())
chownrSync(path.resolve(p, child.name), uid, gid)
handleEISDirSync(path.resolve(p, child.name), uid, gid)
}
const chownrSync = (p, uid, gid) => {
let children
try {
children = readdirSync(p, { withFileTypes: true })
} catch (er) {
if (er.code === 'ENOENT')
return
else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
return handleEISDirSync(p, uid, gid)
else
throw er
}
if (children && children.length)
children.forEach(child => chownrKidSync(p, child, uid, gid))
return handleEISDirSync(p, uid, gid)
}
exports.RequestError = RequestError;
//# sourceMappingURL=index.js.map
module.exports = chownr
chownr.sync = chownrSync
/***/ }),
/***/ 8932:
/***/ ((__unused_webpack_module, exports) => {
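// Bundled copy of the `deprecation` package: a tiny Error subclass that the
// octokit modules above log when a deprecated API surface is used.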
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var endpoint = __nccwpck_require__(9440);
var universalUserAgent = __nccwpck_require__(5030);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);
const VERSION = "5.6.3";
function getBufferResponse(response) {
return response.arrayBuffer();
}
function fetchWrapper(requestOptions) {
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
requestOptions.body = JSON.stringify(requestOptions.body);
}
class Deprecation extends Error {
constructor(message) {
super(message); // Maintains proper stack trace (only available on V8)
let headers = {};
let status;
let url;
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
return fetch(requestOptions.url, Object.assign({
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
redirect: requestOptions.redirect
}, // `requestOptions.request.agent` type is incompatible
// see https://github.com/octokit/types.ts/pull/264
requestOptions.request)).then(async response => {
url = response.url;
status = response.status;
/* istanbul ignore next */
for (const keyAndValue of response.headers) {
headers[keyAndValue[0]] = keyAndValue[1];
if (Error.captureStackTrace) {
Error.captureStackTrace(this, this.constructor);
}
if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop();
log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
}
this.name = 'Deprecation';
}
if (status === 204 || status === 205) {
return;
} // GitHub API returns 200 for HEAD requests
}
exports.Deprecation = Deprecation;
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
throw new requestError.RequestError(response.statusText, status, {
response: {
url,
status,
headers,
data: undefined
},
request: requestOptions
});
}
/***/ }),
/***/ 7714:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
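// Bundled copy of the `fs-minipass` package: file-backed ReadStream/WriteStream
// implementations on top of Minipass, with sync variants; the writev shim below
// only matters on Node.js versions older than 12.9.0.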
"use strict";

const MiniPass = __nccwpck_require__(2505)
const EE = (__nccwpck_require__(2361).EventEmitter)
const fs = __nccwpck_require__(7147)

let writev = fs.writev
/* istanbul ignore next */
if (!writev) {
  // This entire block can be removed if support for earlier than Node.js
  // 12.9.0 is not needed.
  const binding = process.binding('fs')
  const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback

  writev = (fd, iovec, pos, cb) => {
    const done = (er, bw) => cb(er, bw, iovec)
    const req = new FSReqWrap()
    req.oncomplete = done
    binding.writeBuffers(fd, iovec, pos, req)
  }
}

const _autoClose = Symbol('_autoClose')
const _close = Symbol('_close')
const _ended = Symbol('_ended')
const _fd = Symbol('_fd')
const _finished = Symbol('_finished')
const _flags = Symbol('_flags')
const _flush = Symbol('_flush')
const _handleChunk = Symbol('_handleChunk')
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')
const _errored = Symbol('_errored')

class ReadStream extends MiniPass {
  constructor (path, opt) {
    opt = opt || {}
    super(opt)

    this.readable = true
    this.writable = false

    if (typeof path !== 'string')
      throw new TypeError('path must be a string')

    this[_errored] = false
    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
    this[_path] = path
    this[_readSize] = opt.readSize || 16*1024*1024
    this[_reading] = false
    this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
    this[_remain] = this[_size]
    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
      opt.autoClose : true

    if (typeof this[_fd] === 'number')
      this[_read]()
    else
      this[_open]()
  }

  get fd () { return this[_fd] }
  get path () { return this[_path] }

  write () {
    throw new TypeError('this is a readable stream')
  }

  end () {
    throw new TypeError('this is a readable stream')
  }

  [_open] () {
    fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
  }

  [_onopen] (er, fd) {
    if (er)
      this[_onerror](er)
    else {
      this[_fd] = fd
      this.emit('open', fd)
      this[_read]()
    }
  }

  [_makeBuf] () {
    return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
  }

  [_read] () {
    if (!this[_reading]) {
      this[_reading] = true
      const buf = this[_makeBuf]()
      /* istanbul ignore if */
      if (buf.length === 0)
        return process.nextTick(() => this[_onread](null, 0, buf))
      fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
        this[_onread](er, br, buf))
    }
  }

  [_onread] (er, br, buf) {
    this[_reading] = false
    if (er)
      this[_onerror](er)
    else if (this[_handleChunk](br, buf))
      this[_read]()
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
    }
  }

  [_onerror] (er) {
    this[_reading] = true
    this[_close]()
    this.emit('error', er)
  }

  [_handleChunk] (br, buf) {
    let ret = false
    // no effect if infinite
    this[_remain] -= br
    if (br > 0)
      ret = super.write(br < buf.length ? buf.slice(0, br) : buf)

    if (br === 0 || this[_remain] <= 0) {
      ret = false
      this[_close]()
      super.end()
    }

    return ret
  }

  emit (ev, data) {
    switch (ev) {
      case 'prefinish':
      case 'finish':
        break

      case 'drain':
        if (typeof this[_fd] === 'number')
          this[_read]()
        break

      case 'error':
        if (this[_errored])
          return
        this[_errored] = true
        return super.emit(ev, data)

      default:
        return super.emit(ev, data)
    }
  }
}

class ReadStreamSync extends ReadStream {
  [_open] () {
    let threw = true
    try {
      this[_onopen](null, fs.openSync(this[_path], 'r'))
      threw = false
    } finally {
      if (threw)
        this[_close]()
    }
  }

  [_read] () {
    let threw = true
    try {
      if (!this[_reading]) {
        this[_reading] = true
        do {
          const buf = this[_makeBuf]()
          /* istanbul ignore next */
          const br = buf.length === 0 ? 0
            : fs.readSync(this[_fd], buf, 0, buf.length, null)
          if (!this[_handleChunk](br, buf))
            break
        } while (true)
        this[_reading] = false
      }
      threw = false
    } finally {
      if (threw)
        this[_close]()
    }
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.closeSync(fd)
      this.emit('close')
    }
  }
}

class WriteStream extends EE {
  constructor (path, opt) {
    opt = opt || {}
    super(opt)
    this.readable = false
    this.writable = true
    this[_errored] = false
    this[_writing] = false
    this[_ended] = false
    this[_needDrain] = false
    this[_queue] = []
    this[_path] = path
    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
    this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
    this[_pos] = typeof opt.start === 'number' ? opt.start : null
    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
      opt.autoClose : true

    // truncating makes no sense when writing into the middle
    const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
    this[_defaultFlag] = opt.flags === undefined
    this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags

    if (this[_fd] === null)
      this[_open]()
  }

  emit (ev, data) {
    if (ev === 'error') {
      if (this[_errored])
        return
      this[_errored] = true
    }
    return super.emit(ev, data)
  }

  get fd () { return this[_fd] }
  get path () { return this[_path] }

  [_onerror] (er) {
    this[_close]()
    this[_writing] = true
    this.emit('error', er)
  }

  [_open] () {
    fs.open(this[_path], this[_flags], this[_mode],
      (er, fd) => this[_onopen](er, fd))
  }

  [_onopen] (er, fd) {
    if (this[_defaultFlag] &&
        this[_flags] === 'r+' &&
        er && er.code === 'ENOENT') {
      this[_flags] = 'w'
      this[_open]()
    } else if (er)
      this[_onerror](er)
    else {
      this[_fd] = fd
      this.emit('open', fd)
      this[_flush]()
    }
  }

  end (buf, enc) {
    if (buf)
      this.write(buf, enc)

    this[_ended] = true

    // synthetic after-write logic, where drain/finish live
    if (!this[_writing] && !this[_queue].length &&
        typeof this[_fd] === 'number')
      this[_onwrite](null, 0)
    return this
  }

  write (buf, enc) {
    if (typeof buf === 'string')
      buf = Buffer.from(buf, enc)

    if (this[_ended]) {
      this.emit('error', new Error('write() after end()'))
      return false
    }

    if (this[_fd] === null || this[_writing] || this[_queue].length) {
      this[_queue].push(buf)
      this[_needDrain] = true
      return false
    }

    this[_writing] = true
    this[_write](buf)
    return true
  }

  [_write] (buf) {
    fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
      this[_onwrite](er, bw))
  }

  [_onwrite] (er, bw) {
    if (er)
      this[_onerror](er)
    else {
      if (this[_pos] !== null)
        this[_pos] += bw
      if (this[_queue].length)
        this[_flush]()
      else {
        this[_writing] = false

        if (this[_ended] && !this[_finished]) {
          this[_finished] = true
          this[_close]()
          this.emit('finish')
        } else if (this[_needDrain]) {
          this[_needDrain] = false
          this.emit('drain')
        }
      }
    }
  }

  [_flush] () {
    if (this[_queue].length === 0) {
      if (this[_ended])
        this[_onwrite](null, 0)
    } else if (this[_queue].length === 1)
      this[_write](this[_queue].pop())
    else {
      const iovec = this[_queue]
      this[_queue] = []
      writev(this[_fd], iovec, this[_pos],
        (er, bw) => this[_onwrite](er, bw))
    }
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
    }
  }
}

class WriteStreamSync extends WriteStream {
  [_open] () {
    let fd
    // only wrap in a try{} block if we know we'll retry, to avoid
    // the rethrow obscuring the error's source frame in most cases.
    if (this[_defaultFlag] && this[_flags] === 'r+') {
      try {
        fd = fs.openSync(this[_path], this[_flags], this[_mode])
      } catch (er) {
        if (er.code === 'ENOENT') {
          this[_flags] = 'w'
          return this[_open]()
        } else
          throw er
      }
    } else
      fd = fs.openSync(this[_path], this[_flags], this[_mode])

    this[_onopen](null, fd)
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.closeSync(fd)
      this.emit('close')
    }
  }

  [_write] (buf) {
    // throw the original, but try to close if it fails
    let threw = true
    try {
      this[_onwrite](null,
        fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
      threw = false
    } finally {
      if (threw)
        try { this[_close]() } catch (_) {}
    }
  }
}

exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync

exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync
/***/ }),
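// The next module is a bundled copy of the `minipass` package: a lean
// stream implementation that fs-minipass builds on; the Symbol keys below
// keep its internal state off the public property surface.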
var index = state.registry[name]
.map(function (registered) {
return registered.orig;
})
.indexOf(method);
/***/ 2505:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
if (index === -1) {
return;
}
"use strict";
state.registry[name].splice(index, 1);
const proc = typeof process === 'object' && process ? process : {
stdout: null,
stderr: null,
}
const EE = __nccwpck_require__(2361)
const Stream = __nccwpck_require__(2781)
const SD = (__nccwpck_require__(1576).StringDecoder)
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async')
/***/ }),
/***/ 9051:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
const defer = fn => Promise.resolve().then(fn)
"use strict";
// TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|| Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
|| Symbol('iterator not implemented')
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev =>
ev === 'end' ||
ev === 'finish' ||
ev === 'prefinish'
/* istanbul ignore next */
const LCHOWN = fs.lchown ? 'lchown' : 'chown'
/* istanbul ignore next */
const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
const isArrayBuffer = b => b instanceof ArrayBuffer ||
typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0
/* istanbul ignore next */
const needEISDIRHandled = fs.lchown &&
!process.version.match(/v1[1-9]+\./) &&
!process.version.match(/v10\.[6-9]/)
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
const lchownSync = (path, uid, gid) => {
try {
return fs[LCHOWNSYNC](path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
class Pipe {
constructor (src, dest, opts) {
this.src = src
this.dest = dest
this.opts = opts
this.ondrain = () => src[RESUME]()
dest.on('drain', this.ondrain)
}
}
/* istanbul ignore next */
const chownSync = (path, uid, gid) => {
try {
return fs.chownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
unpipe () {
this.dest.removeListener('drain', this.ondrain)
}
// istanbul ignore next - only here for the prototype
proxyErrors () {}
end () {
this.unpipe()
if (this.opts.end)
this.dest.end()
}
}
/* istanbul ignore next */
const handleEISDIR =
needEISDIRHandled ? (path, uid, gid, cb) => er => {
// Node prior to v10 had a very questionable implementation of
// fs.lchown, which would always try to call fs.open on a directory
// Fall back to fs.chown in those cases.
if (!er || er.code !== 'EISDIR')
cb(er)
else
fs.chown(path, uid, gid, cb)
class PipeProxyErrors extends Pipe {
unpipe () {
this.src.removeListener('error', this.proxyErrors)
super.unpipe()
}
: (_, __, ___, cb) => cb
/* istanbul ignore next */
const handleEISDirSync =
needEISDIRHandled ? (path, uid, gid) => {
try {
return lchownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'EISDIR')
throw er
chownSync(path, uid, gid)
}
constructor (src, dest, opts) {
super(src, dest, opts)
this.proxyErrors = er => dest.emit('error', er)
src.on('error', this.proxyErrors)
}
: (path, uid, gid) => lchownSync(path, uid, gid)
// fs.readdir could only accept an options object as of node v6
const nodeVersion = process.version
let readdir = (path, options, cb) => fs.readdir(path, options, cb)
let readdirSync = (path, options) => fs.readdirSync(path, options)
/* istanbul ignore next */
if (/^v4\./.test(nodeVersion))
readdir = (path, options, cb) => fs.readdir(path, cb)
const chown = (cpath, uid, gid, cb) => {
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
// Skip ENOENT error
cb(er && er.code !== 'ENOENT' ? er : null)
}))
}
const chownrKid = (p, child, uid, gid, cb) => {
if (typeof child === 'string')
return fs.lstat(path.resolve(p, child), (er, stats) => {
// Skip ENOENT error
if (er)
return cb(er.code !== 'ENOENT' ? er : null)
stats.name = child
chownrKid(p, stats, uid, gid, cb)
})
if (child.isDirectory()) {
chownr(path.resolve(p, child.name), uid, gid, er => {
if (er)
return cb(er)
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
})
} else {
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
module.exports = class Minipass extends Stream {
constructor (options) {
super()
this[FLOWING] = false
// whether we're explicitly paused
this[PAUSED] = false
this.pipes = []
this.buffer = []
this[OBJECTMODE] = options && options.objectMode || false
if (this[OBJECTMODE])
this[ENCODING] = null
else
this[ENCODING] = options && options.encoding || null
if (this[ENCODING] === 'buffer')
this[ENCODING] = null
this[ASYNC] = options && !!options.async || false
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
this[EOF] = false
this[EMITTED_END] = false
this[EMITTING_END] = false
this[CLOSED] = false
this[EMITTED_ERROR] = null
this.writable = true
this.readable = true
this[BUFFERLENGTH] = 0
this[DESTROYED] = false
}
}
const chownr = (p, uid, gid, cb) => {
readdir(p, { withFileTypes: true }, (er, children) => {
// any error other than ENOTDIR or ENOTSUP means it's not readable,
// or doesn't exist. give up.
if (er) {
if (er.code === 'ENOENT')
return cb()
else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
return cb(er)
}
if (er || !children.length)
return chown(p, uid, gid, cb)
let len = children.length
let errState = null
const then = er => {
if (errState)
return
if (er)
return cb(errState = er)
if (-- len === 0)
return chown(p, uid, gid, cb)
}
get bufferLength () { return this[BUFFERLENGTH] }
children.forEach(child => chownrKid(p, child, uid, gid, then))
})
}
get encoding () { return this[ENCODING] }
set encoding (enc) {
if (this[OBJECTMODE])
throw new Error('cannot set encoding in objectMode')
const chownrKidSync = (p, child, uid, gid) => {
if (typeof child === 'string') {
try {
const stats = fs.lstatSync(path.resolve(p, child))
stats.name = child
child = stats
} catch (er) {
if (er.code === 'ENOENT')
return
else
throw er
if (this[ENCODING] && enc !== this[ENCODING] &&
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
throw new Error('cannot change encoding')
if (this[ENCODING] !== enc) {
this[DECODER] = enc ? new SD(enc) : null
if (this.buffer.length)
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
}
this[ENCODING] = enc
}
if (child.isDirectory())
chownrSync(path.resolve(p, child.name), uid, gid)
setEncoding (enc) {
this.encoding = enc
}
handleEISDirSync(path.resolve(p, child.name), uid, gid)
}
get objectMode () { return this[OBJECTMODE] }
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
const chownrSync = (p, uid, gid) => {
let children
try {
children = readdirSync(p, { withFileTypes: true })
} catch (er) {
if (er.code === 'ENOENT')
return
else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
return handleEISDirSync(p, uid, gid)
else
throw er
}
get ['async'] () { return this[ASYNC] }
set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
if (children && children.length)
children.forEach(child => chownrKidSync(p, child, uid, gid))
write (chunk, encoding, cb) {
if (this[EOF])
throw new Error('write after end')
return handleEISDirSync(p, uid, gid)
}
if (this[DESTROYED]) {
this.emit('error', Object.assign(
new Error('Cannot call write after a stream was destroyed'),
{ code: 'ERR_STREAM_DESTROYED' }
))
return true
}
module.exports = chownr
chownr.sync = chownrSync
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (!encoding)
encoding = 'utf8'
/***/ }),

/***/ 8932:
/***/ ((__unused_webpack_module, exports) => {

"use strict";

Object.defineProperty(exports, "__esModule", ({ value: true }));

class Deprecation extends Error {
  constructor(message) {
    super(message); // Maintains proper stack trace (only available on V8)

    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }

    this.name = 'Deprecation';
  }
}

exports.Deprecation = Deprecation;

/***/ }),
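// Usage sketch (illustrative only): Deprecation is a plain Error subclass,
// typically emitted once per deprecated call site. The message below is
// hypothetical.
//
//   const { Deprecation } = require('deprecation')
//   const warning = new Deprecation('[pkg] someMethod() is deprecated')
//   console.warn(warning.name, warning.message)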
/***/ 7714:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";
const MiniPass = __nccwpck_require__(1077)
const EE = (__nccwpck_require__(2361).EventEmitter)
const fs = __nccwpck_require__(7147)

let writev = fs.writev
/* istanbul ignore next */
if (!writev) {
  // This entire block can be removed if support for earlier than Node.js
  // 12.9.0 is not needed.
  const binding = process.binding('fs')
  const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback

  writev = (fd, iovec, pos, cb) => {
    const done = (er, bw) => cb(er, bw, iovec)
    const req = new FSReqWrap()
    req.oncomplete = done
    binding.writeBuffers(fd, iovec, pos, req)
  }
}

const _autoClose = Symbol('_autoClose')
const _close = Symbol('_close')
const _ended = Symbol('_ended')
const _fd = Symbol('_fd')
const _finished = Symbol('_finished')
const _flags = Symbol('_flags')
const _flush = Symbol('_flush')
const _handleChunk = Symbol('_handleChunk')
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')
const _errored = Symbol('_errored')
class ReadStream extends MiniPass {
  constructor (path, opt) {
    opt = opt || {}
    super(opt)

    this.readable = true
    this.writable = false

    if (typeof path !== 'string')
      throw new TypeError('path must be a string')

    this[_errored] = false
    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
    this[_path] = path
    this[_readSize] = opt.readSize || 16*1024*1024
    this[_reading] = false
    this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
    this[_remain] = this[_size]
    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
      opt.autoClose : true

    if (typeof this[_fd] === 'number')
      this[_read]()
    else
      this[_open]()
  }

  get fd () { return this[_fd] }
  get path () { return this[_path] }
  write () {
    throw new TypeError('this is a readable stream')
  }

  end () {
    throw new TypeError('this is a readable stream')
  }

  [_open] () {
    fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
  }

  [_onopen] (er, fd) {
    if (er)
      this[_onerror](er)
    else {
      this[_fd] = fd
      this.emit('open', fd)
      this[_read]()
    }
  }

  [_makeBuf] () {
    return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
  }

  [_read] () {
    if (!this[_reading]) {
      this[_reading] = true
      const buf = this[_makeBuf]()
      /* istanbul ignore if */
      if (buf.length === 0)
        return process.nextTick(() => this[_onread](null, 0, buf))
      fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
        this[_onread](er, br, buf))
    }
  }

  [_onread] (er, br, buf) {
    this[_reading] = false
    if (er)
      this[_onerror](er)
    else if (this[_handleChunk](br, buf))
      this[_read]()
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
    }
  }

  [_onerror] (er) {
    this[_reading] = true
    this[_close]()
    this.emit('error', er)
  }

  [_handleChunk] (br, buf) {
    let ret = false
    // no effect if infinite
    this[_remain] -= br
    if (br > 0)
      ret = super.write(br < buf.length ? buf.slice(0, br) : buf)

    if (br === 0 || this[_remain] <= 0) {
      ret = false
      this[_close]()
      super.end()
    }

    return ret
  }

  emit (ev, data) {
    switch (ev) {
      case 'prefinish':
      case 'finish':
        break

      case 'drain':
        if (typeof this[_fd] === 'number')
          this[_read]()
        break

      case 'error':
        if (this[_errored])
          return
        this[_errored] = true
        return super.emit(ev, data)

      default:
        return super.emit(ev, data)
    }
  }
}
class ReadStreamSync extends ReadStream {
  [_open] () {
    let threw = true
    try {
      this[_onopen](null, fs.openSync(this[_path], 'r'))
      threw = false
    } finally {
      if (threw)
        this[_close]()
    }
  }

  [_read] () {
    let threw = true
    try {
      if (!this[_reading]) {
        this[_reading] = true
        do {
          const buf = this[_makeBuf]()
          /* istanbul ignore next */
          const br = buf.length === 0 ? 0
            : fs.readSync(this[_fd], buf, 0, buf.length, null)
          if (!this[_handleChunk](br, buf))
            break
        } while (true)
        this[_reading] = false
      }
      threw = false
    } finally {
      if (threw)
        this[_close]()
    }
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.closeSync(fd)
      this.emit('close')
    }
  }
}
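// Usage sketch (illustrative only): ReadStream reads a file into a minipass
// stream without the overhead of fs.ReadStream. The file name is
// hypothetical.
//
//   const src = new ReadStream('input.bin', { readSize: 1024 * 1024 })
//   src.on('data', chunk => process.stdout.write(chunk))
//   src.on('close', () => console.error('done'))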
class WriteStream extends EE {
  constructor (path, opt) {
    opt = opt || {}
    super(opt)
    this.readable = false
    this.writable = true
    this[_errored] = false
    this[_writing] = false
    this[_ended] = false
    this[_needDrain] = false
    this[_queue] = []
    this[_path] = path
    this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
    this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
    this[_pos] = typeof opt.start === 'number' ? opt.start : null
    this[_autoClose] = typeof opt.autoClose === 'boolean' ?
      opt.autoClose : true

    // truncating makes no sense when writing into the middle
    const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
    this[_defaultFlag] = opt.flags === undefined
    this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags

    if (this[_fd] === null)
      this[_open]()
  }

  emit (ev, data) {
    if (ev === 'error') {
      if (this[_errored])
        return
      this[_errored] = true
    }
    return super.emit(ev, data)
  }

  get fd () { return this[_fd] }
  get path () { return this[_path] }

  [_onerror] (er) {
    this[_close]()
    this[_writing] = true
    this.emit('error', er)
  }

  [_open] () {
    fs.open(this[_path], this[_flags], this[_mode],
      (er, fd) => this[_onopen](er, fd))
  }

  [_onopen] (er, fd) {
    if (this[_defaultFlag] &&
        this[_flags] === 'r+' &&
        er && er.code === 'ENOENT') {
      this[_flags] = 'w'
      this[_open]()
    } else if (er)
      this[_onerror](er)
    else {
      this[_fd] = fd
      this.emit('open', fd)
      this[_flush]()
    }
  }

  end (buf, enc) {
    if (buf)
      this.write(buf, enc)

    this[_ended] = true

    // synthetic after-write logic, where drain/finish live
    if (!this[_writing] && !this[_queue].length &&
        typeof this[_fd] === 'number')
      this[_onwrite](null, 0)
    return this
  }

  write (buf, enc) {
    if (typeof buf === 'string')
      buf = Buffer.from(buf, enc)

    if (this[_ended]) {
      this.emit('error', new Error('write() after end()'))
      return false
    }

    if (this[_fd] === null || this[_writing] || this[_queue].length) {
      this[_queue].push(buf)
      this[_needDrain] = true
      return false
    }

    this[_writing] = true
    this[_write](buf)
    return true
  }

  [_write] (buf) {
    fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
      this[_onwrite](er, bw))
  }

  [_onwrite] (er, bw) {
    if (er)
      this[_onerror](er)
    else {
      if (this[_pos] !== null)
        this[_pos] += bw
      if (this[_queue].length)
        this[_flush]()
      else {
        this[_writing] = false

        if (this[_ended] && !this[_finished]) {
          this[_finished] = true
          this[_close]()
          this.emit('finish')
        } else if (this[_needDrain]) {
          this[_needDrain] = false
          this.emit('drain')
        }
      }
    }
  }

  [_flush] () {
    if (this[_queue].length === 0) {
      if (this[_ended])
        this[_onwrite](null, 0)
    } else if (this[_queue].length === 1)
      this[_write](this[_queue].pop())
    else {
      const iovec = this[_queue]
      this[_queue] = []
      writev(this[_fd], iovec, this[_pos],
        (er, bw) => this[_onwrite](er, bw))
    }
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
    }
  }
}

class WriteStreamSync extends WriteStream {
  [_open] () {
    let fd
    // only wrap in a try{} block if we know we'll retry, to avoid
    // the rethrow obscuring the error's source frame in most cases.
    if (this[_defaultFlag] && this[_flags] === 'r+') {
      try {
        fd = fs.openSync(this[_path], this[_flags], this[_mode])
      } catch (er) {
        if (er.code === 'ENOENT') {
          this[_flags] = 'w'
          return this[_open]()
        } else
          throw er
      }
    } else
      fd = fs.openSync(this[_path], this[_flags], this[_mode])

    this[_onopen](null, fd)
  }

  [_close] () {
    if (this[_autoClose] && typeof this[_fd] === 'number') {
      const fd = this[_fd]
      this[_fd] = null
      fs.closeSync(fd)
      this.emit('close')
    }
  }

  [_write] (buf) {
    // throw the original, but try to close if it fails
    let threw = true
    try {
      this[_onwrite](null,
        fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
      threw = false
    } finally {
      if (threw)
        try { this[_close]() } catch (_) {}
    }
  }
}

exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync
exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync
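// Usage sketch (illustrative only): WriteStream queues writes until the fd
// opens, then flushes with writev; 'finish' fires once end() drains the
// queue. The file name is hypothetical.
//
//   const out = new WriteStream('output.log', { flags: 'a' })
//   out.write('hello ')
//   out.end('world\n')
//   out.on('finish', () => console.error('flushed and closed'))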
/***/ }),
......@@ -5915,1041 +6463,1135 @@ exports.isPlainObject = isPlainObject;
/***/ }),

/***/ 1077:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
const proc = typeof process === 'object' && process ? process : {
  stdout: null,
  stderr: null,
}
const EE = __nccwpck_require__(2361)
const Stream = __nccwpck_require__(2781)
const SD = (__nccwpck_require__(1576).StringDecoder)

const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async')

const defer = fn => Promise.resolve().then(fn)

// TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
  || Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
  || Symbol('iterator not implemented')

// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev =>
  ev === 'end' ||
  ev === 'finish' ||
  ev === 'prefinish'

const isArrayBuffer = b => b instanceof ArrayBuffer ||
  typeof b === 'object' &&
  b.constructor &&
  b.constructor.name === 'ArrayBuffer' &&
  b.byteLength >= 0

const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)

class Pipe {
  constructor (src, dest, opts) {
    this.src = src
    this.dest = dest
    this.opts = opts
    this.ondrain = () => src[RESUME]()
    dest.on('drain', this.ondrain)
  }
  unpipe () {
    this.dest.removeListener('drain', this.ondrain)
  }
  // istanbul ignore next - only here for the prototype
  proxyErrors () {}
  end () {
    this.unpipe()
    if (this.opts.end)
      this.dest.end()
  }
}

class PipeProxyErrors extends Pipe {
  unpipe () {
    this.src.removeListener('error', this.proxyErrors)
    super.unpipe()
  }
  constructor (src, dest, opts) {
    super(src, dest, opts)
    this.proxyErrors = er => dest.emit('error', er)
    src.on('error', this.proxyErrors)
  }
}

module.exports = class Minipass extends Stream {
  constructor (options) {
    super()
    this[FLOWING] = false
    // whether we're explicitly paused
    this[PAUSED] = false
    this.pipes = []
    this.buffer = []
    this[OBJECTMODE] = options && options.objectMode || false
    if (this[OBJECTMODE])
      this[ENCODING] = null
    else
      this[ENCODING] = options && options.encoding || null
    if (this[ENCODING] === 'buffer')
      this[ENCODING] = null
    this[ASYNC] = options && !!options.async || false
    this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
    this[EOF] = false
    this[EMITTED_END] = false
    this[EMITTING_END] = false
    this[CLOSED] = false
    this[EMITTED_ERROR] = null
    this.writable = true
    this.readable = true
    this[BUFFERLENGTH] = 0
    this[DESTROYED] = false
  }

  get bufferLength () { return this[BUFFERLENGTH] }

  get encoding () { return this[ENCODING] }
  set encoding (enc) {
    if (this[OBJECTMODE])
      throw new Error('cannot set encoding in objectMode')

    if (this[ENCODING] && enc !== this[ENCODING] &&
        (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
      throw new Error('cannot change encoding')

    if (this[ENCODING] !== enc) {
      this[DECODER] = enc ? new SD(enc) : null
      if (this.buffer.length)
        this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
    }

    this[ENCODING] = enc
  }

  setEncoding (enc) {
    this.encoding = enc
  }

  get objectMode () { return this[OBJECTMODE] }
  set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }

  get ['async'] () { return this[ASYNC] }
  set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }

  write (chunk, encoding, cb) {
    if (this[EOF])
      throw new Error('write after end')

    if (this[DESTROYED]) {
      this.emit('error', Object.assign(
        new Error('Cannot call write after a stream was destroyed'),
        { code: 'ERR_STREAM_DESTROYED' }
      ))
      return true
    }

    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (!encoding)
      encoding = 'utf8'

    const fn = this[ASYNC] ? defer : f => f()

    // convert array buffers and typed array views into buffers
    // at some point in the future, we may want to do the opposite!
    // leave strings and buffers as-is
    // anything else switches us into object mode
    if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
      if (isArrayBufferView(chunk))
        chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
      else if (isArrayBuffer(chunk))
        chunk = Buffer.from(chunk)
      else if (typeof chunk !== 'string')
        // use the setter so we throw if we have encoding set
        this.objectMode = true
    }

    // handle object mode up front, since it's simpler
    // this yields better performance, fewer checks later.
    if (this[OBJECTMODE]) {
      /* istanbul ignore if - maybe impossible? */
      if (this.flowing && this[BUFFERLENGTH] !== 0)
        this[FLUSH](true)

      if (this.flowing)
        this.emit('data', chunk)
      else
        this[BUFFERPUSH](chunk)

      if (this[BUFFERLENGTH] !== 0)
        this.emit('readable')

      if (cb)
        fn(cb)

      return this.flowing
    }

    // at this point the chunk is a buffer or string
    // don't buffer it up or send it to the decoder
    if (!chunk.length) {
      if (this[BUFFERLENGTH] !== 0)
        this.emit('readable')
      if (cb)
        fn(cb)
      return this.flowing
    }

    // fast-path writing strings of same encoding to a stream with
    // an empty buffer, skipping the buffer/decoder dance
    if (typeof chunk === 'string' &&
        // unless it is a string already ready for us to use
        !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
      chunk = Buffer.from(chunk, encoding)
    }

    if (Buffer.isBuffer(chunk) && this[ENCODING])
      chunk = this[DECODER].write(chunk)

    // Note: flushing CAN potentially switch us into not-flowing mode
    if (this.flowing && this[BUFFERLENGTH] !== 0)
      this[FLUSH](true)

    if (this.flowing)
      this.emit('data', chunk)
    else
      this[BUFFERPUSH](chunk)

    if (this[BUFFERLENGTH] !== 0)
      this.emit('readable')

    if (cb)
      fn(cb)

    return this.flowing
  }

  read (n) {
    if (this[DESTROYED])
      return null

    if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
      this[MAYBE_EMIT_END]()
      return null
    }

    if (this[OBJECTMODE])
      n = null

    if (this.buffer.length > 1 && !this[OBJECTMODE]) {
      if (this.encoding)
        this.buffer = [this.buffer.join('')]
      else
        this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
    }

    const ret = this[READ](n || null, this.buffer[0])
    this[MAYBE_EMIT_END]()
    return ret
  }

  [READ] (n, chunk) {
    if (n === chunk.length || n === null)
      this[BUFFERSHIFT]()
    else {
      this.buffer[0] = chunk.slice(n)
      chunk = chunk.slice(0, n)
      this[BUFFERLENGTH] -= n
    }

    this.emit('data', chunk)

    if (!this.buffer.length && !this[EOF])
      this.emit('drain')

    return chunk
  }

  end (chunk, encoding, cb) {
    if (typeof chunk === 'function')
      cb = chunk, chunk = null
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'
    if (chunk)
      this.write(chunk, encoding)
    if (cb)
      this.once('end', cb)
    this[EOF] = true
    this.writable = false

    // if we haven't written anything, then go ahead and emit,
    // even if we're not reading.
    // we'll re-emit if a new 'end' listener is added anyway.
    // This makes MP more suitable to write-only use cases.
    if (this.flowing || !this[PAUSED])
      this[MAYBE_EMIT_END]()

    return this
  }

  // don't let the internal resume be overwritten
  [RESUME] () {
    if (this[DESTROYED])
      return

    this[PAUSED] = false
    this[FLOWING] = true
    this.emit('resume')
    if (this.buffer.length)
      this[FLUSH]()
    else if (this[EOF])
      this[MAYBE_EMIT_END]()
    else
      this.emit('drain')
  }

  resume () {
    return this[RESUME]()
  }

  pause () {
    this[FLOWING] = false
    this[PAUSED] = true
  }

  get destroyed () {
    return this[DESTROYED]
  }

  get flowing () {
    return this[FLOWING]
  }

  get paused () {
    return this[PAUSED]
  }

  [BUFFERPUSH] (chunk) {
    if (this[OBJECTMODE])
      this[BUFFERLENGTH] += 1
    else
      this[BUFFERLENGTH] += chunk.length
    this.buffer.push(chunk)
  }

  [BUFFERSHIFT] () {
    if (this.buffer.length) {
      if (this[OBJECTMODE])
        this[BUFFERLENGTH] -= 1
      else
        this[BUFFERLENGTH] -= this.buffer[0].length
    }
    return this.buffer.shift()
  }

  [FLUSH] (noDrain) {
    do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))

    if (!noDrain && !this.buffer.length && !this[EOF])
      this.emit('drain')
  }

  [FLUSHCHUNK] (chunk) {
    return chunk ? (this.emit('data', chunk), this.flowing) : false
  }

  pipe (dest, opts) {
    if (this[DESTROYED])
      return

    const ended = this[EMITTED_END]
    opts = opts || {}
    if (dest === proc.stdout || dest === proc.stderr)
      opts.end = false
    else
      opts.end = opts.end !== false
    opts.proxyErrors = !!opts.proxyErrors

    // piping an ended stream ends immediately
    if (ended) {
      if (opts.end)
        dest.end()
    } else {
      this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
        : new PipeProxyErrors(this, dest, opts))
      if (this[ASYNC])
        defer(() => this[RESUME]())
      else
        this[RESUME]()
    }

    return dest
  }

  unpipe (dest) {
    const p = this.pipes.find(p => p.dest === dest)
    if (p) {
      this.pipes.splice(this.pipes.indexOf(p), 1)
      p.unpipe()
    }
  }

  addListener (ev, fn) {
    return this.on(ev, fn)
  }

  on (ev, fn) {
    const ret = super.on(ev, fn)
    if (ev === 'data' && !this.pipes.length && !this.flowing)
      this[RESUME]()
    else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
      super.emit('readable')
    else if (isEndish(ev) && this[EMITTED_END]) {
      super.emit(ev)
      this.removeAllListeners(ev)
    } else if (ev === 'error' && this[EMITTED_ERROR]) {
      if (this[ASYNC])
        defer(() => fn.call(this, this[EMITTED_ERROR]))
      else
        fn.call(this, this[EMITTED_ERROR])
    }
    return ret
  }

  get emittedEnd () {
    return this[EMITTED_END]
  }

  [MAYBE_EMIT_END] () {
    if (!this[EMITTING_END] &&
        !this[EMITTED_END] &&
        !this[DESTROYED] &&
        this.buffer.length === 0 &&
        this[EOF]) {
      this[EMITTING_END] = true
      this.emit('end')
      this.emit('prefinish')
      this.emit('finish')
      if (this[CLOSED])
        this.emit('close')
      this[EMITTING_END] = false
    }
  }

  emit (ev, data, ...extra) {
    // error and close are only events allowed after calling destroy()
    if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
      return
    else if (ev === 'data') {
      return !data ? false
        : this[ASYNC] ? defer(() => this[EMITDATA](data))
        : this[EMITDATA](data)
    } else if (ev === 'end') {
      return this[EMITEND]()
    } else if (ev === 'close') {
      this[CLOSED] = true
      // don't emit close before 'end' and 'finish'
      if (!this[EMITTED_END] && !this[DESTROYED])
        return
      const ret = super.emit('close')
      this.removeAllListeners('close')
      return ret
    } else if (ev === 'error') {
      this[EMITTED_ERROR] = data
      const ret = super.emit('error', data)
      this[MAYBE_EMIT_END]()
      return ret
    } else if (ev === 'resume') {
      const ret = super.emit('resume')
      this[MAYBE_EMIT_END]()
      return ret
    } else if (ev === 'finish' || ev === 'prefinish') {
      const ret = super.emit(ev)
      this.removeAllListeners(ev)
      return ret
    }

    // Some other unknown event
    const ret = super.emit(ev, data, ...extra)
    this[MAYBE_EMIT_END]()
    return ret
  }

  [EMITDATA] (data) {
    for (const p of this.pipes) {
      if (p.dest.write(data) === false)
        this.pause()
    }
    const ret = super.emit('data', data)
    this[MAYBE_EMIT_END]()
    return ret
  }

  [EMITEND] () {
    if (this[EMITTED_END])
      return

    this[EMITTED_END] = true
    this.readable = false
    if (this[ASYNC])
      defer(() => this[EMITEND2]())
    else
      this[EMITEND2]()
  }

  [EMITEND2] () {
    if (this[DECODER]) {
      const data = this[DECODER].end()
      if (data) {
        for (const p of this.pipes) {
          p.dest.write(data)
        }
        super.emit('data', data)
      }
    }

    for (const p of this.pipes) {
      p.end()
    }
    const ret = super.emit('end')
    this.removeAllListeners('end')
    return ret
  }

  // const all = await stream.collect()
  collect () {
    const buf = []
    if (!this[OBJECTMODE])
      buf.dataLength = 0
    // set the promise first, in case an error is raised
    // by triggering the flow here.
    const p = this.promise()
    this.on('data', c => {
      buf.push(c)
      if (!this[OBJECTMODE])
        buf.dataLength += c.length
    })
    return p.then(() => buf)
  }

  // const data = await stream.concat()
  concat () {
    return this[OBJECTMODE]
      ? Promise.reject(new Error('cannot concat in objectMode'))
      : this.collect().then(buf =>
          this[OBJECTMODE]
            ? Promise.reject(new Error('cannot concat in objectMode'))
            : this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
  }

  // stream.promise().then(() => done, er => emitted error)
  promise () {
    return new Promise((resolve, reject) => {
      this.on(DESTROYED, () => reject(new Error('stream destroyed')))
      this.on('error', er => reject(er))
      this.on('end', () => resolve())
    })
  }

  // for await (let chunk of stream)
  [ASYNCITERATOR] () {
    const next = () => {
      const res = this.read()
      if (res !== null)
        return Promise.resolve({ done: false, value: res })

      if (this[EOF])
        return Promise.resolve({ done: true })

      let resolve = null
      let reject = null
      const onerr = er => {
        this.removeListener('data', ondata)
        this.removeListener('end', onend)
        reject(er)
      }
      const ondata = value => {
        this.removeListener('error', onerr)
        this.removeListener('end', onend)
        this.pause()
        resolve({ value: value, done: !!this[EOF] })
      }
      const onend = () => {
        this.removeListener('error', onerr)
        this.removeListener('data', ondata)
        resolve({ done: true })
      }
      const ondestroy = () => onerr(new Error('stream destroyed'))
      return new Promise((res, rej) => {
        reject = rej
        resolve = res
        this.once(DESTROYED, ondestroy)
        this.once('error', onerr)
        this.once('end', onend)
        this.once('data', ondata)
      })
    }

    return { next }
  }

  // for (let chunk of stream)
  [ITERATOR] () {
    const next = () => {
      const value = this.read()
      const done = value === null
      return { value, done }
    }
    return { next }
  }

  destroy (er) {
    if (this[DESTROYED]) {
      if (er)
        this.emit('error', er)
      else
        this.emit(DESTROYED)
      return this
    }

    this[DESTROYED] = true

    // throw away all buffered data, it's never coming out
    this.buffer.length = 0
    this[BUFFERLENGTH] = 0

    if (typeof this.close === 'function' && !this[CLOSED])
      this.close()

    if (er)
      this.emit('error', er)
    else // if no error to emit, still reject pending promises
      this.emit(DESTROYED)

    return this
  }

  static isStream (s) {
    return !!s && (s instanceof Minipass || s instanceof Stream ||
      s instanceof EE && (
        typeof s.pipe === 'function' || // readable
        (typeof s.write === 'function' && typeof s.end === 'function') // writable
      ))
  }
}

/***/ }),
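// Usage sketch (illustrative only): Minipass is a synchronous-by-default
// stream; write/end/pipe deliver data on the same tick unless constructed
// with { async: true }.
//
//   const Minipass = require('minipass')
//   const mp = new Minipass({ encoding: 'utf8' })
//   mp.write('foo')
//   mp.end('bar')
//   mp.concat().then(all => console.log(all)) // 'foobar'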
/***/ 6769:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
// Update with any zlib constants that are added or changed in the future.
// Node v6 didn't export this, so we just hard code the version and rely
// on all the other hard-coded values from zlib v4736. When node v6
// support drops, we can just export the realZlibConstants object.
const realZlibConstants = (__nccwpck_require__(9796).constants) ||
/* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
module.exports = Object.freeze(Object.assign(Object.create(null), {
Z_NO_FLUSH: 0,
Z_PARTIAL_FLUSH: 1,
Z_SYNC_FLUSH: 2,
Z_FULL_FLUSH: 3,
Z_FINISH: 4,
Z_BLOCK: 5,
Z_OK: 0,
Z_STREAM_END: 1,
Z_NEED_DICT: 2,
Z_ERRNO: -1,
Z_STREAM_ERROR: -2,
Z_DATA_ERROR: -3,
Z_MEM_ERROR: -4,
Z_BUF_ERROR: -5,
Z_VERSION_ERROR: -6,
Z_NO_COMPRESSION: 0,
Z_BEST_SPEED: 1,
Z_BEST_COMPRESSION: 9,
Z_DEFAULT_COMPRESSION: -1,
Z_FILTERED: 1,
Z_HUFFMAN_ONLY: 2,
Z_RLE: 3,
Z_FIXED: 4,
Z_DEFAULT_STRATEGY: 0,
DEFLATE: 1,
INFLATE: 2,
GZIP: 3,
GUNZIP: 4,
DEFLATERAW: 5,
INFLATERAW: 6,
UNZIP: 7,
BROTLI_DECODE: 8,
BROTLI_ENCODE: 9,
Z_MIN_WINDOWBITS: 8,
Z_MAX_WINDOWBITS: 15,
Z_DEFAULT_WINDOWBITS: 15,
Z_MIN_CHUNK: 64,
Z_MAX_CHUNK: Infinity,
Z_DEFAULT_CHUNK: 16384,
Z_MIN_MEMLEVEL: 1,
Z_MAX_MEMLEVEL: 9,
Z_DEFAULT_MEMLEVEL: 8,
Z_MIN_LEVEL: -1,
Z_MAX_LEVEL: 9,
Z_DEFAULT_LEVEL: -1,
BROTLI_OPERATION_PROCESS: 0,
BROTLI_OPERATION_FLUSH: 1,
BROTLI_OPERATION_FINISH: 2,
BROTLI_OPERATION_EMIT_METADATA: 3,
BROTLI_MODE_GENERIC: 0,
BROTLI_MODE_TEXT: 1,
BROTLI_MODE_FONT: 2,
BROTLI_DEFAULT_MODE: 0,
BROTLI_MIN_QUALITY: 0,
BROTLI_MAX_QUALITY: 11,
BROTLI_DEFAULT_QUALITY: 11,
BROTLI_MIN_WINDOW_BITS: 10,
BROTLI_MAX_WINDOW_BITS: 24,
BROTLI_LARGE_MAX_WINDOW_BITS: 30,
BROTLI_DEFAULT_WINDOW: 22,
BROTLI_MIN_INPUT_BLOCK_BITS: 16,
BROTLI_MAX_INPUT_BLOCK_BITS: 24,
BROTLI_PARAM_MODE: 0,
BROTLI_PARAM_QUALITY: 1,
BROTLI_PARAM_LGWIN: 2,
BROTLI_PARAM_LGBLOCK: 3,
BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
BROTLI_PARAM_SIZE_HINT: 5,
BROTLI_PARAM_LARGE_WINDOW: 6,
BROTLI_PARAM_NPOSTFIX: 7,
BROTLI_PARAM_NDIRECT: 8,
BROTLI_DECODER_RESULT_ERROR: 0,
BROTLI_DECODER_RESULT_SUCCESS: 1,
BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
BROTLI_DECODER_NO_ERROR: 0,
BROTLI_DECODER_SUCCESS: 1,
BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
BROTLI_DECODER_ERROR_UNREACHABLE: -31,
}, realZlibConstants))
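// Usage sketch (illustrative only): these mirror require('zlib').constants,
// frozen and null-prototyped so the hard-coded fallbacks still work on very
// old Node versions.
//
//   const { constants } = require('minizlib')
//   constants.Z_BEST_COMPRESSION // 9
//   constants.BROTLI_MAX_QUALITY // 11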
/***/ }),
/***/ 3486:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {

"use strict";

const assert = __nccwpck_require__(9491)
const Buffer = (__nccwpck_require__(4300).Buffer)
const realZlib = __nccwpck_require__(9796)

const constants = exports.constants = __nccwpck_require__(6769)
const Minipass = __nccwpck_require__(1077)

const OriginalBufferConcat = Buffer.concat

const _superWrite = Symbol('_superWrite')
class ZlibError extends Error {
  constructor (err) {
    super('zlib: ' + err.message)
    this.code = err.code
    this.errno = err.errno
    /* istanbul ignore if */
    if (!this.code)
      this.code = 'ZLIB_ERROR'

    this.message = 'zlib: ' + err.message
    Error.captureStackTrace(this, this.constructor)
  }

  get name () {
    return 'ZlibError'
  }
}

// the Zlib class they all inherit from
// This thing manages the queue of requests, and returns
// true or false if there is anything in the queue when
// you call the .write() method.
const _opts = Symbol('opts')
const _flushFlag = Symbol('flushFlag')
const _finishFlushFlag = Symbol('finishFlushFlag')
const _fullFlushFlag = Symbol('fullFlushFlag')
const _handle = Symbol('handle')
const _onError = Symbol('onError')
const _sawError = Symbol('sawError')
const _level = Symbol('level')
const _strategy = Symbol('strategy')
const _ended = Symbol('ended')
const _defaultFullFlush = Symbol('_defaultFullFlush')

class ZlibBase extends Minipass {
  constructor (opts, mode) {
    if (!opts || typeof opts !== 'object')
      throw new TypeError('invalid options for ZlibBase constructor')

    super(opts)
    this[_sawError] = false
    this[_ended] = false
    this[_opts] = opts

    this[_flushFlag] = opts.flush
    this[_finishFlushFlag] = opts.finishFlush
    // this will throw if any options are invalid for the class selected
    try {
      this[_handle] = new realZlib[mode](opts)
    } catch (er) {
      // make sure that all errors get decorated properly
      throw new ZlibError(er)
    }

    this[_onError] = (err) => {
      // no sense raising multiple errors, since we abort on the first one.
      if (this[_sawError])
        return

      this[_sawError] = true

      // there is no way to cleanly recover.
      // continuing only obscures problems.
      this.close()
      this.emit('error', err)
    }

    this[_handle].on('error', er => this[_onError](new ZlibError(er)))
    this.once('end', () => this.close)
  }

  close () {
    if (this[_handle]) {
      this[_handle].close()
      this[_handle] = null
      this.emit('close')
    }
  }

  reset () {
    if (!this[_sawError]) {
      assert(this[_handle], 'zlib binding closed')
      return this[_handle].reset()
    }
  }

  flush (flushFlag) {
    if (this.ended)
      return

    if (typeof flushFlag !== 'number')
      flushFlag = this[_fullFlushFlag]
    this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
  }

  end (chunk, encoding, cb) {
    if (chunk)
      this.write(chunk, encoding)
    this.flush(this[_finishFlushFlag])
    this[_ended] = true
    return super.end(null, null, cb)
  }

  get ended () {
    return this[_ended]
  }

  write (chunk, encoding, cb) {
    // process the chunk using the sync process
    // then super.write() all the outputted chunks
    if (typeof encoding === 'function')
      cb = encoding, encoding = 'utf8'

    if (typeof chunk === 'string')
      chunk = Buffer.from(chunk, encoding)

    if (this[_sawError])
      return
    assert(this[_handle], 'zlib binding closed')

    // _processChunk tries to .close() the native handle after it's done, so we
    // intercept that by temporarily making it a no-op.
    const nativeHandle = this[_handle]._handle
    const originalNativeClose = nativeHandle.close
    nativeHandle.close = () => {}
    const originalClose = this[_handle].close
    this[_handle].close = () => {}
    // It also calls `Buffer.concat()` at the end, which may be convenient
    // for some, but which we are not interested in as it slows us down.
    Buffer.concat = (args) => args
    let result
    try {
      const flushFlag = typeof chunk[_flushFlag] === 'number'
        ? chunk[_flushFlag] : this[_flushFlag]
      result = this[_handle]._processChunk(chunk, flushFlag)
      // if we don't throw, reset it back how it was
      Buffer.concat = OriginalBufferConcat
    } catch (err) {
      // or if we do, put Buffer.concat() back before we emit error
      // Error events call into user code, which may call Buffer.concat()
      Buffer.concat = OriginalBufferConcat
      this[_onError](new ZlibError(err))
    } finally {
      if (this[_handle]) {
        // Core zlib resets `_handle` to null after attempting to close the
        // native handle. Our no-op handler prevented actual closure, but we
        // need to restore the `._handle` property.
        this[_handle]._handle = nativeHandle
        nativeHandle.close = originalNativeClose
        this[_handle].close = originalClose
        // `_processChunk()` adds an 'error' listener. If we don't remove it
        // after each call, these handlers start piling up.
        this[_handle].removeAllListeners('error')
        // make sure OUR error listener is still attached tho
      }
    }

    if (this[_handle])
      this[_handle].on('error', er => this[_onError](new ZlibError(er)))

    let writeReturn
    if (result) {
      if (Array.isArray(result) && result.length > 0) {
        // The first buffer is always `handle._outBuffer`, which would be
        // re-used for later invocations; so, we always have to copy that one.
        writeReturn = this[_superWrite](Buffer.from(result[0]))
        for (let i = 1; i < result.length; i++) {
          writeReturn = this[_superWrite](result[i])
        }
      } else {
        writeReturn = this[_superWrite](Buffer.from(result))
      }
    }

    if (cb)
      cb()
    return writeReturn
  }

  [_superWrite] (data) {
    return super.write(data)
  }
}

class Zlib extends ZlibBase {
  constructor (opts, mode) {
    opts = opts || {}

    opts.flush = opts.flush || constants.Z_NO_FLUSH
    opts.finishFlush = opts.finishFlush || constants.Z_FINISH
    super(opts, mode)

    this[_fullFlushFlag] = constants.Z_FULL_FLUSH
    this[_level] = opts.level
    this[_strategy] = opts.strategy
  }

  params (level, strategy) {
    if (this[_sawError])
      return

    if (!this[_handle])
      throw new Error('cannot switch params when binding is closed')

    // no way to test this without also not supporting params at all
    /* istanbul ignore if */
    if (!this[_handle].params)
      throw new Error('not supported in this implementation')

    if (this[_level] !== level || this[_strategy] !== strategy) {
      this.flush(constants.Z_SYNC_FLUSH)
      assert(this[_handle], 'zlib binding closed')
      // .params() calls .flush(), but the latter is always async in the
      // core zlib. We override .flush() temporarily to intercept that and
      // flush synchronously.
      const origFlush = this[_handle].flush
      this[_handle].flush = (flushFlag, cb) => {
        this.flush(flushFlag)
        cb()
      }
      try {
        this[_handle].params(level, strategy)
      } finally {
        this[_handle].flush = origFlush
      }
      /* istanbul ignore else */
      if (this[_handle]) {
        this[_level] = level
        this[_strategy] = strategy
      }
    }
  }
}

// minimal 2-byte header
class Deflate extends Zlib {
  constructor (opts) {
    super(opts, 'Deflate')
  }
}

class Inflate extends Zlib {
  constructor (opts) {
    super(opts, 'Inflate')
  }
}

// gzip - bigger header, same deflate compression
const _portable = Symbol('_portable')
class Gzip extends Zlib {
  constructor (opts) {
    super(opts, 'Gzip')
    this[_portable] = opts && !!opts.portable
  }

  [_superWrite] (data) {
    if (!this[_portable])
      return super[_superWrite](data)

    // we'll always get the header emitted in one first chunk
    // overwrite the OS indicator byte with 0xFF
    this[_portable] = false
    data[9] = 255
    return super[_superWrite](data)
  }
}

class Gunzip extends Zlib {
  constructor (opts) {
    super(opts, 'Gunzip')
  }
}

// raw - no header
class DeflateRaw extends Zlib {
  constructor (opts) {
    super(opts, 'DeflateRaw')
  }
}

class InflateRaw extends Zlib {
  constructor (opts) {
    super(opts, 'InflateRaw')
  }
}

// auto-detect header.
class Unzip extends Zlib {
  constructor (opts) {
    super(opts, 'Unzip')
  }
}

class Brotli extends ZlibBase {
  constructor (opts, mode) {
    opts = opts || {}

    opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
    opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH

    super(opts, mode)

    this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
  }
}

class BrotliCompress extends Brotli {
  constructor (opts) {
    super(opts, 'BrotliCompress')
  }
}

class BrotliDecompress extends Brotli {
  constructor (opts) {
    super(opts, 'BrotliDecompress')
  }
}

exports.Deflate = Deflate
exports.Inflate = Inflate
exports.Gzip = Gzip
exports.Gunzip = Gunzip
exports.DeflateRaw = DeflateRaw
exports.InflateRaw = InflateRaw
exports.Unzip = Unzip
/* istanbul ignore else */
if (typeof realZlib.BrotliCompress === 'function') {
  exports.BrotliCompress = BrotliCompress
  exports.BrotliDecompress = BrotliDecompress
} else {
  exports.BrotliCompress = exports.BrotliDecompress = class {
    constructor () {
      throw new Error('Brotli is not supported in this version of Node.js')
    }
  }
}

/***/ }),
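// Usage sketch (illustrative only): minizlib compresses synchronously on
// write, so 'data' events fire on the same tick; the input below is a
// hypothetical buffer.
//
//   const { Gzip } = require('minizlib')
//   const gz = new Gzip({ portable: true }) // 0xFF OS byte for stable output
//   const chunks = []
//   gz.on('data', c => chunks.push(c))
//   gz.end(Buffer.from('hello'))
//   // Buffer.concat(chunks) is now a gzip stream of 'hello'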
addListener (ev, fn) {
return this.on(ev, fn)
}
if (this[_level] !== level || this[_strategy] !== strategy) {
this.flush(constants.Z_SYNC_FLUSH)
assert(this[_handle], 'zlib binding closed')
// .params() calls .flush(), but the latter is always async in the
// core zlib. We override .flush() temporarily to intercept that and
// flush synchronously.
const origFlush = this[_handle].flush
this[_handle].flush = (flushFlag, cb) => {
this.flush(flushFlag)
cb()
}
try {
this[_handle].params(level, strategy)
} finally {
this[_handle].flush = origFlush
}
/* istanbul ignore else */
if (this[_handle]) {
this[_level] = level
this[_strategy] = strategy
}
on (ev, fn) {
const ret = super.on(ev, fn)
if (ev === 'data' && !this.pipes.length && !this.flowing)
this[RESUME]()
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
super.emit('readable')
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev)
this.removeAllListeners(ev)
} else if (ev === 'error' && this[EMITTED_ERROR]) {
if (this[ASYNC])
defer(() => fn.call(this, this[EMITTED_ERROR]))
else
fn.call(this, this[EMITTED_ERROR])
}
return ret
}
}
// minimal 2-byte header
class Deflate extends Zlib {
constructor (opts) {
super(opts, 'Deflate')
get emittedEnd () {
return this[EMITTED_END]
}
}
class Inflate extends Zlib {
constructor (opts) {
super(opts, 'Inflate')
[MAYBE_EMIT_END] () {
if (!this[EMITTING_END] &&
!this[EMITTED_END] &&
!this[DESTROYED] &&
this.buffer.length === 0 &&
this[EOF]) {
this[EMITTING_END] = true
this.emit('end')
this.emit('prefinish')
this.emit('finish')
if (this[CLOSED])
this.emit('close')
this[EMITTING_END] = false
}
}
}
// gzip - bigger header, same deflate compression
const _portable = Symbol('_portable')
class Gzip extends Zlib {
constructor (opts) {
super(opts, 'Gzip')
this[_portable] = opts && !!opts.portable
}
emit (ev, data, ...extra) {
// error and close are only events allowed after calling destroy()
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
return
else if (ev === 'data') {
return !data ? false
: this[ASYNC] ? defer(() => this[EMITDATA](data))
: this[EMITDATA](data)
} else if (ev === 'end') {
return this[EMITEND]()
} else if (ev === 'close') {
this[CLOSED] = true
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return
const ret = super.emit('close')
this.removeAllListeners('close')
return ret
} else if (ev === 'error') {
this[EMITTED_ERROR] = data
const ret = super.emit('error', data)
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'resume') {
const ret = super.emit('resume')
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev)
this.removeAllListeners(ev)
return ret
}
[_superWrite] (data) {
if (!this[_portable])
return super[_superWrite](data)
// Some other unknown event
const ret = super.emit(ev, data, ...extra)
this[MAYBE_EMIT_END]()
return ret
}
// we'll always get the header emitted in one first chunk
// overwrite the OS indicator byte with 0xFF
this[_portable] = false
data[9] = 255
return super[_superWrite](data)
[EMITDATA] (data) {
for (const p of this.pipes) {
if (p.dest.write(data) === false)
this.pause()
}
const ret = super.emit('data', data)
this[MAYBE_EMIT_END]()
return ret
}
}
class Gunzip extends Zlib {
constructor (opts) {
super(opts, 'Gunzip')
[EMITEND] () {
if (this[EMITTED_END])
return
this[EMITTED_END] = true
this.readable = false
if (this[ASYNC])
defer(() => this[EMITEND2]())
else
this[EMITEND2]()
}
}
// raw - no header
class DeflateRaw extends Zlib {
constructor (opts) {
super(opts, 'DeflateRaw')
[EMITEND2] () {
if (this[DECODER]) {
const data = this[DECODER].end()
if (data) {
for (const p of this.pipes) {
p.dest.write(data)
}
super.emit('data', data)
}
}
for (const p of this.pipes) {
p.end()
}
const ret = super.emit('end')
this.removeAllListeners('end')
return ret
}
}
class InflateRaw extends Zlib {
constructor (opts) {
super(opts, 'InflateRaw')
// const all = await stream.collect()
collect () {
const buf = []
if (!this[OBJECTMODE])
buf.dataLength = 0
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise()
this.on('data', c => {
buf.push(c)
if (!this[OBJECTMODE])
buf.dataLength += c.length
})
return p.then(() => buf)
}
}
// auto-detect header.
class Unzip extends Zlib {
constructor (opts) {
super(opts, 'Unzip')
// const data = await stream.concat()
concat () {
return this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
}
}
class Brotli extends ZlibBase {
constructor (opts, mode) {
opts = opts || {}
// stream.promise().then(() => done, er => emitted error)
promise () {
return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
this.on('error', er => reject(er))
this.on('end', () => resolve())
})
}
opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
// for await (let chunk of stream)
[ASYNCITERATOR] () {
const next = () => {
const res = this.read()
if (res !== null)
return Promise.resolve({ done: false, value: res })
super(opts, mode)
if (this[EOF])
return Promise.resolve({ done: true })
this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
}
}
let resolve = null
let reject = null
const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
resolve({ done: true })
}
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)
this.once('end', onend)
this.once('data', ondata)
})
}
class BrotliCompress extends Brotli {
constructor (opts) {
super(opts, 'BrotliCompress')
return { next }
}
}
class BrotliDecompress extends Brotli {
constructor (opts) {
super(opts, 'BrotliDecompress')
// for (let chunk of stream)
[ITERATOR] () {
const next = () => {
const value = this.read()
const done = value === null
return { value, done }
}
return { next }
}
}
exports.Deflate = Deflate
exports.Inflate = Inflate
exports.Gzip = Gzip
exports.Gunzip = Gunzip
exports.DeflateRaw = DeflateRaw
exports.InflateRaw = InflateRaw
exports.Unzip = Unzip
/* istanbul ignore else */
if (typeof realZlib.BrotliCompress === 'function') {
exports.BrotliCompress = BrotliCompress
exports.BrotliDecompress = BrotliDecompress
} else {
exports.BrotliCompress = exports.BrotliDecompress = class {
constructor () {
throw new Error('Brotli is not supported in this version of Node.js')
destroy (er) {
if (this[DESTROYED]) {
if (er)
this.emit('error', er)
else
this.emit(DESTROYED)
return this
}
this[DESTROYED] = true
// throw away all buffered data, it's never coming out
this.buffer.length = 0
this[BUFFERLENGTH] = 0
if (typeof this.close === 'function' && !this[CLOSED])
this.close()
if (er)
this.emit('error', er)
else // if no error to emit, still reject pending promises
this.emit(DESTROYED)
return this
}
static isStream (s) {
return !!s && (s instanceof Minipass || s instanceof Stream ||
s instanceof EE && (
typeof s.pipe === 'function' || // readable
(typeof s.write === 'function' && typeof s.end === 'function') // writable
))
}
}
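// Usage sketch (illustrative, not part of the bundle): the classes above
// mirror require('zlib') but compress synchronously on top of Minipass:
//
//   const gz = new Gzip({ portable: true })   // portable: OS byte forced to 0xFF
//   const chunks = []
//   gz.on('data', c => chunks.push(c))
//   gz.end(Buffer.from('hello'))              // chunks emit before end() returns
//   new Unzip().end(Buffer.concat(chunks))    // Unzip sniffs the gzip header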
......@@ -11856,3601 +12498,4258 @@ const pos = (buf) => {
const onesComp = byte => (0xff ^ byte) & 0xff
const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
module.exports = {
encode,
parse,
}
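// Example (sketch): tar headers store numbers as octal ASCII; values too wide
// for the field fall back to this base-256 encoding. A leading 0x80 marks a
// positive number (read by pos()), 0xff a negative one (onesComp/twosComp):
//
//   const buf = Buffer.alloc(12)
//   encode(8589934592, buf)   // buf[0] === 0x80, value in big-endian bytes
//   parse(buf)                // => 8589934592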
/***/ }),
/***/ 1525:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?
// tar -t
const hlo = __nccwpck_require__(5274)
const Parser = __nccwpck_require__(8917)
const fs = __nccwpck_require__(7147)
const fsm = __nccwpck_require__(7714)
const path = __nccwpck_require__(1017)
const stripSlash = __nccwpck_require__(8886)
module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
if (!opt.noResume)
onentryFunction(opt)
return opt.file && opt.sync ? listFileSync(opt)
: opt.file ? listFile(opt, cb)
: list(opt)
}
const onentryFunction = opt => {
const onentry = opt.onentry
opt.onentry = onentry ? e => {
onentry(e)
e.resume()
} : e => e.resume()
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [stripSlash(f), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
: file => mapHas(stripSlash(file))
}
const listFileSync = opt => {
const p = list(opt)
const file = opt.file
let threw = true
let fd
try {
const stat = fs.statSync(file)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
if (stat.size < readSize)
p.end(fs.readFileSync(file))
else {
let pos = 0
const buf = Buffer.allocUnsafe(readSize)
fd = fs.openSync(file, 'r')
while (pos < stat.size) {
const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
pos += bytesRead
p.write(buf.slice(0, bytesRead))
}
p.end()
}
threw = false
} finally {
if (threw && fd) {
try {
fs.closeSync(fd)
} catch (er) {}
}
}
}
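// Note (illustrative): with { sync: true, file }, the loop above streams big
// archives through one reusable buffer instead of reading the whole file.
// Calling this module's export (i.e. tar.t) like
//
//   t({ sync: true, file: 'big.tar', maxReadSize: 1024 * 1024,
//       onentry: entry => console.log(entry.path) })
//
// keeps peak memory near maxReadSize (16 MB default), not the archive size.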
const listFile = (opt, cb) => {
const parse = new Parser(opt)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
const file = opt.file
const p = new Promise((resolve, reject) => {
parse.on('error', reject)
parse.on('end', resolve)
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size,
})
stream.on('error', reject)
stream.pipe(parse)
}
})
})
return cb ? p.then(cb, cb) : p
}
const list = opt => new Parser(opt)
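// Usage sketch: this module is tar.t(). With a file it resolves when parsing
// completes; without one it returns the Parser, so input can be piped in:
//
//   const t = __nccwpck_require__(1525)
//   t({ file: 'archive.tar', onentry: entry => console.log(entry.path) })
//   process.stdin.pipe(t({ onentry: entry => console.log(entry.path) }))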
/***/ }),
/***/ 9624:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// wrapper around mkdirp for tar's needs.
// TODO: This should probably be a class, not functionally
// passing around state in a gazillion args.
const mkdirp = __nccwpck_require__(6186)
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
const chownr = __nccwpck_require__(9051)
const normPath = __nccwpck_require__(6843)
class SymlinkError extends Error {
constructor (symlink, path) {
super('Cannot extract through symbolic link')
this.path = path
this.symlink = symlink
}
get name () {
return 'SymlinkError'
}
}
class CwdError extends Error {
constructor (path, code) {
super(code + ': Cannot cd into \'' + path + '\'')
this.path = path
this.code = code
}
get name () {
return 'CwdError'
}
}
const cGet = (cache, key) => cache.get(normPath(key))
const cSet = (cache, key, val) => cache.set(normPath(key), val)
const checkCwd = (dir, cb) => {
fs.stat(dir, (er, st) => {
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
cb(er)
})
}
module.exports = (dir, opt, cb) => {
dir = normPath(dir)
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
(uid !== opt.processUid || gid !== opt.processGid)
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = normPath(opt.cwd)
const done = (er, created) => {
if (er)
cb(er)
else {
cSet(cache, dir, true)
if (created && doChown)
chownr(created, uid, gid, er => done(er))
else if (needChmod)
fs.chmod(dir, mode, cb)
else
cb()
}
}
if (cache && cGet(cache, dir) === true)
return done()
if (dir === cwd)
return checkCwd(dir, done)
if (preserve)
return mkdirp(dir, {mode}).then(made => done(null, made), done)
const sub = normPath(path.relative(cwd, dir))
const parts = sub.split('/')
mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
}
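// Usage sketch (illustrative): unpack calls this wrapper for every directory
// it needs, sharing one cache across the whole extraction:
//
//   const mkdir = __nccwpck_require__(9624)
//   const cache = new Map()
//   mkdir('/tmp/unpack/a/b', { cwd: '/tmp/unpack', mode: 0o755, umask: 0o22,
//     cache, preserve: false, unlink: false }, er => {})
//
// Cached directories are skipped; uid/gid trigger a chownr of created dirs.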
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
if (!parts.length)
return cb(null, created)
const p = parts.shift()
const part = normPath(path.resolve(base + '/' + p))
if (cGet(cache, part))
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
}
const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
  if (er) {
    fs.lstat(part, (statEr, st) => {
      if (statEr) {
        statEr.path = statEr.path && normPath(statEr.path)
        cb(statEr)
      } else if (st.isDirectory())
        mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
      else if (unlink) {
        fs.unlink(part, er => {
          if (er)
            return cb(er)
          fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
        })
      } else if (st.isSymbolicLink())
        return cb(new SymlinkError(part, part + '/' + parts.join('/')))
      else
        cb(er)
    })
  } else {
    created = created || part
    mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
  }
}
const checkCwdSync = dir => {
  let ok = false
  let code = 'ENOTDIR'
  try {
    ok = fs.statSync(dir).isDirectory()
  } catch (er) {
    code = er.code
  } finally {
    if (!ok)
      throw new CwdError(dir, code)
  }
}
module.exports.sync = (dir, opt) => {
  dir = normPath(dir)
  // if there's any overlap between mask and mode,
  // then we'll need an explicit chmod
  const umask = opt.umask
  const mode = opt.mode | 0o0700
  const needChmod = (mode & umask) !== 0
  const uid = opt.uid
  const gid = opt.gid
  const doChown = typeof uid === 'number' &&
    typeof gid === 'number' &&
    (uid !== opt.processUid || gid !== opt.processGid)
  const preserve = opt.preserve
  const unlink = opt.unlink
  const cache = opt.cache
  const cwd = normPath(opt.cwd)
  const done = (created) => {
    cSet(cache, dir, true)
    if (created && doChown)
      chownr.sync(created, uid, gid)
    if (needChmod)
      fs.chmodSync(dir, mode)
  }
  if (cache && cGet(cache, dir) === true)
    return done()
  if (dir === cwd) {
    checkCwdSync(cwd)
    return done()
  }
  if (preserve)
    return done(mkdirp.sync(dir, mode))
  const sub = normPath(path.relative(cwd, dir))
  const parts = sub.split('/')
  let created = null
  for (let p = parts.shift(), part = cwd;
    p && (part += '/' + p);
    p = parts.shift()) {
    part = normPath(path.resolve(part))
    if (cGet(cache, part))
      continue
    try {
      fs.mkdirSync(part, mode)
      created = created || part
      cSet(cache, part, true)
    } catch (er) {
      const st = fs.lstatSync(part)
      if (st.isDirectory()) {
        cSet(cache, part, true)
        continue
      } else if (unlink) {
        fs.unlinkSync(part)
        fs.mkdirSync(part, mode)
        created = created || part
        cSet(cache, part, true)
        continue
      } else if (st.isSymbolicLink())
        return new SymlinkError(part, part + '/' + parts.join('/'))
    }
  }
  return done(created)
}
/***/ }),
/***/ 8371:
/***/ ((module) => {
"use strict";
module.exports = (mode, isDir, portable) => {
  mode &= 0o7777
  // in portable mode, use the minimum reasonable umask
  // if this system creates files with 0o664 by default
  // (as some linux distros do), then we'll write the
  // archive with 0o644 instead. Also, don't ever create
  // a file that is not readable/writable by the owner.
  if (portable)
    mode = (mode | 0o600) & ~0o22
  // if dirs are readable, then they should be listable
  if (isDir) {
    if (mode & 0o400)
      mode |= 0o100
    if (mode & 0o40)
      mode |= 0o10
    if (mode & 0o4)
      mode |= 0o1
  }
  return mode
}
/***/ }),
/***/ 7118:
/***/ ((module) => {
// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
const normalizeCache = Object.create(null)
const {hasOwnProperty} = Object.prototype
module.exports = s => {
  if (!hasOwnProperty.call(normalizeCache, s))
    normalizeCache[s] = s.normalize('NFKD')
  return normalizeCache[s]
}
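// Example: thanks to the cache, equivalent spellings of a path normalize to
// the same key and only pay for String.prototype.normalize once:
//
//   const normalize = __nccwpck_require__(7118)
//   normalize('caf\u00e9') === normalize('cafe\u0301')   // true (both NFKD)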
/***/ }),
/***/ 6843:
/***/ ((module) => {
// on windows, either \ or / are valid directory separators.
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
module.exports = platform !== 'win32' ? p => p
: p => p && p.replace(/\\/g, '/')
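// Example: normalization is a no-op except on win32:
//
//   const normPath = __nccwpck_require__(6843)
//   normPath('a\\b\\c')   // 'a/b/c' on Windows, 'a\\b\\c' elsewhere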
/***/ }),
/***/ 7900:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() return `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
class PackJob {
  constructor (path, absolute) {
    this.path = path || './'
    this.absolute = absolute
    this.entry = null
    this.stat = null
    this.readdir = null
    this.pending = false
    this.ignore = false
    this.piped = false
  }
}
const MiniPass = __nccwpck_require__(6684)
const zlib = __nccwpck_require__(3486)
const ReadEntry = __nccwpck_require__(8116)
const WriteEntry = __nccwpck_require__(5450)
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = __nccwpck_require__(665)
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
const warner = __nccwpck_require__(5899)
const normPath = __nccwpck_require__(6843)
const Pack = warner(class Pack extends MiniPass {
  constructor (opt) {
    super(opt)
    opt = opt || Object.create(null)
    this.opt = opt
    this.file = opt.file || ''
    this.cwd = opt.cwd || process.cwd()
    this.maxReadSize = opt.maxReadSize
    this.preservePaths = !!opt.preservePaths
    this.strict = !!opt.strict
    this.noPax = !!opt.noPax
    this.prefix = normPath(opt.prefix || '')
    this.linkCache = opt.linkCache || new Map()
    this.statCache = opt.statCache || new Map()
    this.readdirCache = opt.readdirCache || new Map()
    this[WRITEENTRYCLASS] = WriteEntry
    if (typeof opt.onwarn === 'function')
      this.on('warn', opt.onwarn)
    this.portable = !!opt.portable
    this.zip = null
    if (opt.gzip) {
      if (typeof opt.gzip !== 'object')
        opt.gzip = {}
      if (this.portable)
        opt.gzip.portable = true
      this.zip = new zlib.Gzip(opt.gzip)
      this.zip.on('data', chunk => super.write(chunk))
      this.zip.on('end', _ => super.end())
      this.zip.on('drain', _ => this[ONDRAIN]())
      this.on('resume', _ => this.zip.resume())
    } else
      this.on('drain', this[ONDRAIN])
    this.noDirRecurse = !!opt.noDirRecurse
    this.follow = !!opt.follow
    this.noMtime = !!opt.noMtime
    this.mtime = opt.mtime || null
    this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
    this[QUEUE] = new Yallist()
    this[JOBS] = 0
    this.jobs = +opt.jobs || 4
    this[PROCESSING] = false
    this[ENDED] = false
  }
  [WRITE] (chunk) {
    return super.write(chunk)
  }
  add (path) {
    this.write(path)
    return this
  }
  end (path) {
    if (path)
      this.write(path)
    this[ENDED] = true
    this[PROCESS]()
    return this
  }
  write (path) {
    if (this[ENDED])
      throw new Error('write after end')
    if (path instanceof ReadEntry)
      this[ADDTARENTRY](path)
    else
      this[ADDFSENTRY](path)
    return this.flowing
  }
  [ADDTARENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p.path))
    // in this case, we don't have to wait for the stat
    if (!this.filter(p.path, p))
      p.resume()
    else {
      const job = new PackJob(p.path, absolute, false)
      job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
      job.entry.on('end', _ => this[JOBDONE](job))
      this[JOBS] += 1
      this[QUEUE].push(job)
    }
    this[PROCESS]()
  }
  [ADDFSENTRY] (p) {
    const absolute = normPath(path.resolve(this.cwd, p))
    this[QUEUE].push(new PackJob(p, absolute))
    this[PROCESS]()
  }
  [STAT] (job) {
    job.pending = true
    this[JOBS] += 1
    const stat = this.follow ? 'stat' : 'lstat'
    fs[stat](job.absolute, (er, stat) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        this.emit('error', er)
      else
        this[ONSTAT](job, stat)
    })
  }
  [ONSTAT] (job, stat) {
    this.statCache.set(job.absolute, stat)
    job.stat = stat
    // now we have the stat, we can filter it.
    if (!this.filter(job.path, stat))
      job.ignore = true
    this[PROCESS]()
  }
  [READDIR] (job) {
    job.pending = true
    this[JOBS] += 1
    fs.readdir(job.absolute, (er, entries) => {
      job.pending = false
      this[JOBS] -= 1
      if (er)
        return this.emit('error', er)
      this[ONREADDIR](job, entries)
    })
  }
  [ONREADDIR] (job, entries) {
    this.readdirCache.set(job.absolute, entries)
    job.readdir = entries
    this[PROCESS]()
  }
  [PROCESS] () {
    if (this[PROCESSING])
      return
    this[PROCESSING] = true
    for (let w = this[QUEUE].head;
      w !== null && this[JOBS] < this.jobs;
      w = w.next) {
      this[PROCESSJOB](w.value)
      if (w.value.ignore) {
        const p = w.next
        this[QUEUE].removeNode(w)
        w.next = p
      }
    }
    this[PROCESSING] = false
    if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
      if (this.zip)
        this.zip.end(EOF)
      else {
        super.write(EOF)
        super.end()
      }
    }
  }
  get [CURRENT] () {
    return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
  }
  [JOBDONE] (job) {
    this[QUEUE].shift()
    this[JOBS] -= 1
    this[PROCESS]()
  }
  [PROCESSJOB] (job) {
    if (job.pending)
      return
    if (job.entry) {
      if (job === this[CURRENT] && !job.piped)
        this[PIPE](job)
      return
    }
    if (!job.stat) {
      if (this.statCache.has(job.absolute))
        this[ONSTAT](job, this.statCache.get(job.absolute))
      else
        this[STAT](job)
    }
    if (!job.stat)
      return
    // filtered out!
    if (job.ignore)
      return
    if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
      if (this.readdirCache.has(job.absolute))
        this[ONREADDIR](job, this.readdirCache.get(job.absolute))
      else
        this[READDIR](job)
      if (!job.readdir)
        return
    }
    // we know it doesn't have an entry, because that got checked above
    job.entry = this[ENTRY](job)
    if (!job.entry) {
      job.ignore = true
      return
    }
    if (job === this[CURRENT] && !job.piped)
      this[PIPE](job)
  }
  [ENTRYOPT] (job) {
    return {
      onwarn: (code, msg, data) => this.warn(code, msg, data),
      noPax: this.noPax,
      cwd: this.cwd,
      absolute: job.absolute,
      preservePaths: this.preservePaths,
      maxReadSize: this.maxReadSize,
      strict: this.strict,
      portable: this.portable,
      linkCache: this.linkCache,
      statCache: this.statCache,
      noMtime: this.noMtime,
      mtime: this.mtime,
      prefix: this.prefix,
    }
  }
  [ENTRY] (job) {
    this[JOBS] += 1
    try {
      return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
        .on('end', () => this[JOBDONE](job))
        .on('error', er => this.emit('error', er))
    } catch (er) {
      this.emit('error', er)
    }
  }
  [ONDRAIN] () {
    if (this[CURRENT] && this[CURRENT].entry)
      this[CURRENT].entry.resume()
  }
  // like .pipe() but using super, because our write() is special
  [PIPE] (job) {
    job.piped = true
    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }
    const source = job.entry
    const zip = this.zip
    if (zip) {
      source.on('data', chunk => {
        if (!zip.write(chunk))
          source.pause()
      })
    } else {
      source.on('data', chunk => {
        if (!super.write(chunk))
          source.pause()
      })
    }
  }
  pause () {
    if (this.zip)
      this.zip.pause()
    return super.pause()
  }
})
class PackSync extends Pack {
  constructor (opt) {
    super(opt)
    this[WRITEENTRYCLASS] = WriteEntrySync
  }
  // pause/resume are no-ops in sync streams.
  pause () {}
  resume () {}
  [STAT] (job) {
    const stat = this.follow ? 'statSync' : 'lstatSync'
    this[ONSTAT](job, fs[stat](job.absolute))
  }
  [READDIR] (job, stat) {
    this[ONREADDIR](job, fs.readdirSync(job.absolute))
  }
  // gotta get it all in this tick
  [PIPE] (job) {
    const source = job.entry
    const zip = this.zip
    if (job.readdir) {
      job.readdir.forEach(entry => {
        const p = job.path
        const base = p === './' ? '' : p.replace(/\/*$/, '/')
        this[ADDFSENTRY](base + entry)
      })
    }
    if (zip) {
      source.on('data', chunk => {
        zip.write(chunk)
      })
    } else {
      source.on('data', chunk => {
        super[WRITE](chunk)
      })
    }
  }
}
Pack.Sync = PackSync
module.exports = Pack
/***/ }),
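// Usage sketch: Pack is the engine behind tar.c(); paths go in, tar bytes
// come out, exactly as the header comment above describes:
//
//   const Pack = __nccwpck_require__(7900)
//   new Pack({ cwd: '.', gzip: true })
//     .add('src')
//     .end()
//     .pipe(fs.createWriteStream('out.tgz'))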
/***/ 8917:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// A readable tar stream creator
// Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() return `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
class PackJob {
constructor (path, absolute) {
this.path = path || './'
this.absolute = absolute
this.entry = null
this.stat = null
this.readdir = null
this.pending = false
this.ignore = false
this.piped = false
}
}
// this[BUFFER] is the remainder of a chunk if we're waiting for
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted
// yet this can only get filled up if the user keeps write()ing after
// a write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
//
// ignored entries get .resume() called on them straight away
const MiniPass = __nccwpck_require__(1077)
const zlib = __nccwpck_require__(3486)
const ReadEntry = __nccwpck_require__(8116)
const WriteEntry = __nccwpck_require__(5450)
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const warner = __nccwpck_require__(5899)
const Header = __nccwpck_require__(6043)
const EE = __nccwpck_require__(2361)
const Yallist = __nccwpck_require__(665)
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const maxMetaEntrySize = 1024 * 1024
const Entry = __nccwpck_require__(8116)
const Pax = __nccwpck_require__(7996)
const zlib = __nccwpck_require__(3486)
const gzipHeader = Buffer.from([0x1f, 0x8b])
const STATE = Symbol('state')
const WRITEENTRY = Symbol('writeEntry')
const READENTRY = Symbol('readEntry')
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
const ENDED = Symbol('ended')
const EMITTEDEND = Symbol('emittedEnd')
const EMIT = Symbol('emit')
const UNZIP = Symbol('unzip')
const CONSUMECHUNK = Symbol('consumeChunk')
const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const CONSUMEBODY = Symbol('consumeBody')
const CONSUMEMETA = Symbol('consumeMeta')
const CONSUMEHEADER = Symbol('consumeHeader')
const CONSUMING = Symbol('consuming')
const BUFFERCONCAT = Symbol('bufferConcat')
const MAYBEEND = Symbol('maybeEnd')
const WRITING = Symbol('writing')
const ABORTED = Symbol('aborted')
const DONE = Symbol('onDone')
const SAW_VALID_ENTRY = Symbol('sawValidEntry')
const SAW_NULL_BLOCK = Symbol('sawNullBlock')
const SAW_EOF = Symbol('sawEOF')
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
const warner = __nccwpck_require__(5899)
const normPath = __nccwpck_require__(6843)
const noop = _ => true
const Pack = warner(class Pack extends MiniPass {
module.exports = warner(class Parser extends EE {
constructor (opt) {
opt = opt || {}
super(opt)
opt = opt || Object.create(null)
this.opt = opt
this.file = opt.file || ''
this.cwd = opt.cwd || process.cwd()
this.maxReadSize = opt.maxReadSize
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = normPath(opt.prefix || '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
// set to boolean false when an entry starts. 1024 bytes of \0
// is technically a valid tarball, albeit a boring one.
this[SAW_VALID_ENTRY] = null
this.portable = !!opt.portable
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object')
opt.gzip = {}
if (this.portable)
opt.gzip.portable = true
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
// these BADARCHIVE errors can't be detected early. listen on DONE.
this.on(DONE, _ => {
if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
// either less than 1 block of data, or all entries were invalid.
// Either way, probably not even a tarball.
this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
}
})
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
if (opt.ondone)
this.on(DONE, opt.ondone)
else {
this.on(DONE, _ => {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
})
}
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// have to set this so that streams are ok piping into it
this.writable = true
this.readable = false
this[QUEUE] = new Yallist()
this[JOBS] = 0
this.jobs = +opt.jobs || 4
this[PROCESSING] = false
this[BUFFER] = null
this[READENTRY] = null
this[WRITEENTRY] = null
this[STATE] = 'begin'
this[META] = ''
this[EX] = null
this[GEX] = null
this[ENDED] = false
this[UNZIP] = null
this[ABORTED] = false
this[SAW_NULL_BLOCK] = false
this[SAW_EOF] = false
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (typeof opt.onentry === 'function')
this.on('entry', opt.onentry)
}
[WRITE] (chunk) {
return super.write(chunk)
}
[CONSUMEHEADER] (chunk, position) {
if (this[SAW_VALID_ENTRY] === null)
this[SAW_VALID_ENTRY] = false
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('TAR_ENTRY_INVALID', er)
}
add (path) {
this.write(path)
return this
}
if (header.nullBlock) {
if (this[SAW_NULL_BLOCK]) {
this[SAW_EOF] = true
// ending an archive with no entries. pointless, but legal.
if (this[STATE] === 'begin')
this[STATE] = 'header'
this[EMIT]('eof')
} else {
this[SAW_NULL_BLOCK] = true
this[EMIT]('nullBlock')
}
} else {
this[SAW_NULL_BLOCK] = false
if (!header.cksumValid)
this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header})
else if (!header.path)
this.warn('TAR_ENTRY_INVALID', 'path is required', {header})
else {
const type = header.type
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header})
else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header})
else {
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
end (path) {
if (path)
this.write(path)
this[ENDED] = true
this[PROCESS]()
return this
}
// we do this for meta & ignored entries as well, because they
// are still valid tar, or else we wouldn't know to ignore them
if (!this[SAW_VALID_ENTRY]) {
if (entry.remain) {
// this might be the one!
const onend = () => {
if (!entry.invalid)
this[SAW_VALID_ENTRY] = true
}
entry.on('end', onend)
} else
this[SAW_VALID_ENTRY] = true
}
write (path) {
if (this[ENDED])
throw new Error('write after end')
if (entry.meta) {
if (entry.size > this.maxMetaEntrySize) {
entry.ignore = true
this[EMIT]('ignoredEntry', entry)
this[STATE] = 'ignore'
entry.resume()
} else if (entry.size > 0) {
this[META] = ''
entry.on('data', c => this[META] += c)
this[STATE] = 'meta'
}
} else {
this[EX] = null
entry.ignore = entry.ignore || !this.filter(entry.path, entry)
if (path instanceof ReadEntry)
this[ADDTARENTRY](path)
else
this[ADDFSENTRY](path)
return this.flowing
if (entry.ignore) {
// probably valid, just not something we care about
this[EMIT]('ignoredEntry', entry)
this[STATE] = entry.remain ? 'ignore' : 'header'
entry.resume()
} else {
if (entry.remain)
this[STATE] = 'body'
else {
this[STATE] = 'header'
entry.end()
}
if (!this[READENTRY]) {
this[QUEUE].push(entry)
this[NEXTENTRY]()
} else
this[QUEUE].push(entry)
}
}
}
}
}
}
[ADDTARENTRY] (p) {
const absolute = normPath(path.resolve(this.cwd, p.path))
// in this case, we don't have to wait for the stat
if (!this.filter(p.path, p))
p.resume()
[PROCESSENTRY] (entry) {
let go = true
if (!entry) {
this[READENTRY] = null
go = false
} else if (Array.isArray(entry))
this.emit.apply(this, entry)
else {
const job = new PackJob(p.path, absolute, false)
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
job.entry.on('end', _ => this[JOBDONE](job))
this[JOBS] += 1
this[QUEUE].push(job)
this[READENTRY] = entry
this.emit('entry', entry)
if (!entry.emittedEnd) {
entry.on('end', _ => this[NEXTENTRY]())
go = false
}
}
this[PROCESS]()
return go
}
[ADDFSENTRY] (p) {
const absolute = normPath(path.resolve(this.cwd, p))
this[QUEUE].push(new PackJob(p, absolute))
this[PROCESS]()
}
[NEXTENTRY] () {
do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
[STAT] (job) {
job.pending = true
this[JOBS] += 1
const stat = this.follow ? 'stat' : 'lstat'
fs[stat](job.absolute, (er, stat) => {
job.pending = false
this[JOBS] -= 1
if (er)
this.emit('error', er)
else
this[ONSTAT](job, stat)
})
if (!this[QUEUE].length) {
// At this point, there's nothing in the queue, but we may have an
// entry which is being consumed (readEntry).
// If we don't, then we definitely can handle more data.
// If we do, and either it's flowing, or it has never had any data
// written to it, then it needs more.
// The only other possibility is that it has returned false from a
// write() call, so we wait for the next drain to continue.
const re = this[READENTRY]
const drainNow = !re || re.flowing || re.size === re.remain
if (drainNow) {
if (!this[WRITING])
this.emit('drain')
} else
re.once('drain', _ => this.emit('drain'))
}
}
[ONSTAT] (job, stat) {
this.statCache.set(job.absolute, stat)
job.stat = stat
[CONSUMEBODY] (chunk, position) {
// write up to but no more than writeEntry.blockRemain
const entry = this[WRITEENTRY]
const br = entry.blockRemain
const c = (br >= chunk.length && position === 0) ? chunk
: chunk.slice(position, position + br)
// now we have the stat, we can filter it.
if (!this.filter(job.path, stat))
job.ignore = true
entry.write(c)
this[PROCESS]()
if (!entry.blockRemain) {
this[STATE] = 'header'
this[WRITEENTRY] = null
entry.end()
}
return c.length
}
[READDIR] (job) {
job.pending = true
this[JOBS] += 1
fs.readdir(job.absolute, (er, entries) => {
job.pending = false
this[JOBS] -= 1
if (er)
return this.emit('error', er)
this[ONREADDIR](job, entries)
})
[CONSUMEMETA] (chunk, position) {
const entry = this[WRITEENTRY]
const ret = this[CONSUMEBODY](chunk, position)
// if we finished, then the entry is reset
if (!this[WRITEENTRY])
this[EMITMETA](entry)
return ret
}
[ONREADDIR] (job, entries) {
this.readdirCache.set(job.absolute, entries)
job.readdir = entries
this[PROCESS]()
[EMIT] (ev, data, extra) {
if (!this[QUEUE].length && !this[READENTRY])
this.emit(ev, data, extra)
else
this[QUEUE].push([ev, data, extra])
}
[PROCESS] () {
if (this[PROCESSING])
return
[EMITMETA] (entry) {
this[EMIT]('meta', this[META])
switch (entry.type) {
case 'ExtendedHeader':
case 'OldExtendedHeader':
this[EX] = Pax.parse(this[META], this[EX], false)
break
this[PROCESSING] = true
for (let w = this[QUEUE].head;
w !== null && this[JOBS] < this.jobs;
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}
case 'GlobalExtendedHeader':
this[GEX] = Pax.parse(this[META], this[GEX], true)
break
this[PROCESSING] = false
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
this[EX] = this[EX] || Object.create(null)
this[EX].path = this[META].replace(/\0.*/, '')
break
if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
if (this.zip)
this.zip.end(EOF)
else {
super.write(EOF)
super.end()
}
}
}
case 'NextFileHasLongLinkpath':
this[EX] = this[EX] || Object.create(null)
this[EX].linkpath = this[META].replace(/\0.*/, '')
break
get [CURRENT] () {
return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
/* istanbul ignore next */
default: throw new Error('unknown meta: ' + entry.type)
}
}
[JOBDONE] (job) {
this[QUEUE].shift()
this[JOBS] -= 1
this[PROCESS]()
abort (error) {
this[ABORTED] = true
this.emit('abort', error)
// always throws, even in non-strict mode
this.warn('TAR_ABORT', error, { recoverable: false })
}
[PROCESSJOB] (job) {
if (job.pending)
return
if (job.entry) {
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
write (chunk) {
if (this[ABORTED])
return
}
if (!job.stat) {
if (this.statCache.has(job.absolute))
this[ONSTAT](job, this.statCache.get(job.absolute))
else
this[STAT](job)
// first write, might be gzipped
if (this[UNZIP] === null && chunk) {
if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null
}
if (chunk.length < gzipHeader.length) {
this[BUFFER] = chunk
return true
}
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i])
this[UNZIP] = false
}
if (this[UNZIP] === null) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
this[ENDED] = true
this[CONSUMECHUNK]()
})
this[WRITING] = true
const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
this[WRITING] = false
return ret
}
}
if (!job.stat)
return
// filtered out!
if (job.ignore)
return
this[WRITING] = true
if (this[UNZIP])
this[UNZIP].write(chunk)
else
this[CONSUMECHUNK](chunk)
this[WRITING] = false
if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
if (this.readdirCache.has(job.absolute))
this[ONREADDIR](job, this.readdirCache.get(job.absolute))
else
this[READDIR](job)
if (!job.readdir)
return
}
// return false if there's a queue, or if the current entry isn't flowing
const ret =
this[QUEUE].length ? false :
this[READENTRY] ? this[READENTRY].flowing :
true
// we know it doesn't have an entry, because that got checked above
job.entry = this[ENTRY](job)
if (!job.entry) {
job.ignore = true
return
}
// if we have no queue, then that means a clogged READENTRY
if (!ret && !this[QUEUE].length)
this[READENTRY].once('drain', _ => this.emit('drain'))
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
return ret
}
[ENTRYOPT] (job) {
return {
onwarn: (code, msg, data) => this.warn(code, msg, data),
noPax: this.noPax,
cwd: this.cwd,
absolute: job.absolute,
preservePaths: this.preservePaths,
maxReadSize: this.maxReadSize,
strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
mtime: this.mtime,
prefix: this.prefix,
}
[BUFFERCONCAT] (c) {
if (c && !this[ABORTED])
this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
}
[ENTRY] (job) {
this[JOBS] += 1
try {
return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
.on('end', () => this[JOBDONE](job))
.on('error', er => this.emit('error', er))
} catch (er) {
this.emit('error', er)
[MAYBEEND] () {
if (this[ENDED] &&
!this[EMITTEDEND] &&
!this[ABORTED] &&
!this[CONSUMING]) {
this[EMITTEDEND] = true
const entry = this[WRITEENTRY]
if (entry && entry.blockRemain) {
// truncated, likely a damaged file
const have = this[BUFFER] ? this[BUFFER].length : 0
this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
entry.blockRemain} more bytes, only ${have} available)`, {entry})
if (this[BUFFER])
entry.write(this[BUFFER])
entry.end()
}
this[EMIT](DONE)
}
}
[ONDRAIN] () {
if (this[CURRENT] && this[CURRENT].entry)
this[CURRENT].entry.resume()
}
// like .pipe() but using super, because our write() is special
[PIPE] (job) {
job.piped = true
if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
const source = job.entry
const zip = this.zip
[CONSUMECHUNK] (chunk) {
if (this[CONSUMING])
this[BUFFERCONCAT](chunk)
else if (!chunk && !this[BUFFER])
this[MAYBEEND]()
else {
this[CONSUMING] = true
if (this[BUFFER]) {
this[BUFFERCONCAT](chunk)
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
} else
this[CONSUMECHUNKSUB](chunk)
if (zip) {
source.on('data', chunk => {
if (!zip.write(chunk))
source.pause()
})
} else {
source.on('data', chunk => {
if (!super.write(chunk))
source.pause()
})
while (this[BUFFER] &&
this[BUFFER].length >= 512 &&
!this[ABORTED] &&
!this[SAW_EOF]) {
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
}
this[CONSUMING] = false
}
}
pause () {
if (this.zip)
this.zip.pause()
return super.pause()
}
})
class PackSync extends Pack {
constructor (opt) {
super(opt)
this[WRITEENTRYCLASS] = WriteEntrySync
if (!this[BUFFER] || this[ENDED])
this[MAYBEEND]()
}
// pause/resume are no-ops in sync streams.
pause () {}
resume () {}
[CONSUMECHUNKSUB] (chunk) {
// we know that we are in CONSUMING mode, so anything written goes into
// the buffer. Advance the position and put any remainder in the buffer.
let position = 0
const length = chunk.length
while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
switch (this[STATE]) {
case 'begin':
case 'header':
this[CONSUMEHEADER](chunk, position)
position += 512
break
[STAT] (job) {
const stat = this.follow ? 'statSync' : 'lstatSync'
this[ONSTAT](job, fs[stat](job.absolute))
}
case 'ignore':
case 'body':
position += this[CONSUMEBODY](chunk, position)
break
[READDIR] (job, stat) {
this[ONREADDIR](job, fs.readdirSync(job.absolute))
}
case 'meta':
position += this[CONSUMEMETA](chunk, position)
break
// gotta get it all in this tick
[PIPE] (job) {
const source = job.entry
const zip = this.zip
/* istanbul ignore next */
default:
throw new Error('invalid state: ' + this[STATE])
}
}
if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
if (zip) {
source.on('data', chunk => {
zip.write(chunk)
})
} else {
source.on('data', chunk => {
super[WRITE](chunk)
})
}
}
}
Pack.Sync = PackSync
module.exports = Pack
/***/ }),
/***/ 8917:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// this[BUFFER] is the remainder of a chunk if we're waiting for
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted
// yet this can only get filled up if the user keeps write()ing after
// a write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
// ignored entries get .resume() called on them straight away
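// --- Editor's illustrative sketch (not part of the original source): the
// 512-byte framing rule described above, as a standalone helper. The real
// work happens in [CONSUMECHUNKSUB] below; this only shows the
// carve-and-remainder invariant for partial writes.
const exampleCarveBlocks = (buffered, chunk) => {
  const data = buffered ? Buffer.concat([buffered, chunk]) : chunk
  const blocks = []
  let pos = 0
  while (pos + 512 <= data.length) {
    blocks.push(data.slice(pos, pos + 512)) // one full tar block
    pos += 512
  }
  // anything shorter than a block waits for the next write()
  return { blocks, remainder: pos < data.length ? data.slice(pos) : null }
}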
const warner = __nccwpck_require__(5899)
const Header = __nccwpck_require__(6043)
const EE = __nccwpck_require__(2361)
const Yallist = __nccwpck_require__(665)
const maxMetaEntrySize = 1024 * 1024
const Entry = __nccwpck_require__(8116)
const Pax = __nccwpck_require__(7996)
const zlib = __nccwpck_require__(3486)
const gzipHeader = Buffer.from([0x1f, 0x8b])
const STATE = Symbol('state')
const WRITEENTRY = Symbol('writeEntry')
const READENTRY = Symbol('readEntry')
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue')
const ENDED = Symbol('ended')
const EMITTEDEND = Symbol('emittedEnd')
const EMIT = Symbol('emit')
const UNZIP = Symbol('unzip')
const CONSUMECHUNK = Symbol('consumeChunk')
const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const CONSUMEBODY = Symbol('consumeBody')
const CONSUMEMETA = Symbol('consumeMeta')
const CONSUMEHEADER = Symbol('consumeHeader')
const CONSUMING = Symbol('consuming')
const BUFFERCONCAT = Symbol('bufferConcat')
const MAYBEEND = Symbol('maybeEnd')
const WRITING = Symbol('writing')
const ABORTED = Symbol('aborted')
const DONE = Symbol('onDone')
const SAW_VALID_ENTRY = Symbol('sawValidEntry')
const SAW_NULL_BLOCK = Symbol('sawNullBlock')
const SAW_EOF = Symbol('sawEOF')
const assert = __nccwpck_require__(9491)
const normalize = __nccwpck_require__(7118)
const stripSlashes = __nccwpck_require__(8886)
const { join } = __nccwpck_require__(1017)
const noop = _ => true
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
module.exports = warner(class Parser extends EE {
constructor (opt) {
opt = opt || {}
super(opt)
this.file = opt.file || ''
// set to boolean false when an entry starts. 1024 bytes of \0
// is technically a valid tarball, albeit a boring one.
this[SAW_VALID_ENTRY] = null
// these BADARCHIVE errors can't be detected early. listen on DONE.
this.on(DONE, _ => {
if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
// either less than 1 block of data, or all entries were invalid.
// Either way, probably not even a tarball.
this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
}
})
if (opt.ondone)
this.on(DONE, opt.ondone)
else {
this.on(DONE, _ => {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
})
}
this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// have to set this so that streams are ok piping into it
this.writable = true
this.readable = false
this[QUEUE] = new Yallist()
this[BUFFER] = null
this[READENTRY] = null
this[WRITEENTRY] = null
this[STATE] = 'begin'
this[META] = ''
this[EX] = null
this[GEX] = null
this[ENDED] = false
this[UNZIP] = null
this[ABORTED] = false
this[SAW_NULL_BLOCK] = false
this[SAW_EOF] = false
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (typeof opt.onentry === 'function')
this.on('entry', opt.onentry)
}
[CONSUMEHEADER] (chunk, position) {
if (this[SAW_VALID_ENTRY] === null)
this[SAW_VALID_ENTRY] = false
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('TAR_ENTRY_INVALID', er)
}
if (header.nullBlock) {
if (this[SAW_NULL_BLOCK]) {
this[SAW_EOF] = true
// ending an archive with no entries. pointless, but legal.
if (this[STATE] === 'begin')
this[STATE] = 'header'
this[EMIT]('eof')
} else {
this[SAW_NULL_BLOCK] = true
this[EMIT]('nullBlock')
}
} else {
this[SAW_NULL_BLOCK] = false
if (!header.cksumValid)
this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header})
else if (!header.path)
this.warn('TAR_ENTRY_INVALID', 'path is required', {header})
else {
const type = header.type
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header})
else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header})
else {
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
// we do this for meta & ignored entries as well, because they
// are still valid tar, or else we wouldn't know to ignore them
if (!this[SAW_VALID_ENTRY]) {
if (entry.remain) {
// this might be the one!
const onend = () => {
if (!entry.invalid)
this[SAW_VALID_ENTRY] = true
}
entry.on('end', onend)
} else
this[SAW_VALID_ENTRY] = true
}
if (entry.meta) {
if (entry.size > this.maxMetaEntrySize) {
entry.ignore = true
this[EMIT]('ignoredEntry', entry)
this[STATE] = 'ignore'
entry.resume()
} else if (entry.size > 0) {
this[META] = ''
entry.on('data', c => this[META] += c)
this[STATE] = 'meta'
}
} else {
this[EX] = null
entry.ignore = entry.ignore || !this.filter(entry.path, entry)
if (entry.ignore) {
// probably valid, just not something we care about
this[EMIT]('ignoredEntry', entry)
this[STATE] = entry.remain ? 'ignore' : 'header'
entry.resume()
} else {
if (entry.remain)
this[STATE] = 'body'
else {
this[STATE] = 'header'
entry.end()
}
if (!this[READENTRY]) {
this[QUEUE].push(entry)
this[NEXTENTRY]()
} else
this[QUEUE].push(entry)
}
}
}
}
}
[PROCESSENTRY] (entry) {
let go = true
if (!entry) {
this[READENTRY] = null
go = false
} else if (Array.isArray(entry))
this.emit.apply(this, entry)
else {
this[READENTRY] = entry
this.emit('entry', entry)
if (!entry.emittedEnd) {
entry.on('end', _ => this[NEXTENTRY]())
go = false
}
}
return go
}
[NEXTENTRY] () {
do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
if (!this[QUEUE].length) {
// At this point, there's nothing in the queue, but we may have an
// entry which is being consumed (readEntry).
// If we don't, then we definitely can handle more data.
// If we do, and either it's flowing, or it has never had any data
// written to it, then it needs more.
// The only other possibility is that it has returned false from a
// write() call, so we wait for the next drain to continue.
const re = this[READENTRY]
const drainNow = !re || re.flowing || re.size === re.remain
if (drainNow) {
if (!this[WRITING])
this.emit('drain')
} else
re.once('drain', _ => this.emit('drain'))
}
}
[CONSUMEBODY] (chunk, position) {
// write up to but no more than writeEntry.blockRemain
const entry = this[WRITEENTRY]
const br = entry.blockRemain
const c = (br >= chunk.length && position === 0) ? chunk
: chunk.slice(position, position + br)
entry.write(c)
if (!entry.blockRemain) {
this[STATE] = 'header'
this[WRITEENTRY] = null
entry.end()
}
"use strict";
return c.length
}
[CONSUMEMETA] (chunk, position) {
const entry = this[WRITEENTRY]
const ret = this[CONSUMEBODY](chunk, position)
// if we finished, then the entry is reset
if (!this[WRITEENTRY])
this[EMITMETA](entry)
return ret
}
[EMIT] (ev, data, extra) {
if (!this[QUEUE].length && !this[READENTRY])
this.emit(ev, data, extra)
else
this[QUEUE].push([ev, data, extra])
}
[EMITMETA] (entry) {
this[EMIT]('meta', this[META])
switch (entry.type) {
case 'ExtendedHeader':
case 'OldExtendedHeader':
this[EX] = Pax.parse(this[META], this[EX], false)
break
case 'GlobalExtendedHeader':
this[GEX] = Pax.parse(this[META], this[GEX], true)
break
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
this[EX] = this[EX] || Object.create(null)
this[EX].path = this[META].replace(/\0.*/, '')
break
case 'NextFileHasLongLinkpath':
this[EX] = this[EX] || Object.create(null)
this[EX].linkpath = this[META].replace(/\0.*/, '')
break
/* istanbul ignore next */
default: throw new Error('unknown meta: ' + entry.type)
}
}
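// --- Editor's illustrative note (not part of the original source): the pax
// records parsed above use the wire format "<len> <key>=<value>\n", where
// <len> counts its own digits. For example, a 3 GiB file size is encoded as
//   "19 size=3221225472\n"
// since " size=3221225472\n" is 17 bytes and the two digits of "19" make 19.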
abort (error) {
this[ABORTED] = true
this.emit('abort', error)
// always throws, even in non-strict mode
this.warn('TAR_ABORT', error, { recoverable: false })
}
write (chunk) {
if (this[ABORTED])
return
// first write, might be gzipped
if (this[UNZIP] === null && chunk) {
if (this[BUFFER]) {
chunk = Buffer.concat([this[BUFFER], chunk])
this[BUFFER] = null
}
if (chunk.length < gzipHeader.length) {
this[BUFFER] = chunk
return true
}
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i])
this[UNZIP] = false
}
if (this[UNZIP] === null) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
this[ENDED] = true
this[CONSUMECHUNK]()
})
this[WRITING] = true
const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
this[WRITING] = false
return ret
}
}
this[WRITING] = true
if (this[UNZIP])
this[UNZIP].write(chunk)
else
this[CONSUMECHUNK](chunk)
this[WRITING] = false
// return false if there's a queue, or if the current entry isn't flowing
const ret =
this[QUEUE].length ? false :
this[READENTRY] ? this[READENTRY].flowing :
true
// if we have no queue, then that means a clogged READENTRY
if (!ret && !this[QUEUE].length)
this[READENTRY].once('drain', _ => this.emit('drain'))
return ret
}
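// --- Editor's illustrative note (not part of the original source): the gzip
// sniff in write() above compares the first buffered bytes against
// gzipHeader (0x1f 0x8b). Fewer than two bytes defer the decision; a
// mismatch pins this[UNZIP] to false so later writes skip the check.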
[BUFFERCONCAT] (c) {
if (c && !this[ABORTED])
this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
}
"use strict";
[MAYBEEND] () {
if (this[ENDED] &&
!this[EMITTEDEND] &&
!this[ABORTED] &&
!this[CONSUMING]) {
this[EMITTEDEND] = true
const entry = this[WRITEENTRY]
if (entry && entry.blockRemain) {
// truncated, likely a damaged file
const have = this[BUFFER] ? this[BUFFER].length : 0
this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
entry.blockRemain} more bytes, only ${have} available)`, {entry})
if (this[BUFFER])
entry.write(this[BUFFER])
entry.end()
}
this[EMIT](DONE)
}
}
[CONSUMECHUNK] (chunk) {
if (this[CONSUMING])
this[BUFFERCONCAT](chunk)
else if (!chunk && !this[BUFFER])
this[MAYBEEND]()
else {
this[CONSUMING] = true
if (this[BUFFER]) {
this[BUFFERCONCAT](chunk)
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
} else
this[CONSUMECHUNKSUB](chunk)
while (this[BUFFER] &&
this[BUFFER].length >= 512 &&
!this[ABORTED] &&
!this[SAW_EOF]) {
const c = this[BUFFER]
this[BUFFER] = null
this[CONSUMECHUNKSUB](c)
}
this[CONSUMING] = false
}
if (!this[BUFFER] || this[ENDED])
this[MAYBEEND]()
}
[CONSUMECHUNKSUB] (chunk) {
// we know that we are in CONSUMING mode, so anything written goes into
// the buffer. Advance the position and put any remainder in the buffer.
let position = 0
const length = chunk.length
while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
switch (this[STATE]) {
case 'begin':
case 'header':
this[CONSUMEHEADER](chunk, position)
position += 512
break
case 'ignore':
case 'body':
position += this[CONSUMEBODY](chunk, position)
break
case 'meta':
position += this[CONSUMEMETA](chunk, position)
break
/* istanbul ignore next */
default:
throw new Error('invalid state: ' + this[STATE])
}
}
if (position < length) {
if (this[BUFFER])
this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
else
this[BUFFER] = chunk.slice(position)
}
}
end (chunk) {
if (!this[ABORTED]) {
if (this[UNZIP])
this[UNZIP].end(chunk)
else {
this[ENDED] = true
this.write(chunk)
}
}
}
})
/***/ }),
/***/ 7996:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
const Header = __nccwpck_require__(6043)
const path = __nccwpck_require__(1017)

class Pax {
  constructor (obj, global) {
    this.atime = obj.atime || null
    this.charset = obj.charset || null
    this.comment = obj.comment || null
    this.ctime = obj.ctime || null
    this.gid = obj.gid || null
    this.gname = obj.gname || null
    this.linkpath = obj.linkpath || null
    this.mtime = obj.mtime || null
    this.path = obj.path || null
    this.size = obj.size || null
    this.uid = obj.uid || null
    this.uname = obj.uname || null
    this.dev = obj.dev || null
    this.ino = obj.ino || null
    this.nlink = obj.nlink || null
    this.global = global || false
  }

  encode () {
    const body = this.encodeBody()
    if (body === '')
      return null

    const bodyLen = Buffer.byteLength(body)
    // round up to 512 bytes
    // add 512 for header
    const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
    const buf = Buffer.allocUnsafe(bufLen)

    // 0-fill the header section, it might not hit every field
    for (let i = 0; i < 512; i++)
      buf[i] = 0

    new Header({
      // XXX split the path
      // then the path should be PaxHeader + basename, but less than 99,
      // prepend with the dirname
      path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
      mode: this.mode || 0o644,
      uid: this.uid || null,
      gid: this.gid || null,
      size: bodyLen,
      mtime: this.mtime || null,
      type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
      linkpath: '',
      uname: this.uname || '',
      gname: this.gname || '',
      devmaj: 0,
      devmin: 0,
      atime: this.atime || null,
      ctime: this.ctime || null,
    }).encode(buf)

    buf.write(body, 512, bodyLen, 'utf8')

    // null pad after the body
    for (let i = bodyLen + 512; i < buf.length; i++)
      buf[i] = 0

    return buf
  }

  encodeBody () {
    return (
      this.encodeField('path') +
      this.encodeField('ctime') +
      this.encodeField('atime') +
      this.encodeField('dev') +
      this.encodeField('ino') +
      this.encodeField('nlink') +
      this.encodeField('charset') +
      this.encodeField('comment') +
      this.encodeField('gid') +
      this.encodeField('gname') +
      this.encodeField('linkpath') +
      this.encodeField('mtime') +
      this.encodeField('size') +
      this.encodeField('uid') +
      this.encodeField('uname')
    )
  }

  encodeField (field) {
    if (this[field] === null || this[field] === undefined)
      return ''
    const v = this[field] instanceof Date ? this[field].getTime() / 1000
      : this[field]
    const s = ' ' +
      (field === 'dev' || field === 'ino' || field === 'nlink'
        ? 'SCHILY.' : '') +
      field + '=' + v + '\n'
    const byteLen = Buffer.byteLength(s)
    // the digits includes the length of the digits in ascii base-10
    // so if it's 9 characters, then adding 1 for the 9 makes it 10
    // which makes it 11 chars.
    let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
    if (byteLen + digits >= Math.pow(10, digits))
      digits += 1
    const len = digits + byteLen
    return len + s
  }
}

Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)

const merge = (a, b) =>
  b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a

const parseKV = string =>
  string
    .replace(/\n$/, '')
    .split('\n')
    .reduce(parseKVLine, Object.create(null))

const parseKVLine = (set, line) => {
  const n = parseInt(line, 10)

  // XXX Values with \n in them will fail this.
  // Refactor to not be a naive line-by-line parse.
  if (n !== Buffer.byteLength(line) + 1)
    return set

  line = line.substr((n + ' ').length)
  const kv = line.split('=')
  const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
  if (!k)
    return set

  const v = kv.join('=')
  set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
    ? new Date(v * 1000)
    : /^[0-9]+$/.test(v) ? +v
    : v
  return set
}

module.exports = Pax

/***/ }),

/***/ 9587:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
// A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
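// --- Editor's illustrative sketch (not part of the original source): how a
// consumer (Unpack, later in this bundle) drives the API returned here.
const exampleReservationUsage = reservations => {
  reservations.reserve(['a/b.txt', 'a'], done => {
    // runs once no earlier reservation holds 'a' or 'a/b.txt';
    // perform the fs work, then release so the next fn in line can run.
    done()
  })
}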
const assert = __nccwpck_require__(9491)
const normalize = __nccwpck_require__(7118)
const stripSlashes = __nccwpck_require__(8886)
const { join } = __nccwpck_require__(1017)
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
module.exports = () => {
// path => [function or Set]
// A Set object means a directory reservation
// A fn is a direct reservation on that path
const queues = new Map()
// fn => {paths:[path,...], dirs:[path, ...]}
const reservations = new Map()
"use strict";
// return a set of parent dirs for a given path
// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
const getDirs = path => {
const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
if (set.length)
path = join(set[set.length - 1], path)
set.push(path || '/')
return set
}, [])
return dirs
}
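  // --- Editor's illustrative sketch (not part of the original source):
  // parent expansion feeding the directory reservations below, e.g.
  //   getDirs('a/b/c/d') -> ['a', 'a/b', 'a/b/c']
  const exampleGetDirs = () => {
    const dirs = getDirs('a/b/c/d')
    assert.deepStrictEqual(dirs, ['a', 'a/b', 'a/b/c'])
    return dirs
  }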
// functions currently running
const running = new Set()
// return the queues for each path the function cares about
// fn => {paths, dirs}
const getQueues = fn => {
const res = reservations.get(fn)
/* istanbul ignore if - unpossible */
if (!res)
throw new Error('function does not have any path reservations')
return {
paths: res.paths.map(path => queues.get(path)),
dirs: [...res.dirs].map(path => queues.get(path)),
}
}
// check if fn is first in line for all its paths, and is
// included in the first set for all its dir queues
const check = fn => {
const {paths, dirs} = getQueues(fn)
return paths.every(q => q[0] === fn) &&
dirs.every(q => q[0] instanceof Set && q[0].has(fn))
}
// run the function if it's first in line and not already running
const run = fn => {
if (running.has(fn) || !check(fn))
return false
running.add(fn)
fn(() => clear(fn))
return true
}
const clear = fn => {
if (!running.has(fn))
return false
const { paths, dirs } = reservations.get(fn)
const next = new Set()
paths.forEach(path => {
const q = queues.get(path)
assert.equal(q[0], fn)
if (q.length === 1)
queues.delete(path)
else {
q.shift()
if (typeof q[0] === 'function')
next.add(q[0])
else
q[0].forEach(fn => next.add(fn))
}
})
dirs.forEach(dir => {
const q = queues.get(dir)
assert(q[0] instanceof Set)
if (q[0].size === 1 && q.length === 1)
queues.delete(dir)
else if (q[0].size === 1) {
q.shift()
// must be a function or else the Set would've been reused
next.add(q[0])
} else
q[0].delete(fn)
})
running.delete(fn)
next.forEach(fn => run(fn))
return true
}
const reserve = (paths, fn) => {
// collide on matches across case and unicode normalization
// On windows, thanks to the magic of 8.3 shortnames, it is fundamentally
// impossible to determine whether two paths refer to the same thing on
// disk, without asking the kernel for a shortname.
// So, we just pretend that every path matches every other path here,
// effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
// don't need normPath, because we skip this entirely for windows
return normalize(stripSlashes(join(p))).toLowerCase()
})
const dirs = new Set(
paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
)
reservations.set(fn, {dirs, paths})
paths.forEach(path => {
const q = queues.get(path)
if (!q)
queues.set(path, [fn])
else
q.push(fn)
})
dirs.forEach(dir => {
const q = queues.get(dir)
if (!q)
queues.set(dir, [new Set([fn])])
else if (q[q.length - 1] instanceof Set)
q[q.length - 1].add(fn)
else
q.push(new Set([fn]))
})
return run(fn)
}
return { check, reserve }
}
/***/ }),

/***/ 5923:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
// tar -r
const hlo = __nccwpck_require__(5274)
const Pack = __nccwpck_require__(7900)
const fs = __nccwpck_require__(7147)
const fsm = __nccwpck_require__(7714)
const t = __nccwpck_require__(1525)
const path = __nccwpck_require__(1017)
const Header = __nccwpck_require__(6043)

// starting at the head of the file, read a Header
// If the checksum is invalid, that's our position to start writing
// If it is, jump forward by the specified size (round up to 512)
// and try again.
// Write the new Pack stream starting there.

module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)

  if (!opt.file)
    throw new TypeError('file is required')

  if (opt.gzip)
    throw new TypeError('cannot append to compressed archives')

  if (!files || !Array.isArray(files) || !files.length)
    throw new TypeError('no files or directories specified')

  files = Array.from(files)

  return opt.sync ? replaceSync(opt, files)
    : replace(opt, files, cb)
}

const replaceSync = (opt, files) => {
  const p = new Pack.Sync(opt)

  let threw = true
  let fd
  let position

  try {
    try {
      fd = fs.openSync(opt.file, 'r+')
    } catch (er) {
      if (er.code === 'ENOENT')
        fd = fs.openSync(opt.file, 'w+')
      else
        throw er
    }

    const st = fs.fstatSync(fd)
    const headBuf = Buffer.alloc(512)
POSITION: for (position = 0; position < st.size; position += 512) {
for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
bytes = fs.readSync(
fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
)
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
throw new Error('cannot append to compressed archives')
"use strict";
if (!bytes)
break POSITION
}
const h = new Header(headBuf)
if (!h.cksumValid)
break
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > st.size)
break
// the 512 for the header we just parsed will be added as well
// also jump ahead all the blocks for the body
position += entryBlockSize
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
}
threw = false
streamSync(opt, p, position, fd, files)
} finally {
if (threw) {
try {
fs.closeSync(fd)
} catch (er) {}
}
}
}
const streamSync = (opt, p, position, fd, files) => {
const stream = new fsm.WriteStreamSync(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
addFilesSync(p, files)
}
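// --- Editor's illustrative sketch (not part of the original source): this
// module backs the public tar.replace()/tar.update() entry points. A
// synchronous call appending one file to a hypothetical 'out.tar':
const exampleReplaceUsage = () =>
  module.exports({ file: 'out.tar', sync: true }, ['newfile.txt'])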
const replace = (opt, files, cb) => {
files = Array.from(files)
const p = new Pack(opt)
const getPos = (fd, size, cb_) => {
const cb = (er, pos) => {
if (er)
fs.close(fd, _ => cb_(er))
else
cb_(null, pos)
}
let position = 0
if (size === 0)
return cb(null, 0)
let bufPos = 0
const headBuf = Buffer.alloc(512)
const onread = (er, bytes) => {
if (er)
return cb(er)
bufPos += bytes
if (bufPos < 512 && bytes) {
return fs.read(
fd, headBuf, bufPos, headBuf.length - bufPos,
position + bufPos, onread
)
}
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
return cb(new Error('cannot append to compressed archives'))
// truncated header
if (bufPos < 512)
return cb(null, position)
const h = new Header(headBuf)
if (!h.cksumValid)
return cb(null, position)
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > size)
return cb(null, position)
position += entryBlockSize + 512
if (position >= size)
return cb(null, position)
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
bufPos = 0
fs.read(fd, headBuf, 0, 512, position, onread)
}
fs.read(fd, headBuf, 0, 512, position, onread)
}
const promise = new Promise((resolve, reject) => {
p.on('error', reject)
let flag = 'r+'
const onopen = (er, fd) => {
if (er && er.code === 'ENOENT' && flag === 'r+') {
flag = 'w+'
return fs.open(opt.file, flag, onopen)
}
if (er)
return reject(er)
fs.fstat(fd, (er, st) => {
if (er)
return fs.close(fd, () => reject(er))
getPos(fd, st.size, (er, position) => {
if (er)
return reject(er)
const stream = new fsm.WriteStream(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
stream.on('error', reject)
stream.on('close', resolve)
addFilesAsync(p, files)
})
})
}
fs.open(opt.file, flag, onopen)
})
return cb ? promise.then(cb, cb) : promise
}
const addFilesSync = (p, files) => {
files.forEach(file => {
if (file.charAt(0) === '@') {
t({
file: path.resolve(p.cwd, file.substr(1)),
sync: true,
noResume: true,
onentry: entry => p.add(entry),
})
} else
p.add(file)
})
p.end()
}
const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
} else
p.add(file)
}
p.end()
}
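// --- Editor's illustrative note (not part of the original source): the '@'
// prefix handled by addFilesSync/addFilesAsync above pulls entries from an
// existing archive rather than the filesystem, so a call like
//   tar.replace({ file: 'out.tar' }, ['@old.tar', 'extra.txt'])
// copies every entry of old.tar into out.tar, then appends extra.txt.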
/***/ }),
/***/ 8116:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
const MiniPass = __nccwpck_require__(1077)
const normPath = __nccwpck_require__(6843)
const SLURP = Symbol('slurp')
module.exports = class ReadEntry extends MiniPass {
constructor (header, ex, gex) {
super()
// read entries always start life paused. this is to avoid the
// situation where Minipass's auto-ending empty streams results
// in an entry ending before we're ready for it.
this.pause()
this.extended = ex
this.globalExtended = gex
this.header = header
this.startBlockSize = 512 * Math.ceil(header.size / 512)
this.blockRemain = this.startBlockSize
this.remain = header.size
this.type = header.type
this.meta = false
this.ignore = false
switch (this.type) {
case 'File':
case 'OldFile':
case 'Link':
case 'SymbolicLink':
case 'CharacterDevice':
case 'BlockDevice':
case 'Directory':
case 'FIFO':
case 'ContiguousFile':
case 'GNUDumpDir':
break
case 'NextFileHasLongLinkpath':
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
case 'GlobalExtendedHeader':
case 'ExtendedHeader':
case 'OldExtendedHeader':
this.meta = true
break
// NOTE: gnutar and bsdtar treat unrecognized types as 'File'
// it may be worth doing the same, but with a warning.
default:
this.ignore = true
}
this.path = normPath(header.path)
this.mode = header.mode
if (this.mode)
this.mode = this.mode & 0o7777
this.uid = header.uid
this.gid = header.gid
this.uname = header.uname
this.gname = header.gname
this.size = header.size
this.mtime = header.mtime
this.atime = header.atime
this.ctime = header.ctime
this.linkpath = normPath(header.linkpath)
this.uname = header.uname
this.gname = header.gname
if (ex)
this[SLURP](ex)
if (gex)
this[SLURP](gex, true)
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
const r = this.remain
const br = this.blockRemain
this.remain = Math.max(0, r - writeLen)
this.blockRemain = Math.max(0, br - writeLen)
if (this.ignore)
return true
if (r >= writeLen)
return super.write(data)
// r < writeLen
return super.write(data.slice(0, r))
}
"use strict";
[SLURP] (ex, global) {
for (const k in ex) {
// we slurp in everything except for the path attribute in
// a global extended header, because that's weird.
if (ex[k] !== null && ex[k] !== undefined &&
!(global && k === 'path'))
this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k]
}
}
}
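// --- Editor's illustrative sketch (not part of the original source):
// consuming ReadEntry objects from a Parser. Entries start paused, so each
// one must be piped, resumed, or ignored for the parse to keep moving.
const exampleOnEntry = entry => {
  if (entry.meta || entry.ignore)
    return // body is consumed internally or discarded
  entry.on('data', c => process.stdout.write(`${entry.path}: ${c.length}B\n`))
  entry.resume()
}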
/***/ }),

/***/ 7111:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";
// unix absolute paths are also absolute on win32, so we use this for both
const { isAbsolute, parse } = (__nccwpck_require__(1017).win32)

// returns [root, stripped]
// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
// explicitly if it's the first character.
// drive-specific relative paths on Windows get their root stripped off even
// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
module.exports = path => {
  let r = ''

  let parsed = parse(path)
  while (isAbsolute(path) || parsed.root) {
    // windows will think that //x/y/z has a "root" of //x/y/
    // but strip the //?/C:/ off of //?/C:/path
    const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/'
      : parsed.root
    path = path.substr(root.length)
    r += root
    parsed = parse(path)
  }
  return [r, path]
}

/***/ }),

/***/ 8886:
/***/ ((module) => {

// warning: extremely hot code path.
// This has been meticulously optimized for use
// within npm install on large package trees.
// Do not edit without careful benchmarking.
module.exports = str => {
  let i = str.length - 1
  let slashesStart = -1
  while (i > -1 && str.charAt(i) === '/') {
    slashesStart = i
    i--
  }
  return slashesStart === -1 ? str : str.slice(0, slashesStart)
}

/***/ }),

/***/ 4173:
/***/ ((__unused_webpack_module, exports) => {

"use strict";
// map types from key to human-friendly name
exports.name = new Map([
['0', 'File'],
// same as File
['', 'OldFile'],
['1', 'Link'],
['2', 'SymbolicLink'],
// Devices and FIFOs aren't fully supported
// they are parsed, but skipped when unpacking
['3', 'CharacterDevice'],
['4', 'BlockDevice'],
['5', 'Directory'],
['6', 'FIFO'],
// same as File
['7', 'ContiguousFile'],
// pax headers
['g', 'GlobalExtendedHeader'],
['x', 'ExtendedHeader'],
// vendor-specific stuff
// skip
['A', 'SolarisACL'],
// like 5, but with data, which should be skipped
['D', 'GNUDumpDir'],
// metadata only, skip
['I', 'Inode'],
// data = link path of next file
['K', 'NextFileHasLongLinkpath'],
// data = path of next file
['L', 'NextFileHasLongPath'],
// skip
['M', 'ContinuationFile'],
// like L
['N', 'OldGnuLongPath'],
// skip
['S', 'SparseFile'],
// skip
['V', 'TapeVolumeHeader'],
// like x
['X', 'OldExtendedHeader'],
])
// map the other direction
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
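// --- Editor's illustrative sketch (not part of the original source): the
// two maps above are inverses for every supported type code.
const exampleTypeRoundTrip = () =>
  exports.code.get(exports.name.get('5')) === '5' // Directory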
/***/ }),
/***/ 7628:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
// but the path reservations are required to avoid race conditions where
// parallelized unpack ops may mess with one another, due to dependencies
// (like a Link depending on its target) or destructive operations (like
// clobbering an fs object to create one of a different type.)
const assert = __nccwpck_require__(9491)
const Parser = __nccwpck_require__(8917)
const fs = __nccwpck_require__(7147)
const fsm = __nccwpck_require__(7714)
const path = __nccwpck_require__(1017)
const mkdir = __nccwpck_require__(9624)
const wc = __nccwpck_require__(4808)
const pathReservations = __nccwpck_require__(9587)
const stripAbsolutePath = __nccwpck_require__(7111)
const normPath = __nccwpck_require__(6843)
const stripSlash = __nccwpck_require__(8886)
const normalize = __nccwpck_require__(7118)
const ONENTRY = Symbol('onEntry')
const CHECKFS = Symbol('checkFs')
const CHECKFS2 = Symbol('checkFs2')
const PRUNECACHE = Symbol('pruneCache')
const ISREUSABLE = Symbol('isReusable')
const MAKEFS = Symbol('makeFs')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
const PENDING = Symbol('pending')
const PEND = Symbol('pend')
const UNPEND = Symbol('unpend')
const ENDED = Symbol('ended')
const MAYBECLOSE = Symbol('maybeClose')
const SKIP = Symbol('skip')
const DOCHOWN = Symbol('doChown')
const UID = Symbol('uid')
const GID = Symbol('gid')
const CHECKED_CWD = Symbol('checkedCwd')
const crypto = __nccwpck_require__(6113)
const getFlag = __nccwpck_require__(1172)
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
// Unlinks on Windows are not atomic.
//
// This means that if you have a file entry, followed by another
// file entry with an identical name, and you cannot re-use the file
// (because it's a hardlink, or because unlink:true is set, or it's
// Windows, which does not have useful nlink values), then the unlink
// will be committed to the disk AFTER the new file has been written
// over the old one, deleting the new file.
//
// To work around this, on Windows systems, we rename the file and then
// delete the renamed file. It's a sloppy kludge, but frankly, I do not
// know of a better way to do this, given windows' non-atomic unlink
// semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* istanbul ignore next */
const unlinkFile = (path, cb) => {
if (!isWindows)
return fs.unlink(path, cb)
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.rename(path, name, er => {
if (er)
return cb(er)
fs.unlink(name, cb)
})
}
/* istanbul ignore next */
const unlinkFileSync = path => {
if (!isWindows)
return fs.unlinkSync(path)
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
fs.renameSync(path, name)
fs.unlinkSync(name)
}
// this.gid, entry.gid, this.processUid
const uint32 = (a, b, c) =>
a === a >>> 0 ? a
: b === b >>> 0 ? b
: c
// clear the cache if it's a case-insensitive unicode-squashing match.
// we can't know if the current file system is case-sensitive or supports
// unicode fully, so we check for similarity on the maximally compatible
// representation. Err on the side of pruning, since all it's doing is
// preventing lstats, and it's not the end of the world if we get a false
// positive.
// Note that on windows, we always drop the entire cache whenever a
// symbolic link is encountered, because 8.3 filenames are impossible
// to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => normalize(stripSlash(normPath(path)))
.toLowerCase()
const pruneCache = (cache, abs) => {
abs = cacheKeyNormalize(abs)
for (const path of cache.keys()) {
const pnorm = cacheKeyNormalize(path)
if (pnorm === abs || pnorm.indexOf(abs + '/') === 0)
cache.delete(path)
}
}
const dropCache = cache => {
for (const key of cache.keys())
cache.delete(key)
}
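// --- Editor's illustrative sketch (not part of the original source): why
// the pruning above is deliberately aggressive. Case and unicode-form
// variants of one path collapse onto a single cache key:
const exampleCacheCollision = () =>
  cacheKeyNormalize('Foo/BAR/') === cacheKeyNormalize('foo/bar') // true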
class Unpack extends Parser {
constructor (opt) {
if (!opt)
opt = {}
opt.ondone = _ => {
this[ENDED] = true
this[MAYBECLOSE]()
}
super(opt)
this[CHECKED_CWD] = false
this.reservations = pathReservations()
this.transform = typeof opt.transform === 'function' ? opt.transform : null
this.writable = true
this.readable = false
this[PENDING] = 0
this[ENDED] = false
this.dirCache = opt.dirCache || new Map()
if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
// need both or neither
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
throw new TypeError('cannot set owner without number uid and gid')
if (opt.preserveOwner) {
throw new TypeError(
'cannot preserve owner in archive and also set owner explicitly')
}
this.uid = opt.uid
this.gid = opt.gid
this.setOwner = true
} else {
this.uid = null
this.gid = null
this.setOwner = false
}
// default true for root
if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
this.preserveOwner = process.getuid && process.getuid() === 0
else
this.preserveOwner = !!opt.preserveOwner
this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
process.getuid() : null
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null
// mostly just for testing, but useful in some cases.
// Forcibly trigger a chown on every entry, no matter what
this.forceChown = opt.forceChown === true
// turn ><?| in filenames into 0xf000-higher encoded forms
this.win32 = !!opt.win32 || isWindows
// do not unpack over files that are newer than what's in the archive
this.newer = !!opt.newer
// do not unpack over ANY files
this.keep = !!opt.keep
// do not set mtime/atime of extracted entries
this.noMtime = !!opt.noMtime
// allow .., absolute path entries, and unpacking through symlinks
// without this, warn and skip .., relativize absolutes, and error
// on symlinks in extraction path
this.preservePaths = !!opt.preservePaths
// unlink files and links before writing. This breaks existing hard
// links, and removes symlink directories rather than erroring
this.unlink = !!opt.unlink
this.cwd = normPath(path.resolve(opt.cwd || process.cwd()))
this.strip = +opt.strip || 0
// if we're not chmodding, then we don't need the process umask
this.processUmask = opt.noChmod ? 0 : process.umask()
this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
// default mode for dirs created as parents
this.dmode = opt.dmode || (0o0777 & (~this.umask))
this.fmode = opt.fmode || (0o0666 & (~this.umask))
this.on('entry', entry => this[ONENTRY](entry))
}
// a bad or damaged archive is a warning for Parser, but an error
// when extracting. Mark those errors as unrecoverable, because
// the Unpack contract cannot be met.
warn (code, msg, data = {}) {
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT')
data.recoverable = false
return super.warn(code, msg, data)
}
[MAYBECLOSE] () {
if (this[ENDED] && this[PENDING] === 0) {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
}
}
[CHECKPATH] (entry) {
if (this.strip) {
const parts = normPath(entry.path).split('/')
if (parts.length < this.strip)
return false
entry.path = parts.slice(this.strip).join('/')
if (entry.type === 'Link') {
const linkparts = normPath(entry.linkpath).split('/')
if (linkparts.length >= this.strip)
entry.linkpath = linkparts.slice(this.strip).join('/')
else
return false
}
}
if (!this.preservePaths) {
const p = normPath(entry.path)
const parts = p.split('/')
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry,
path: p,
})
return false
}
// strip off the root
const [root, stripped] = stripAbsolutePath(p)
if (root) {
entry.path = stripped
this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
entry,
path: p,
})
}
}
if (path.isAbsolute(entry.path))
entry.absolute = normPath(path.resolve(entry.path))
else
entry.absolute = normPath(path.resolve(this.cwd, entry.path))
// if we somehow ended up with a path that escapes the cwd, and we are
// not in preservePaths mode, then something is fishy! This should have
// been prevented above, so ignore this for coverage.
/* istanbul ignore if - defense in depth */
if (!this.preservePaths &&
entry.absolute.indexOf(this.cwd + '/') !== 0 &&
entry.absolute !== this.cwd) {
this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
entry,
path: normPath(entry.path),
resolvedPath: entry.absolute,
cwd: this.cwd,
})
return false
}
// an archive can set properties on the extraction directory, but it
// may not replace the cwd with a different kind of thing entirely.
if (entry.absolute === this.cwd &&
entry.type !== 'Directory' &&
entry.type !== 'GNUDumpDir')
return false
// only encode : chars that aren't drive letter indicators
if (this.win32) {
const { root: aRoot } = path.win32.parse(entry.absolute)
entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length))
const { root: pRoot } = path.win32.parse(entry.path)
entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length))
}
return true
}
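// --- Editor's illustrative note (not part of the original source): with
// preservePaths off (the default), [CHECKPATH] above turns '/etc/passwd'
// into 'etc/passwd' with a TAR_ENTRY_INFO warning, and rejects any path
// containing '..' outright with TAR_ENTRY_ERROR, skipping the entry.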
[ONENTRY] (entry) {
if (!this[CHECKPATH](entry))
return entry.resume()
assert.equal(typeof entry.absolute, 'string')
switch (entry.type) {
case 'Directory':
case 'GNUDumpDir':
if (entry.mode)
entry.mode = entry.mode | 0o700
case 'File':
case 'OldFile':
case 'ContiguousFile':
case 'Link':
case 'SymbolicLink':
return this[CHECKFS](entry)
case 'CharacterDevice':
case 'BlockDevice':
case 'FIFO':
default:
return this[UNSUPPORTED](entry)
}
}
[ONERROR] (er, entry) {
// Cwd has to exist, or else nothing works. That's serious.
// Other errors are warnings, which raise the error in strict
// mode, but otherwise continue on.
if (er.name === 'CwdError')
this.emit('error', er)
else {
this.warn('TAR_ENTRY_ERROR', er, {entry})
this[UNPEND]()
entry.resume()
}
}
[MKDIR] (dir, mode, cb) {
mkdir(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
noChmod: this.noChmod,
}, cb)
}
[DOCHOWN] (entry) {
// in preserve owner mode, chown if the entry doesn't match process
// in set owner mode, chown if setting doesn't match process
return this.forceChown ||
this.preserveOwner &&
(typeof entry.uid === 'number' && entry.uid !== this.processUid ||
typeof entry.gid === 'number' && entry.gid !== this.processGid)
||
(typeof this.uid === 'number' && this.uid !== this.processUid ||
typeof this.gid === 'number' && this.gid !== this.processGid)
}
[UID] (entry) {
return uint32(this.uid, entry.uid, this.processUid)
}
[GID] (entry) {
return uint32(this.gid, entry.gid, this.processGid)
}
[FILE] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.fmode
const stream = new fsm.WriteStream(entry.absolute, {
flags: getFlag(entry.size),
mode: mode,
autoClose: false,
})
stream.on('error', er => {
if (stream.fd)
fs.close(stream.fd, () => {})
// flush all the data out so that we aren't left hanging
// if the error wasn't actually fatal. otherwise the parse
// is blocked, and we never proceed.
stream.write = () => true
this[ONERROR](er, entry)
fullyDone()
})
let actions = 1
const done = er => {
if (er) {
/* istanbul ignore else - we should always have a fd by now */
if (stream.fd)
fs.close(stream.fd, () => {})
"use strict";
this[ONERROR](er, entry)
fullyDone()
return
}
if (--actions === 0) {
fs.close(stream.fd, er => {
if (er)
this[ONERROR](er, entry)
else
this[UNPEND]()
fullyDone()
})
}
}
stream.on('finish', _ => {
// if futimes fails, try utimes
// if utimes fails, fail with the original error
// same for fchown/chown
const abs = entry.absolute
const fd = stream.fd
if (entry.mtime && !this.noMtime) {
actions++
const atime = entry.atime || new Date()
const mtime = entry.mtime
fs.futimes(fd, atime, mtime, er =>
er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
: done())
}
if (this[DOCHOWN](entry)) {
actions++
const uid = this[UID](entry)
const gid = this[GID](entry)
fs.fchown(fd, uid, gid, er =>
er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
: done())
}
// map the other direction
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
done()
})
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => {
this[ONERROR](er, entry)
fullyDone()
})
entry.pipe(tx)
}
tx.pipe(stream)
}
/***/ }),
[DIRECTORY] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.dmode
this[MKDIR](entry.absolute, mode, er => {
if (er) {
this[ONERROR](er, entry)
fullyDone()
return
}
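// Illustrative sketch (not from the node-tar source): round-tripping a tar
// typeflag through the two maps. The require path assumes node-tar's
// published on-disk layout:
//
//   const types = require('tar/lib/types.js')
//   types.name.get('5')            // -> 'Directory'
//   types.code.get('SymbolicLink') // -> '2'
//   types.name.get('D')            // -> 'GNUDumpDir' (parsed, data skipped)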
/***/ 7628:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
// but the path reservations are required to avoid race conditions where
// parallelized unpack ops may mess with one another, due to dependencies
// (like a Link depending on its target) or destructive operations (like
// clobbering an fs object to create one of a different type.)

const assert = __nccwpck_require__(9491)
const Parser = __nccwpck_require__(8917)
const fs = __nccwpck_require__(7147)
const fsm = __nccwpck_require__(7714)
const path = __nccwpck_require__(1017)
const mkdir = __nccwpck_require__(9624)
const wc = __nccwpck_require__(4808)
const pathReservations = __nccwpck_require__(9587)
const stripAbsolutePath = __nccwpck_require__(7111)
const normPath = __nccwpck_require__(6843)
const stripSlash = __nccwpck_require__(8886)
const normalize = __nccwpck_require__(7118)

const ONENTRY = Symbol('onEntry')
const CHECKFS = Symbol('checkFs')
const CHECKFS2 = Symbol('checkFs2')
const PRUNECACHE = Symbol('pruneCache')
const ISREUSABLE = Symbol('isReusable')
const MAKEFS = Symbol('makeFs')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
const PENDING = Symbol('pending')
const PEND = Symbol('pend')
const UNPEND = Symbol('unpend')
const ENDED = Symbol('ended')
const MAYBECLOSE = Symbol('maybeClose')
const SKIP = Symbol('skip')
const DOCHOWN = Symbol('doChown')
const UID = Symbol('uid')
const GID = Symbol('gid')
const CHECKED_CWD = Symbol('checkedCwd')
const crypto = __nccwpck_require__(6113)
const getFlag = __nccwpck_require__(1172)
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'

// Unlinks on Windows are not atomic.
//
// This means that if you have a file entry, followed by another
// file entry with an identical name, and you cannot re-use the file
// (because it's a hardlink, or because unlink:true is set, or it's
// Windows, which does not have useful nlink values), then the unlink
// will be committed to the disk AFTER the new file has been written
// over the old one, deleting the new file.
//
// To work around this, on Windows systems, we rename the file and then
// delete the renamed file.  It's a sloppy kludge, but frankly, I do not
// know of a better way to do this, given windows' non-atomic unlink
// semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* istanbul ignore next */
const unlinkFile = (path, cb) => {
  if (!isWindows)
    return fs.unlink(path, cb)

  const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
  fs.rename(path, name, er => {
    if (er)
      return cb(er)
    fs.unlink(name, cb)
  })
}

/* istanbul ignore next */
const unlinkFileSync = path => {
  if (!isWindows)
    return fs.unlinkSync(path)

  const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
  fs.renameSync(path, name)
  fs.unlinkSync(name)
}

// this.gid, entry.gid, this.processUid
const uint32 = (a, b, c) =>
  a === a >>> 0 ? a
  : b === b >>> 0 ? b
  : c

// clear the cache if it's a case-insensitive unicode-squashing match.
// we can't know if the current file system is case-sensitive or supports
// unicode fully, so we check for similarity on the maximally compatible
// representation.  Err on the side of pruning, since all it's doing is
// preventing lstats, and it's not the end of the world if we get a false
// positive.
// Note that on windows, we always drop the entire cache whenever a
// symbolic link is encountered, because 8.3 filenames are impossible
// to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => normalize(stripSlash(normPath(path)))
  .toLowerCase()

const pruneCache = (cache, abs) => {
  abs = cacheKeyNormalize(abs)
  for (const path of cache.keys()) {
    const pnorm = cacheKeyNormalize(path)
    if (pnorm === abs || pnorm.indexOf(abs + '/') === 0)
      cache.delete(path)
  }
}

const dropCache = cache => {
  for (const key of cache.keys())
    cache.delete(key)
}

class Unpack extends Parser {
  constructor (opt) {
    if (!opt)
      opt = {}

    opt.ondone = _ => {
      this[ENDED] = true
      this[MAYBECLOSE]()
    }

    super(opt)

    this[CHECKED_CWD] = false

    this.reservations = pathReservations()

    this.transform = typeof opt.transform === 'function' ? opt.transform : null

    this.writable = true
    this.readable = false

    this[PENDING] = 0
    this[ENDED] = false

    this.dirCache = opt.dirCache || new Map()

    if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
      // need both or neither
      if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
        throw new TypeError('cannot set owner without number uid and gid')
      if (opt.preserveOwner) {
        throw new TypeError(
          'cannot preserve owner in archive and also set owner explicitly')
      }
      this.uid = opt.uid
      this.gid = opt.gid
      this.setOwner = true
    } else {
      this.uid = null
      this.gid = null
      this.setOwner = false
    }

    // default true for root
    if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
      this.preserveOwner = process.getuid && process.getuid() === 0
    else
      this.preserveOwner = !!opt.preserveOwner

    this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
      process.getuid() : null
    this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
      process.getgid() : null

    // mostly just for testing, but useful in some cases.
    // Forcibly trigger a chown on every entry, no matter what
    this.forceChown = opt.forceChown === true

    // turn ><?| in filenames into 0xf000-higher encoded forms
    this.win32 = !!opt.win32 || isWindows

    // do not unpack over files that are newer than what's in the archive
    this.newer = !!opt.newer

    // do not unpack over ANY files
    this.keep = !!opt.keep

    // do not set mtime/atime of extracted entries
    this.noMtime = !!opt.noMtime

    // allow .., absolute path entries, and unpacking through symlinks
    // without this, warn and skip .., relativize absolutes, and error
    // on symlinks in extraction path
    this.preservePaths = !!opt.preservePaths

    // unlink files and links before writing. This breaks existing hard
    // links, and removes symlink directories rather than erroring
    this.unlink = !!opt.unlink

    this.cwd = normPath(path.resolve(opt.cwd || process.cwd()))
    this.strip = +opt.strip || 0
    // if we're not chmodding, then we don't need the process umask
    this.processUmask = opt.noChmod ? 0 : process.umask()
    this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask

    // default mode for dirs created as parents
    this.dmode = opt.dmode || (0o0777 & (~this.umask))
    this.fmode = opt.fmode || (0o0666 & (~this.umask))

    this.on('entry', entry => this[ONENTRY](entry))
  }
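  // Illustrative sketch (not from the node-tar source): uint32 above returns
  // the first of its three arguments that is a valid unsigned 32-bit integer,
  // which is how [UID]/[GID] below choose between the configured id, the
  // entry's id, and the process id:
  //
  //   uint32(undefined, 1000, 0)  // -> 1000 (entry id wins when opt id unset)
  //   uint32(501, 1000, 0)        // -> 501  (explicit setOwner id wins)
  //   uint32(-1, NaN, 0)          // -> 0    (falls back to the process id)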
  // a bad or damaged archive is a warning for Parser, but an error
  // when extracting.  Mark those errors as unrecoverable, because
  // the Unpack contract cannot be met.
  warn (code, msg, data = {}) {
    if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT')
      data.recoverable = false
    return super.warn(code, msg, data)
  }

  [MAYBECLOSE] () {
    if (this[ENDED] && this[PENDING] === 0) {
      this.emit('prefinish')
      this.emit('finish')
      this.emit('end')
      this.emit('close')
    }
  }

  [CHECKPATH] (entry) {
    if (this.strip) {
      const parts = normPath(entry.path).split('/')
      if (parts.length < this.strip)
        return false
      entry.path = parts.slice(this.strip).join('/')

      if (entry.type === 'Link') {
        const linkparts = normPath(entry.linkpath).split('/')
        if (linkparts.length >= this.strip)
          entry.linkpath = linkparts.slice(this.strip).join('/')
        else
          return false
      }
    }

    if (!this.preservePaths) {
      const p = normPath(entry.path)
      const parts = p.split('/')
      if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
        this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
          entry,
          path: p,
        })
        return false
      }

      // strip off the root
      const [root, stripped] = stripAbsolutePath(p)
      if (root) {
        entry.path = stripped
        this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
          entry,
          path: p,
        })
      }
    }

    if (path.isAbsolute(entry.path))
      entry.absolute = normPath(path.resolve(entry.path))
    else
      entry.absolute = normPath(path.resolve(this.cwd, entry.path))

    // if we somehow ended up with a path that escapes the cwd, and we are
    // not in preservePaths mode, then something is fishy!  This should have
    // been prevented above, so ignore this for coverage.
    /* istanbul ignore if - defense in depth */
    if (!this.preservePaths &&
        entry.absolute.indexOf(this.cwd + '/') !== 0 &&
        entry.absolute !== this.cwd) {
      this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
        entry,
        path: normPath(entry.path),
        resolvedPath: entry.absolute,
        cwd: this.cwd,
      })
      return false
    }

    // an archive can set properties on the extraction directory, but it
    // may not replace the cwd with a different kind of thing entirely.
    if (entry.absolute === this.cwd &&
        entry.type !== 'Directory' &&
        entry.type !== 'GNUDumpDir')
      return false

    // only encode : chars that aren't drive letter indicators
    if (this.win32) {
      const { root: aRoot } = path.win32.parse(entry.absolute)
      entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length))
      const { root: pRoot } = path.win32.parse(entry.path)
      entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length))
    }

    return true
  }

  [ONENTRY] (entry) {
    if (!this[CHECKPATH](entry))
      return entry.resume()

    assert.equal(typeof entry.absolute, 'string')

    switch (entry.type) {
      case 'Directory':
      case 'GNUDumpDir':
        if (entry.mode)
          entry.mode = entry.mode | 0o700

      case 'File':
      case 'OldFile':
      case 'ContiguousFile':
      case 'Link':
      case 'SymbolicLink':
        return this[CHECKFS](entry)

      case 'CharacterDevice':
      case 'BlockDevice':
      case 'FIFO':
      default:
        return this[UNSUPPORTED](entry)
    }
  }
  [ONERROR] (er, entry) {
    // Cwd has to exist, or else nothing works. That's serious.
    // Other errors are warnings, which raise the error in strict
    // mode, but otherwise continue on.
    if (er.name === 'CwdError')
      this.emit('error', er)
    else {
      this.warn('TAR_ENTRY_ERROR', er, {entry})
      this[UNPEND]()
      entry.resume()
    }
  }

  [MKDIR] (dir, mode, cb) {
    mkdir(normPath(dir), {
      uid: this.uid,
      gid: this.gid,
      processUid: this.processUid,
      processGid: this.processGid,
      umask: this.processUmask,
      preserve: this.preservePaths,
      unlink: this.unlink,
      cache: this.dirCache,
      cwd: this.cwd,
      mode: mode,
      noChmod: this.noChmod,
    }, cb)
  }

  [DOCHOWN] (entry) {
    // in preserve owner mode, chown if the entry doesn't match process
    // in set owner mode, chown if setting doesn't match process
    return this.forceChown ||
      this.preserveOwner &&
      (typeof entry.uid === 'number' && entry.uid !== this.processUid ||
        typeof entry.gid === 'number' && entry.gid !== this.processGid)
      ||
      (typeof this.uid === 'number' && this.uid !== this.processUid ||
        typeof this.gid === 'number' && this.gid !== this.processGid)
  }

  [UID] (entry) {
    return uint32(this.uid, entry.uid, this.processUid)
  }

  [GID] (entry) {
    return uint32(this.gid, entry.gid, this.processGid)
  }

  [FILE] (entry, fullyDone) {
    const mode = entry.mode & 0o7777 || this.fmode
    const stream = new fsm.WriteStream(entry.absolute, {
      flags: getFlag(entry.size),
      mode: mode,
      autoClose: false,
    })
    stream.on('error', er => {
      if (stream.fd)
        fs.close(stream.fd, () => {})

      // flush all the data out so that we aren't left hanging
      // if the error wasn't actually fatal.  otherwise the parse
      // is blocked, and we never proceed.
      stream.write = () => true
      this[ONERROR](er, entry)
      fullyDone()
    })

    let actions = 1
    const done = er => {
      if (er) {
        /* istanbul ignore else - we should always have a fd by now */
        if (stream.fd)
          fs.close(stream.fd, () => {})

        this[ONERROR](er, entry)
        fullyDone()
        return
      }

      if (--actions === 0) {
        fs.close(stream.fd, er => {
          if (er)
            this[ONERROR](er, entry)
          else
            this[UNPEND]()
          fullyDone()
        })
      }
    }

    stream.on('finish', _ => {
      // if futimes fails, try utimes
      // if utimes fails, fail with the original error
      // same for fchown/chown
      const abs = entry.absolute
      const fd = stream.fd

      if (entry.mtime && !this.noMtime) {
        actions++
        const atime = entry.atime || new Date()
        const mtime = entry.mtime
        fs.futimes(fd, atime, mtime, er =>
          er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
          : done())
      }

      if (this[DOCHOWN](entry)) {
        actions++
        const uid = this[UID](entry)
        const gid = this[GID](entry)
        fs.fchown(fd, uid, gid, er =>
          er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
          : done())
      }

      done()
    })

    const tx = this.transform ? this.transform(entry) || entry : entry
    if (tx !== entry) {
      tx.on('error', er => {
        this[ONERROR](er, entry)
        fullyDone()
      })
      entry.pipe(tx)
    }
    tx.pipe(stream)
  }

  [DIRECTORY] (entry, fullyDone) {
    const mode = entry.mode & 0o7777 || this.dmode
    this[MKDIR](entry.absolute, mode, er => {
      if (er) {
        this[ONERROR](er, entry)
        fullyDone()
        return
      }

      let actions = 1
      const done = _ => {
        if (--actions === 0) {
          fullyDone()
          this[UNPEND]()
          entry.resume()
        }
      }

      if (entry.mtime && !this.noMtime) {
        actions++
        fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
      }

      if (this[DOCHOWN](entry)) {
        actions++
        fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
      }

      done()
    })
  }

  [UNSUPPORTED] (entry) {
    entry.unsupported = true
    this.warn('TAR_ENTRY_UNSUPPORTED',
      `unsupported entry type: ${entry.type}`, {entry})
    entry.resume()
  }

  [SYMLINK] (entry, done) {
    this[LINK](entry, entry.linkpath, 'symlink', done)
  }

  [HARDLINK] (entry, done) {
    const linkpath = normPath(path.resolve(this.cwd, entry.linkpath))
    this[LINK](entry, linkpath, 'link', done)
  }

  [PEND] () {
    this[PENDING]++
  }

  [UNPEND] () {
    this[PENDING]--
    this[MAYBECLOSE]()
  }

  [SKIP] (entry) {
    this[UNPEND]()
    entry.resume()
  }

  // Check if we can reuse an existing filesystem entry safely and
  // overwrite it, rather than unlinking and recreating
  // Windows doesn't report a useful nlink, so we just never reuse entries
  [ISREUSABLE] (entry, st) {
    return entry.type === 'File' &&
      !this.unlink &&
      st.isFile() &&
      st.nlink <= 1 &&
      !isWindows
  }

  // check if a thing is there, and if so, try to clobber it
  [CHECKFS] (entry) {
    this[PEND]()
    const paths = [entry.path]
    if (entry.linkpath)
      paths.push(entry.linkpath)
    this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
  }

  [PRUNECACHE] (entry) {
    // if we are not creating a directory, and the path is in the dirCache,
    // then that means we are about to delete the directory we created
    // previously, and it is no longer going to be a directory, and neither
    // is any of its children.
    // If a symbolic link is encountered, all bets are off.  There is no
    // reasonable way to sanitize the cache in such a way we will be able to
    // avoid having filesystem collisions.  If this happens with a non-symlink
    // entry, it'll just fail to unpack, but a symlink to a directory, using an
    // 8.3 shortname or certain unicode attacks, can evade detection and lead
    // to arbitrary writes to anywhere on the system.
    if (entry.type === 'SymbolicLink')
      dropCache(this.dirCache)
    else if (entry.type !== 'Directory')
      pruneCache(this.dirCache, entry.absolute)
  }

  [CHECKFS2] (entry, fullyDone) {
    this[PRUNECACHE](entry)

    const done = er => {
      this[PRUNECACHE](entry)
      fullyDone(er)
    }

    const checkCwd = () => {
      this[MKDIR](this.cwd, this.dmode, er => {
        if (er) {
          this[ONERROR](er, entry)
          done()
          return
        }
        this[CHECKED_CWD] = true
        start()
      })
    }

    const start = () => {
      if (entry.absolute !== this.cwd) {
        const parent = normPath(path.dirname(entry.absolute))
        if (parent !== this.cwd) {
          return this[MKDIR](parent, this.dmode, er => {
            if (er) {
              this[ONERROR](er, entry)
              done()
              return
            }
            afterMakeParent()
          })
        }
      }
      afterMakeParent()
    }

    const afterMakeParent = () => {
      fs.lstat(entry.absolute, (lstatEr, st) => {
        if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
          this[SKIP](entry)
          done()
          return
        }
        if (lstatEr || this[ISREUSABLE](entry, st))
          return this[MAKEFS](null, entry, done)

        if (st.isDirectory()) {
          if (entry.type === 'Directory') {
            const needChmod = !this.noChmod &&
              entry.mode &&
              (st.mode & 0o7777) !== entry.mode
            const afterChmod = er => this[MAKEFS](er, entry, done)
            if (!needChmod)
              return afterChmod()
            return fs.chmod(entry.absolute, entry.mode, afterChmod)
          }
          // Not a dir entry, have to remove it.
          // NB: the only way to end up with an entry that is the cwd
          // itself, in such a way that == does not detect, is a
          // tricky windows absolute path with UNC or 8.3 parts (and
          // preservePaths:true, or else it will have been stripped).
          // In that case, the user has opted out of path protections
          // explicitly, so if they blow away the cwd, c'est la vie.
          if (entry.absolute !== this.cwd) {
            return fs.rmdir(entry.absolute, er =>
              this[MAKEFS](er, entry, done))
          }
        }

        // not a dir, and not reusable
        // don't remove if the cwd, we want that error
        if (entry.absolute === this.cwd)
          return this[MAKEFS](null, entry, done)

        unlinkFile(entry.absolute, er =>
          this[MAKEFS](er, entry, done))
      })
    }

    if (this[CHECKED_CWD])
      start()
    else
      checkCwd()
  }

  [MAKEFS] (er, entry, done) {
    if (er) {
      this[ONERROR](er, entry)
      done()
      return
    }

    switch (entry.type) {
      case 'File':
      case 'OldFile':
      case 'ContiguousFile':
        return this[FILE](entry, done)

      case 'Link':
        return this[HARDLINK](entry, done)

      case 'SymbolicLink':
        return this[SYMLINK](entry, done)

      case 'Directory':
      case 'GNUDumpDir':
        return this[DIRECTORY](entry, done)
    }
  }

  [LINK] (entry, linkpath, link, done) {
    // XXX: get the type ('symlink' or 'junction') for windows
    fs[link](linkpath, entry.absolute, er => {
      if (er)
        this[ONERROR](er, entry)
      else {
        this[UNPEND]()
        entry.resume()
      }
      done()
    })
  }
}

const callSync = fn => {
  try {
    return [null, fn()]
  } catch (er) {
    return [er, null]
  }
}
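// Illustrative decision table (not from the node-tar source) for [DOCHOWN]
// above, assuming a process uid/gid of 1000/1000:
//
//   forceChown: true                      -> always chown
//   preserveOwner + entry.uid === 1000    -> no chown (already matches)
//   preserveOwner + entry.uid === 0       -> chown to the archive's owner
//   setOwner (opt.uid: 501), any entry    -> chown to 501
//
// Note that the constructor forbids combining setOwner with preserveOwner.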
class UnpackSync extends Unpack {
  [MAKEFS] (er, entry) {
    return super[MAKEFS](er, entry, () => {})
  }

  [CHECKFS] (entry) {
    this[PRUNECACHE](entry)

    if (!this[CHECKED_CWD]) {
      const er = this[MKDIR](this.cwd, this.dmode)
      if (er)
        return this[ONERROR](er, entry)
      this[CHECKED_CWD] = true
    }

    // don't bother to make the parent if the current entry is the cwd,
    // we've already checked it.
    if (entry.absolute !== this.cwd) {
      const parent = normPath(path.dirname(entry.absolute))
      if (parent !== this.cwd) {
        const mkParent = this[MKDIR](parent, this.dmode)
        if (mkParent)
          return this[ONERROR](mkParent, entry)
      }
    }

    const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute))
    if (st && (this.keep || this.newer && st.mtime > entry.mtime))
      return this[SKIP](entry)

    if (lstatEr || this[ISREUSABLE](entry, st))
      return this[MAKEFS](null, entry)

    if (st.isDirectory()) {
      if (entry.type === 'Directory') {
        const needChmod = !this.noChmod &&
          entry.mode &&
          (st.mode & 0o7777) !== entry.mode
        const [er] = needChmod ? callSync(() => {
          fs.chmodSync(entry.absolute, entry.mode)
        }) : []
        return this[MAKEFS](er, entry)
      }
      // not a dir entry, have to remove it
      const [er] = callSync(() => fs.rmdirSync(entry.absolute))
      this[MAKEFS](er, entry)
    }

    // not a dir, and not reusable.
    // don't remove if it's the cwd, since we want that error.
    const [er] = entry.absolute === this.cwd ? []
      : callSync(() => unlinkFileSync(entry.absolute))
    this[MAKEFS](er, entry)
  }

  [FILE] (entry, done) {
    const mode = entry.mode & 0o7777 || this.fmode

    const oner = er => {
      let closeError
      try {
        fs.closeSync(fd)
      } catch (e) {
        closeError = e
      }
      if (er || closeError)
        this[ONERROR](er || closeError, entry)
      done()
    }

    let fd
    try {
      fd = fs.openSync(entry.absolute, getFlag(entry.size), mode)
    } catch (er) {
      return oner(er)
    }
    const tx = this.transform ? this.transform(entry) || entry : entry
    if (tx !== entry) {
      tx.on('error', er => this[ONERROR](er, entry))
      entry.pipe(tx)
    }

    tx.on('data', chunk => {
      try {
        fs.writeSync(fd, chunk, 0, chunk.length)
      } catch (er) {
        oner(er)
      }
    })

    tx.on('end', _ => {
      let er = null
      // try both, falling futimes back to utimes
      // if either fails, handle the first error
      if (entry.mtime && !this.noMtime) {
        const atime = entry.atime || new Date()
        const mtime = entry.mtime
        try {
          fs.futimesSync(fd, atime, mtime)
        } catch (futimeser) {
          try {
            fs.utimesSync(entry.absolute, atime, mtime)
          } catch (utimeser) {
            er = futimeser
          }
        }
      }

      if (this[DOCHOWN](entry)) {
        const uid = this[UID](entry)
        const gid = this[GID](entry)

        try {
          fs.fchownSync(fd, uid, gid)
        } catch (fchowner) {
          try {
            fs.chownSync(entry.absolute, uid, gid)
          } catch (chowner) {
            er = er || fchowner
          }
        }
      }

      oner(er)
    })
  }

  [DIRECTORY] (entry, done) {
    const mode = entry.mode & 0o7777 || this.dmode
    const er = this[MKDIR](entry.absolute, mode)
    if (er) {
      this[ONERROR](er, entry)
      done()
      return
    }
    if (entry.mtime && !this.noMtime) {
      try {
        fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
      } catch (er) {}
    }
    if (this[DOCHOWN](entry)) {
      try {
        fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
      } catch (er) {}
    }
    done()
    entry.resume()
  }

  [MKDIR] (dir, mode) {
    try {
      return mkdir.sync(normPath(dir), {
        uid: this.uid,
        gid: this.gid,
        processUid: this.processUid,
        processGid: this.processGid,
        umask: this.processUmask,
        preserve: this.preservePaths,
        unlink: this.unlink,
        cache: this.dirCache,
        cwd: this.cwd,
        mode: mode,
      })
    } catch (er) {
      return er
    }
  }

  [LINK] (entry, linkpath, link, done) {
    try {
      fs[link + 'Sync'](linkpath, entry.absolute)
      done()
      entry.resume()
    } catch (er) {
      return this[ONERROR](er, entry)
    }
  }
}

Unpack.Sync = UnpackSync
module.exports = Unpack

/***/ }),
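// Illustrative usage (not from the node-tar source): Unpack is what backs
// tar.x() in node-tar's public API, and the options above surface there
// directly. Assumes the published node-tar package:
//
//   const tar = require('tar')
//   tar.x({ file: 'archive.tgz', cwd: 'out', strip: 1, noMtime: true })
//     .then(() => console.log('extracted'))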
/***/ 407:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

// tar -u

const hlo = __nccwpck_require__(5274)
const r = __nccwpck_require__(5923)
// just call tar.r with the filter and mtimeCache

module.exports = (opt_, files, cb) => {
  const opt = hlo(opt_)

  if (!opt.file)
    throw new TypeError('file is required')

  if (opt.gzip)
    throw new TypeError('cannot append to compressed archives')

  if (!files || !Array.isArray(files) || !files.length)
    throw new TypeError('no files or directories specified')

  files = Array.from(files)

  mtimeFilter(opt)
  return r(opt, files, cb)
}

const mtimeFilter = opt => {
  const filter = opt.filter

  if (!opt.mtimeCache)
    opt.mtimeCache = new Map()

  opt.filter = filter ? (path, stat) =>
    filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
    : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
}

/***/ }),

/***/ 5899:
/***/ ((module) => {

"use strict";

module.exports = Base => class extends Base {
  warn (code, message, data = {}) {
    if (this.file)
      data.file = this.file
    if (this.cwd)
      data.cwd = this.cwd
    data.code = message instanceof Error && message.code || code
    data.tarCode = code
    if (!this.strict && data.recoverable !== false) {
      if (message instanceof Error) {
        data = Object.assign(message, data)
        message = message.message
      }
      this.emit('warn', data.tarCode, message, data)
    } else if (message instanceof Error)
      this.emit('error', Object.assign(message, data))
    else
      this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
  }
}

/***/ }),

/***/ 4808:
/***/ ((module) => {

"use strict";

// When writing files on Windows, translate the characters to their
// 0xf000 higher-encoded versions.

const raw = [
  '|',
  '<',
  '>',
  '?',
  ':',
]

const win = raw.map(char =>
  String.fromCharCode(0xf000 + char.charCodeAt(0)))

const toWin = new Map(raw.map((char, i) => [char, win[i]]))
const toRaw = new Map(win.map((char, i) => [char, raw[i]]))

module.exports = {
  encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
  decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s),
}

/***/ }),
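// Illustrative sketch (not from the node-tar source): round-tripping reserved
// Windows filename characters through the winchars maps above.
//
//   encode('a<b>?.txt')              // -> 'a\uf03cb\uf03e\uf03f.txt'
//   decode('\uf03a')                 // -> ':'
//   decode(encode('c|d')) === 'c|d'  // -> true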
/***/ 5450:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {

"use strict";

const MiniPass = __nccwpck_require__(6684)
const Pax = __nccwpck_require__(7996)
const Header = __nccwpck_require__(6043)
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
const normPath = __nccwpck_require__(6843)
const stripSlash = __nccwpck_require__(8886)

const prefixPath = (path, prefix) => {
  if (!prefix)
    return normPath(path)
  path = normPath(path).replace(/^\.(\/|$)/, '')
  return stripSlash(prefix) + '/' + path
}

const maxReadSize = 16 * 1024 * 1024
const PROCESS = Symbol('process')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const HEADER = Symbol('header')
const READ = Symbol('read')
const LSTAT = Symbol('lstat')
const ONLSTAT = Symbol('onlstat')
const ONREAD = Symbol('onread')
const ONREADLINK = Symbol('onreadlink')
const OPENFILE = Symbol('openfile')
const ONOPENFILE = Symbol('onopenfile')
const CLOSE = Symbol('close')
const MODE = Symbol('mode')
const AWAITDRAIN = Symbol('awaitDrain')
const ONDRAIN = Symbol('ondrain')
const PREFIX = Symbol('prefix')
const HAD_ERROR = Symbol('hadError')
const warner = __nccwpck_require__(5899)
const winchars = __nccwpck_require__(4808)
const stripAbsolutePath = __nccwpck_require__(7111)
const modeFix = __nccwpck_require__(8371)

const WriteEntry = warner(class WriteEntry extends MiniPass {
  constructor (p, opt) {
    opt = opt || {}
    super(opt)
    if (typeof p !== 'string')
      throw new TypeError('path is required')
    this.path = normPath(p)
    // suppress atime, ctime, uid, gid, uname, gname
    this.portable = !!opt.portable
    // until node has builtin pwnam functions, this'll have to do
    this.myuid = process.getuid && process.getuid() || 0
    this.myuser = process.env.USER || ''
    this.maxReadSize = opt.maxReadSize || maxReadSize
    this.linkCache = opt.linkCache || new Map()
    this.statCache = opt.statCache || new Map()
    this.preservePaths = !!opt.preservePaths
    this.cwd = normPath(opt.cwd || process.cwd())
    this.strict = !!opt.strict
    this.noPax = !!opt.noPax
    this.noMtime = !!opt.noMtime
    this.mtime = opt.mtime || null
    this.prefix = opt.prefix ? normPath(opt.prefix) : null

    this.fd = null
    this.blockLen = null
    this.blockRemain = null
    this.buf = null
    this.offset = null
    this.length = null
    this.pos = null
    this.remain = null

    if (typeof opt.onwarn === 'function')
      this.on('warn', opt.onwarn)

    let pathWarn = false
    if (!this.preservePaths) {
      const [root, stripped] = stripAbsolutePath(this.path)
      if (root) {
        this.path = stripped
        pathWarn = root
      }
    }

    this.win32 = !!opt.win32 || process.platform === 'win32'
    if (this.win32) {
      // force the \ to / normalization, since we might not *actually*
      // be on windows, but want \ to be considered a path separator.
      this.path = winchars.decode(this.path.replace(/\\/g, '/'))
      p = p.replace(/\\/g, '/')
    }

    this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p))

    if (this.path === '')
      this.path = './'

    if (pathWarn) {
      this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
        entry: this,
        path: pathWarn + this.path,
      })
    }

    if (this.statCache.has(this.absolute))
      this[ONLSTAT](this.statCache.get(this.absolute))
    else
      this[LSTAT]()
  }

  emit (ev, ...data) {
    if (ev === 'error')
      this[HAD_ERROR] = true
    return super.emit(ev, ...data)
  }

  [LSTAT] () {
    fs.lstat(this.absolute, (er, stat) => {
      if (er)
        return this.emit('error', er)
      this[ONLSTAT](stat)
    })
  }

  [ONLSTAT] (stat) {
    this.statCache.set(this.absolute, stat)
    this.stat = stat
    if (!stat.isFile())
      stat.size = 0
    this.type = getType(stat)
    this.emit('stat', stat)
    this[PROCESS]()
  }

  [PROCESS] () {
    switch (this.type) {
      case 'File': return this[FILE]()
      case 'Directory': return this[DIRECTORY]()
      case 'SymbolicLink': return this[SYMLINK]()
      // unsupported types are ignored.
      default: return this.end()
    }
  }

  [MODE] (mode) {
    return modeFix(mode, this.type === 'Directory', this.portable)
  }

  [PREFIX] (path) {
    return prefixPath(path, this.prefix)
  }

  [HEADER] () {
    if (this.type === 'Directory' && this.portable)
      this.noMtime = true

    this.header = new Header({
      path: this[PREFIX](this.path),
      // only apply the prefix to hard links.
      linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
      : this.linkpath,
      // only the permissions and setuid/setgid/sticky bitflags
      // not the higher-order bits that specify file type
      mode: this[MODE](this.stat.mode),
      uid: this.portable ? null : this.stat.uid,
      gid: this.portable ? null : this.stat.gid,
      size: this.stat.size,
      mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
      type: this.type,
      uname: this.portable ? null :
      this.stat.uid === this.myuid ? this.myuser : '',
      atime: this.portable ? null : this.stat.atime,
      ctime: this.portable ? null : this.stat.ctime,
    })

    if (this.header.encode() && !this.noPax) {
      super.write(new Pax({
        atime: this.portable ? null : this.header.atime,
        ctime: this.portable ? null : this.header.ctime,
        gid: this.portable ? null : this.header.gid,
        mtime: this.noMtime ? null : this.mtime || this.header.mtime,
        path: this[PREFIX](this.path),
        linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
        : this.linkpath,
        size: this.header.size,
        uid: this.portable ? null : this.header.uid,
        uname: this.portable ? null : this.header.uname,
        dev: this.portable ? null : this.stat.dev,
        ino: this.portable ? null : this.stat.ino,
        nlink: this.portable ? null : this.stat.nlink,
      }).encode())
    }
    super.write(this.header.block)
  }

  [DIRECTORY] () {
    if (this.path.substr(-1) !== '/')
      this.path += '/'
    this.stat.size = 0
    this[HEADER]()
    this.end()
  }

  [SYMLINK] () {
    fs.readlink(this.absolute, (er, linkpath) => {
      if (er)
        return this.emit('error', er)
      this[ONREADLINK](linkpath)
    })
  }

  [ONREADLINK] (linkpath) {
    this.linkpath = normPath(linkpath)
    this[HEADER]()
    this.end()
  }

  [HARDLINK] (linkpath) {
    this.type = 'Link'
    this.linkpath = normPath(path.relative(this.cwd, linkpath))
    this.stat.size = 0
    this[HEADER]()
    this.end()
  }

  [FILE] () {
    if (this.stat.nlink > 1) {
      const linkKey = this.stat.dev + ':' + this.stat.ino
      if (this.linkCache.has(linkKey)) {
        const linkpath = this.linkCache.get(linkKey)
        if (linkpath.indexOf(this.cwd) === 0)
          return this[HARDLINK](linkpath)
      }
      this.linkCache.set(linkKey, this.absolute)
    }

    this[HEADER]()
    if (this.stat.size === 0)
      return this.end()

    this[OPENFILE]()
  }

  [OPENFILE] () {
    fs.open(this.absolute, 'r', (er, fd) => {
      if (er)
        return this.emit('error', er)
      this[ONOPENFILE](fd)
    })
  }

  [ONOPENFILE] (fd) {
    this.fd = fd
    if (this[HAD_ERROR])
      return this[CLOSE]()

    this.blockLen = 512 * Math.ceil(this.stat.size / 512)
    this.blockRemain = this.blockLen
    const bufLen = Math.min(this.blockLen, this.maxReadSize)
    this.buf = Buffer.allocUnsafe(bufLen)
    this.offset = 0
    this.pos = 0
    this.remain = this.stat.size
    this.length = this.buf.length
    this[READ]()
  }

  [READ] () {
    const { fd, buf, offset, length, pos } = this
    fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
      if (er) {
        // ignoring the error from close(2) is a bad practice, but at
        // this point we already have an error, don't need another one
        return this[CLOSE](() => this.emit('error', er))
      }
      this[ONREAD](bytesRead)
    })
  }

  [CLOSE] (cb) {
    fs.close(this.fd, cb)
  }

  [ONREAD] (bytesRead) {
    if (bytesRead <= 0 && this.remain > 0) {
      const er = new Error('encountered unexpected EOF')
      er.path = this.absolute
      er.syscall = 'read'
      er.code = 'EOF'
      return this[CLOSE](() => this.emit('error', er))
    }

    if (bytesRead > this.remain) {
      const er = new Error('did not encounter expected EOF')
      er.path = this.absolute
      er.syscall = 'read'
      er.code = 'EOF'
      return this[CLOSE](() => this.emit('error', er))
    }

    // null out the rest of the buffer, if we could fit the block padding
    // at the end of this loop, we've incremented bytesRead and this.remain
    // to be incremented up to the blockRemain level, as if we had expected
    // to get a null-padded file, and read it until the end.  then we will
    // decrement both remain and blockRemain by bytesRead, and know that we
    // reached the expected EOF, without any null buffer to append.
    if (bytesRead === this.remain) {
      for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
        this.buf[i + this.offset] = 0
        bytesRead++
        this.remain++
      }
    }

    const writeBuf = this.offset === 0 && bytesRead === this.buf.length ?
      this.buf : this.buf.slice(this.offset, this.offset + bytesRead)

    const flushed = this.write(writeBuf)
    if (!flushed)
      this[AWAITDRAIN](() => this[ONDRAIN]())
    else
      this[ONDRAIN]()
  }

  [AWAITDRAIN] (cb) {
    this.once('drain', cb)
  }

  write (writeBuf) {
    if (this.blockRemain < writeBuf.length) {
      const er = new Error('writing more data than expected')
      er.path = this.absolute
      return this.emit('error', er)
    }
    this.remain -= writeBuf.length
    this.blockRemain -= writeBuf.length
    this.pos += writeBuf.length
    this.offset += writeBuf.length
    return super.write(writeBuf)
  }

  [ONDRAIN] () {
    if (!this.remain) {
      if (this.blockRemain)
        super.write(Buffer.alloc(this.blockRemain))
      return this[CLOSE](er => er ? this.emit('error', er) : this.end())
    }

    if (this.offset >= this.length) {
      // if we only have a smaller bit left to read, alloc a smaller buffer
      // otherwise, keep it the same length it was before.
      this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length))
      this.offset = 0
    }
    this.length = this.buf.length - this.offset
    this[READ]()
  }
})

class WriteEntrySync extends WriteEntry {
  [LSTAT] () {
    this[ONLSTAT](fs.lstatSync(this.absolute))
  }

  [SYMLINK] () {
    this[ONREADLINK](fs.readlinkSync(this.absolute))
  }

  [OPENFILE] () {
    this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
  }

  [READ] () {
    let threw = true
    try {
      const { fd, buf, offset, length, pos } = this
      const bytesRead = fs.readSync(fd, buf, offset, length, pos)
      this[ONREAD](bytesRead)
      threw = false
    } finally {
      // ignoring the error from close(2) is a bad practice, but at
      // this point we already have an error, don't need another one
      if (threw) {
        try {
          this[CLOSE](() => {})
        } catch (er) {}
      }
    }
  }

  [AWAITDRAIN] (cb) {
    cb()
  }

  [CLOSE] (cb) {
    fs.closeSync(this.fd)
    cb()
  }
}

const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
  constructor (readEntry, opt) {
    opt = opt || {}
    super(opt)
    this.preservePaths = !!opt.preservePaths
    this.portable = !!opt.portable
    this.strict = !!opt.strict
    this.noPax = !!opt.noPax
    this.noMtime = !!opt.noMtime

    this.readEntry = readEntry
    this.type = readEntry.type
    if (this.type === 'Directory' && this.portable)
      this.noMtime = true

    this.prefix = opt.prefix || null

    this.path = normPath(readEntry.path)
    this.mode = this[MODE](readEntry.mode)
    this.uid = this.portable ? null : readEntry.uid
    this.gid = this.portable ? null : readEntry.gid
    this.uname = this.portable ? null : readEntry.uname
    this.gname = this.portable ? null : readEntry.gname
    this.size = readEntry.size
    this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
    this.atime = this.portable ? null : readEntry.atime
    this.ctime = this.portable ? null : readEntry.ctime
    this.linkpath = normPath(readEntry.linkpath)

    if (typeof opt.onwarn === 'function')
      this.on('warn', opt.onwarn)

    let pathWarn = false
    if (!this.preservePaths) {
      const [root, stripped] = stripAbsolutePath(this.path)
      if (root) {
        this.path = stripped
        pathWarn = root
      }
    }

    this.remain = readEntry.size
    this.blockRemain = readEntry.startBlockSize

    this.header = new Header({
      path: this[PREFIX](this.path),
      linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
      : this.linkpath,
      // only the permissions and setuid/setgid/sticky bitflags
      // not the higher-order bits that specify file type
      mode: this.mode,
      uid: this.portable ? null : this.uid,
      gid: this.portable ? null : this.gid,
      size: this.size,
      mtime: this.noMtime ? null : this.mtime,
      type: this.type,
      uname: this.portable ? null : this.uname,
      atime: this.portable ? null : this.atime,
      ctime: this.portable ? null : this.ctime,
    })

    if (pathWarn) {
      this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
        entry: this,
        path: pathWarn + this.path,
      })
    }

    if (this.header.encode() && !this.noPax) {
      super.write(new Pax({
        atime: this.portable ? null : this.atime,
        ctime: this.portable ? null : this.ctime,
        gid: this.portable ? null : this.gid,
        mtime: this.noMtime ? null : this.mtime,
        path: this[PREFIX](this.path),
        linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
        : this.linkpath,
        size: this.size,
        uid: this.portable ? null : this.uid,
        uname: this.portable ? null : this.uname,
        dev: this.portable ? null : this.readEntry.dev,
        ino: this.portable ? null : this.readEntry.ino,
        nlink: this.portable ? null : this.readEntry.nlink,
      }).encode())
    }

    super.write(this.header.block)
    readEntry.pipe(this)
  }

  [PREFIX] (path) {
    return prefixPath(path, this.prefix)
  }

  [MODE] (mode) {
    return modeFix(mode, this.type === 'Directory', this.portable)
  }

  write (data) {
    const writeLen = data.length
    if (writeLen > this.blockRemain)
      throw new Error('writing more to entry than is appropriate')
    this.blockRemain -= writeLen
    return super.write(data)
  }

  end () {
    if (this.blockRemain)
      super.write(Buffer.alloc(this.blockRemain))
    return super.end()
  }
})

WriteEntry.Sync = WriteEntrySync
WriteEntry.Tar = WriteEntryTar

const getType = stat =>
  stat.isFile() ? 'File'
  : stat.isDirectory() ? 'Directory'
  : stat.isSymbolicLink() ? 'SymbolicLink'
  : 'Unsupported'

module.exports = WriteEntry

/***/ }),
read (n) {
if (this[DESTROYED])
return null
[HEADER] () {
if (this.type === 'Directory' && this.portable)
this.noMtime = true
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
this[MAYBE_EMIT_END]()
return null
}
this.header = new Header({
path: this[PREFIX](this.path),
// only apply the prefix to hard links.
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this[MODE](this.stat.mode),
uid: this.portable ? null : this.stat.uid,
gid: this.portable ? null : this.stat.gid,
size: this.stat.size,
mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
type: this.type,
uname: this.portable ? null :
this.stat.uid === this.myuid ? this.myuser : '',
atime: this.portable ? null : this.stat.atime,
ctime: this.portable ? null : this.stat.ctime,
})
if (this[OBJECTMODE])
n = null
if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.header.atime,
ctime: this.portable ? null : this.header.ctime,
gid: this.portable ? null : this.header.gid,
mtime: this.noMtime ? null : this.mtime || this.header.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.header.size,
uid: this.portable ? null : this.header.uid,
uname: this.portable ? null : this.header.uname,
dev: this.portable ? null : this.stat.dev,
ino: this.portable ? null : this.stat.ino,
nlink: this.portable ? null : this.stat.nlink,
}).encode())
if (this.buffer.length > 1 && !this[OBJECTMODE]) {
if (this.encoding)
this.buffer = [this.buffer.join('')]
else
this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
}
super.write(this.header.block)
const ret = this[READ](n || null, this.buffer[0])
this[MAYBE_EMIT_END]()
return ret
}
[DIRECTORY] () {
if (this.path.substr(-1) !== '/')
this.path += '/'
this.stat.size = 0
this[HEADER]()
this.end()
[READ] (n, chunk) {
if (n === chunk.length || n === null)
this[BUFFERSHIFT]()
else {
this.buffer[0] = chunk.slice(n)
chunk = chunk.slice(0, n)
this[BUFFERLENGTH] -= n
}
this.emit('data', chunk)
if (!this.buffer.length && !this[EOF])
this.emit('drain')
return chunk
}
[SYMLINK] () {
fs.readlink(this.absolute, (er, linkpath) => {
if (er)
return this.emit('error', er)
this[ONREADLINK](linkpath)
})
end (chunk, encoding, cb) {
if (typeof chunk === 'function')
cb = chunk, chunk = null
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (chunk)
this.write(chunk, encoding)
if (cb)
this.once('end', cb)
this[EOF] = true
this.writable = false
// if we haven't written anything, then go ahead and emit,
// even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED])
this[MAYBE_EMIT_END]()
return this
}
[ONREADLINK] (linkpath) {
this.linkpath = normPath(linkpath)
this[HEADER]()
this.end()
// don't let the internal resume be overwritten
[RESUME] () {
if (this[DESTROYED])
return
this[PAUSED] = false
this[FLOWING] = true
this.emit('resume')
if (this.buffer.length)
this[FLUSH]()
else if (this[EOF])
this[MAYBE_EMIT_END]()
else
this.emit('drain')
}
[HARDLINK] (linkpath) {
this.type = 'Link'
this.linkpath = normPath(path.relative(this.cwd, linkpath))
this.stat.size = 0
this[HEADER]()
this.end()
resume () {
return this[RESUME]()
}
[FILE] () {
if (this.stat.nlink > 1) {
const linkKey = this.stat.dev + ':' + this.stat.ino
if (this.linkCache.has(linkKey)) {
const linkpath = this.linkCache.get(linkKey)
if (linkpath.indexOf(this.cwd) === 0)
return this[HARDLINK](linkpath)
}
this.linkCache.set(linkKey, this.absolute)
}
pause () {
this[FLOWING] = false
this[PAUSED] = true
}
this[HEADER]()
if (this.stat.size === 0)
return this.end()
get destroyed () {
return this[DESTROYED]
}
this[OPENFILE]()
get flowing () {
return this[FLOWING]
}
[OPENFILE] () {
fs.open(this.absolute, 'r', (er, fd) => {
if (er)
return this.emit('error', er)
this[ONOPENFILE](fd)
})
get paused () {
return this[PAUSED]
}
[ONOPENFILE] (fd) {
this.fd = fd
if (this[HAD_ERROR])
return this[CLOSE]()
[BUFFERPUSH] (chunk) {
if (this[OBJECTMODE])
this[BUFFERLENGTH] += 1
else
this[BUFFERLENGTH] += chunk.length
this.buffer.push(chunk)
}
this.blockLen = 512 * Math.ceil(this.stat.size / 512)
this.blockRemain = this.blockLen
const bufLen = Math.min(this.blockLen, this.maxReadSize)
this.buf = Buffer.allocUnsafe(bufLen)
this.offset = 0
this.pos = 0
this.remain = this.stat.size
this.length = this.buf.length
this[READ]()
[BUFFERSHIFT] () {
if (this.buffer.length) {
if (this[OBJECTMODE])
this[BUFFERLENGTH] -= 1
else
this[BUFFERLENGTH] -= this.buffer[0].length
}
return this.buffer.shift()
}
[READ] () {
const { fd, buf, offset, length, pos } = this
fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
if (er) {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
return this[CLOSE](() => this.emit('error', er))
}
this[ONREAD](bytesRead)
})
[FLUSH] (noDrain) {
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
if (!noDrain && !this.buffer.length && !this[EOF])
this.emit('drain')
}
[CLOSE] (cb) {
fs.close(this.fd, cb)
[FLUSHCHUNK] (chunk) {
return chunk ? (this.emit('data', chunk), this.flowing) : false
}
[ONREAD] (bytesRead) {
if (bytesRead <= 0 && this.remain > 0) {
const er = new Error('encountered unexpected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
}
pipe (dest, opts) {
if (this[DESTROYED])
return
if (bytesRead > this.remain) {
const er = new Error('did not encounter expected EOF')
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
}
const ended = this[EMITTED_END]
opts = opts || {}
if (dest === proc.stdout || dest === proc.stderr)
opts.end = false
else
opts.end = opts.end !== false
opts.proxyErrors = !!opts.proxyErrors
// null out the rest of the buffer, if we could fit the block padding
// at the end of this loop, we've incremented bytesRead and this.remain
// to be incremented up to the blockRemain level, as if we had expected
// to get a null-padded file, and read it until the end. then we will
// decrement both remain and blockRemain by bytesRead, and know that we
// reached the expected EOF, without any null buffer to append.
if (bytesRead === this.remain) {
for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
this.buf[i + this.offset] = 0
bytesRead++
this.remain++
}
// piping an ended stream ends immediately
if (ended) {
if (opts.end)
dest.end()
} else {
this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
: new PipeProxyErrors(this, dest, opts))
if (this[ASYNC])
defer(() => this[RESUME]())
else
this[RESUME]()
}
const writeBuf = this.offset === 0 && bytesRead === this.buf.length ?
this.buf : this.buf.slice(this.offset, this.offset + bytesRead)
return dest
}
const flushed = this.write(writeBuf)
if (!flushed)
this[AWAITDRAIN](() => this[ONDRAIN]())
else
this[ONDRAIN]()
unpipe (dest) {
const p = this.pipes.find(p => p.dest === dest)
if (p) {
this.pipes.splice(this.pipes.indexOf(p), 1)
p.unpipe()
}
}
[AWAITDRAIN] (cb) {
this.once('drain', cb)
addListener (ev, fn) {
return this.on(ev, fn)
}
write (writeBuf) {
if (this.blockRemain < writeBuf.length) {
const er = new Error('writing more data than expected')
er.path = this.absolute
return this.emit('error', er)
on (ev, fn) {
const ret = super.on(ev, fn)
if (ev === 'data' && !this.pipes.length && !this.flowing)
this[RESUME]()
else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
super.emit('readable')
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev)
this.removeAllListeners(ev)
} else if (ev === 'error' && this[EMITTED_ERROR]) {
if (this[ASYNC])
defer(() => fn.call(this, this[EMITTED_ERROR]))
else
fn.call(this, this[EMITTED_ERROR])
}
this.remain -= writeBuf.length
this.blockRemain -= writeBuf.length
this.pos += writeBuf.length
this.offset += writeBuf.length
return super.write(writeBuf)
return ret
}
[ONDRAIN] () {
if (!this.remain) {
if (this.blockRemain)
super.write(Buffer.alloc(this.blockRemain))
return this[CLOSE](er => er ? this.emit('error', er) : this.end())
get emittedEnd () {
return this[EMITTED_END]
}
[MAYBE_EMIT_END] () {
if (!this[EMITTING_END] &&
!this[EMITTED_END] &&
!this[DESTROYED] &&
this.buffer.length === 0 &&
this[EOF]) {
this[EMITTING_END] = true
this.emit('end')
this.emit('prefinish')
this.emit('finish')
if (this[CLOSED])
this.emit('close')
this[EMITTING_END] = false
}
}
if (this.offset >= this.length) {
// if we only have a smaller bit left to read, alloc a smaller buffer
// otherwise, keep it the same length it was before.
this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length))
this.offset = 0
emit (ev, data, ...extra) {
// error and close are only events allowed after calling destroy()
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
return
else if (ev === 'data') {
return !data ? false
: this[ASYNC] ? defer(() => this[EMITDATA](data))
: this[EMITDATA](data)
} else if (ev === 'end') {
return this[EMITEND]()
} else if (ev === 'close') {
this[CLOSED] = true
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return
const ret = super.emit('close')
this.removeAllListeners('close')
return ret
} else if (ev === 'error') {
this[EMITTED_ERROR] = data
const ret = super.emit('error', data)
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'resume') {
const ret = super.emit('resume')
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev)
this.removeAllListeners(ev)
return ret
}
this.length = this.buf.length - this.offset
this[READ]()
}
})
class WriteEntrySync extends WriteEntry {
[LSTAT] () {
this[ONLSTAT](fs.lstatSync(this.absolute))
// Some other unknown event
const ret = super.emit(ev, data, ...extra)
this[MAYBE_EMIT_END]()
return ret
}
[SYMLINK] () {
this[ONREADLINK](fs.readlinkSync(this.absolute))
[EMITDATA] (data) {
for (const p of this.pipes) {
if (p.dest.write(data) === false)
this.pause()
}
const ret = super.emit('data', data)
this[MAYBE_EMIT_END]()
return ret
}
[OPENFILE] () {
this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
[EMITEND] () {
if (this[EMITTED_END])
return
this[EMITTED_END] = true
this.readable = false
if (this[ASYNC])
defer(() => this[EMITEND2]())
else
this[EMITEND2]()
}
[READ] () {
let threw = true
try {
const { fd, buf, offset, length, pos } = this
const bytesRead = fs.readSync(fd, buf, offset, length, pos)
this[ONREAD](bytesRead)
threw = false
} finally {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
if (threw) {
try {
this[CLOSE](() => {})
} catch (er) {}
[EMITEND2] () {
if (this[DECODER]) {
const data = this[DECODER].end()
if (data) {
for (const p of this.pipes) {
p.dest.write(data)
}
super.emit('data', data)
}
}
}
[AWAITDRAIN] (cb) {
cb()
for (const p of this.pipes) {
p.end()
}
const ret = super.emit('end')
this.removeAllListeners('end')
return ret
}
[CLOSE] (cb) {
fs.closeSync(this.fd)
cb()
// const all = await stream.collect()
collect () {
const buf = []
if (!this[OBJECTMODE])
buf.dataLength = 0
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise()
this.on('data', c => {
buf.push(c)
if (!this[OBJECTMODE])
buf.dataLength += c.length
})
return p.then(() => buf)
}
}
const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
constructor (readEntry, opt) {
opt = opt || {}
super(opt)
this.preservePaths = !!opt.preservePaths
this.portable = !!opt.portable
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.readEntry = readEntry
this.type = readEntry.type
if (this.type === 'Directory' && this.portable)
this.noMtime = true
// const data = await stream.concat()
concat () {
return this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
}
this.prefix = opt.prefix || null
// stream.promise().then(() => done, er => emitted error)
promise () {
return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
this.on('error', er => reject(er))
this.on('end', () => resolve())
})
}
this.path = normPath(readEntry.path)
this.mode = this[MODE](readEntry.mode)
this.uid = this.portable ? null : readEntry.uid
this.gid = this.portable ? null : readEntry.gid
this.uname = this.portable ? null : readEntry.uname
this.gname = this.portable ? null : readEntry.gname
this.size = readEntry.size
this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
this.atime = this.portable ? null : readEntry.atime
this.ctime = this.portable ? null : readEntry.ctime
this.linkpath = normPath(readEntry.linkpath)
// for await (let chunk of stream)
[ASYNCITERATOR] () {
const next = () => {
const res = this.read()
if (res !== null)
return Promise.resolve({ done: false, value: res })
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (this[EOF])
return Promise.resolve({ done: true })
let pathWarn = false
if (!this.preservePaths) {
const [root, stripped] = stripAbsolutePath(this.path)
if (root) {
this.path = stripped
pathWarn = root
let resolve = null
let reject = null
const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
resolve({ done: true })
}
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)
this.once('end', onend)
this.once('data', ondata)
})
}
this.remain = readEntry.size
this.blockRemain = readEntry.startBlockSize
this.header = new Header({
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this.mode,
uid: this.portable ? null : this.uid,
gid: this.portable ? null : this.gid,
size: this.size,
mtime: this.noMtime ? null : this.mtime,
type: this.type,
uname: this.portable ? null : this.uname,
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
})
return { next }
}
if (pathWarn) {
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
entry: this,
path: pathWarn + this.path,
})
// for (let chunk of stream)
[ITERATOR] () {
const next = () => {
const value = this.read()
const done = value === null
return { value, done }
}
return { next }
}
if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
gid: this.portable ? null : this.gid,
mtime: this.noMtime ? null : this.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.size,
uid: this.portable ? null : this.uid,
uname: this.portable ? null : this.uname,
dev: this.portable ? null : this.readEntry.dev,
ino: this.portable ? null : this.readEntry.ino,
nlink: this.portable ? null : this.readEntry.nlink,
}).encode())
destroy (er) {
if (this[DESTROYED]) {
if (er)
this.emit('error', er)
else
this.emit(DESTROYED)
return this
}
super.write(this.header.block)
readEntry.pipe(this)
}
this[DESTROYED] = true
[PREFIX] (path) {
return prefixPath(path, this.prefix)
}
// throw away all buffered data, it's never coming out
this.buffer.length = 0
this[BUFFERLENGTH] = 0
[MODE] (mode) {
return modeFix(mode, this.type === 'Directory', this.portable)
}
if (typeof this.close === 'function' && !this[CLOSED])
this.close()
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
this.blockRemain -= writeLen
return super.write(data)
}
if (er)
this.emit('error', er)
else // if no error to emit, still reject pending promises
this.emit(DESTROYED)
end () {
if (this.blockRemain)
super.write(Buffer.alloc(this.blockRemain))
return super.end()
return this
}
})
WriteEntry.Sync = WriteEntrySync
WriteEntry.Tar = WriteEntryTar
const getType = stat =>
stat.isFile() ? 'File'
: stat.isDirectory() ? 'Directory'
: stat.isSymbolicLink() ? 'SymbolicLink'
: 'Unsupported'
module.exports = WriteEntry
static isStream (s) {
return !!s && (s instanceof Minipass || s instanceof Stream ||
s instanceof EE && (
typeof s.pipe === 'function' || // readable
(typeof s.write === 'function' && typeof s.end === 'function') // writable
))
}
}
/***/ }),
......@@ -16275,6 +17574,14 @@ module.exports = require("buffer");
/***/ }),
/***/ 2081:
/***/ ((module) => {
"use strict";
module.exports = require("child_process");
/***/ }),
/***/ 6113:
/***/ ((module) => {
......
This source diff could not be displayed because it is too large. You can view the blob instead.
......@@ -520,7 +520,7 @@ minipass
ISC
The ISC License
Copyright (c) npm, Inc. and Contributors
Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above
......
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
......@@ -32,95 +32,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
});
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.generateFeaturesDocumentation = void 0;
exports.generateTemplateDocumentation = exports.generateFeaturesDocumentation = void 0;
const fs = __importStar(require("fs"));
const github = __importStar(require("@actions/github"));
const core = __importStar(require("@actions/core"));
const path = __importStar(require("path"));
function generateFeaturesDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
fs.readdir(basePath, (err, files) => {
if (err) {
core.error(err.message);
core.setFailed(`failed to generate 'features' documentation ${err.message}`);
return;
}
files.forEach(f => {
core.info(`Generating docs for feature '${f}'`);
if (f !== '.' && f !== '..') {
const readmePath = path.join(basePath, f, 'README.md');
// Reads in feature.json
const featureJsonPath = path.join(basePath, f, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) {
core.error(`devcontainer-feature.json not found at path '${featureJsonPath}'`);
return;
}
let featureJson = undefined;
try {
featureJson = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
}
catch (err) {
core.error(`Failed to parse ${featureJsonPath}: ${err}`);
return;
}
if (!featureJson || !(featureJson === null || featureJson === void 0 ? void 0 : featureJson.id)) {
core.error(`devcontainer-feature.json for feature '${f}' does not contain an 'id'`);
return;
}
const ref = github.context.ref;
const owner = github.context.repo.owner;
const repo = github.context.repo.repo;
// Add tag if parseable
let versionTag = 'latest';
if (ref.includes('refs/tags/')) {
versionTag = ref.replace('refs/tags/', '');
}
const generateOptionsMarkdown = () => {
const options = featureJson === null || featureJson === void 0 ? void 0 : featureJson.options;
if (!options) {
return '';
}
const keys = Object.keys(options);
const contents = keys
.map(k => {
const val = options[k];
return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
})
.join('\n');
return ('| Options Id | Description | Type | Default Value |\n' +
'|-----|-----|-----|-----|\n' +
contents);
};
const newReadme = README_TEMPLATE.replace('#{nwo}', `${owner}/${repo}`)
.replace('#{versionTag}', versionTag)
.replace('#{featureId}', featureJson.id)
.replace('#{featureName}', featureJson.name
? `${featureJson.name} (${featureJson.id})`
: `${featureJson.id}`)
.replace('#{featureDescription}', featureJson.description ? featureJson.description : '')
.replace('#{optionsTable}', generateOptionsMarkdown());
// Remove previous readme
if (fs.existsSync(readmePath)) {
fs.unlinkSync(readmePath);
}
// Write new readme
fs.writeFileSync(readmePath, newReadme);
}
});
});
});
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
const README_TEMPLATE = `
# #{featureName}
const utils_1 = require("./utils");
const FEATURES_README_TEMPLATE = `
# #{Name}
#{featureDescription}
#{Description}
## Example Usage
\`\`\`json
"features": {
"#{nwo}/#{featureId}@#{versionTag}": {
"#{Nwo}/#{Id}@#{VersionTag}": {
"version": "latest"
}
}
......@@ -128,9 +54,105 @@ const README_TEMPLATE = `
## Options
#{optionsTable}
#{OptionsTable}
---
_Note: This file was auto-generated from the [devcontainer-feature.json](./devcontainer-feature.json)._
_Note: This file was auto-generated from the [devcontainer-feature.json](#{RepoUrl})._
`;
const TEMPLATE_README_TEMPLATE = `
# #{Name}
#{Description}
## Options
#{OptionsTable}
`;
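// For reference, a minimal sketch of how the '#{...}' placeholders above
// resolve. The feature id, repo, tag, and URL below are hypothetical values
// for illustration, not values taken from this repository.
const exampleReadme = FEATURES_README_TEMPLATE
    .replace('#{Name}', 'Go (go)')
    .replace('#{Description}', 'Installs Go and common Go utilities.')
    .replace('#{Nwo}', 'octo-org/features')
    .replace('#{Id}', 'go')
    .replace('#{VersionTag}', 'v1.2.0')
    .replace('#{OptionsTable}', '| Options Id | Description | Type | Default Value |')
    .replace('#{RepoUrl}', 'https://github.com/octo-org/features/blob/main/src/go/devcontainer-feature.json');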
function generateFeaturesDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
yield _generateDocumentation(basePath, FEATURES_README_TEMPLATE, 'devcontainer-feature.json');
});
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
function generateTemplateDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
yield _generateDocumentation(basePath, TEMPLATE_README_TEMPLATE, 'devcontainer-template.json');
});
}
exports.generateTemplateDocumentation = generateTemplateDocumentation;
function _generateDocumentation(basePath, readmeTemplate, metadataFile) {
return __awaiter(this, void 0, void 0, function* () {
const directories = fs.readdirSync(basePath);
yield Promise.all(directories.map((f) => __awaiter(this, void 0, void 0, function* () {
var _a, _b, _c;
if (!f.startsWith('.')) {
const readmePath = path.join(basePath, f, 'README.md');
// Read in the metadata file (devcontainer-feature.json or devcontainer-template.json)
const jsonPath = path.join(basePath, f, metadataFile);
if (!fs.existsSync(jsonPath)) {
core.error(`${metadataFile} not found at path '${jsonPath}'`);
return;
}
let parsedJson = undefined;
try {
parsedJson = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
}
catch (err) {
core.error(`Failed to parse ${jsonPath}: ${err}`);
return;
}
if (!parsedJson || !(parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.id)) {
core.error(`${metadataFile} for '${f}' does not contain an 'id'`);
return;
}
const srcInfo = (0, utils_1.getGitHubMetadata)();
const ref = srcInfo.ref;
const owner = srcInfo.owner;
const repo = srcInfo.repo;
// Add tag if parseable
let versionTag = 'latest';
if (ref && ref.includes('refs/tags/')) {
versionTag = ref.replace('refs/tags/', '');
}
const generateOptionsMarkdown = () => {
const options = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.options;
if (!options) {
return '';
}
const keys = Object.keys(options);
const contents = keys
.map(k => {
const val = options[k];
return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
})
.join('\n');
return '| Options Id | Description | Type | Default Value |\n' + '|-----|-----|-----|-----|\n' + contents;
};
let urlToConfig = './devcontainer-feature.json';
const basePathTrimmed = basePath.startsWith('./') ? basePath.substring(2) : basePath;
if (srcInfo.owner && srcInfo.repo) {
urlToConfig = `https://github.com/${srcInfo.owner}/${srcInfo.repo}/blob/main/${basePathTrimmed}/${f}/devcontainer-feature.json`;
}
const newReadme = readmeTemplate
// Templates & Features
.replace('#{Id}', parsedJson.id)
.replace('#{Name}', parsedJson.name ? `${parsedJson.name} (${parsedJson.id})` : `${parsedJson.id}`)
.replace('#{Description}', (_a = parsedJson.description) !== null && _a !== void 0 ? _a : '')
.replace('#{OptionsTable}', generateOptionsMarkdown())
// Features Only
.replace('#{Nwo}', `${owner}/${repo}`)
.replace('#{VersionTag}', versionTag)
// Templates Only
.replace('#{ManifestName}', (_c = (_b = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.image) === null || _b === void 0 ? void 0 : _b.manifest) !== null && _c !== void 0 ? _c : '')
.replace('#{RepoUrl}', urlToConfig);
// Remove previous readme
if (fs.existsSync(readmePath)) {
fs.unlinkSync(readmePath);
}
// Write new readme
fs.writeFileSync(readmePath, newReadme);
}
})));
});
}
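// A minimal usage sketch of the two generators above; the require path and
// directory layout are assumptions for illustration, not part of this change.
const generateDocs = require('./generateDocs');
(async () => {
    await generateDocs.generateFeaturesDocumentation('./src');
    await generateDocs.generateTemplateDocumentation('./templates/src');
})();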
......@@ -44,42 +44,51 @@ function run() {
core.debug('Reading input parameters...');
// Read inputs
const shouldPublishFeatures = core.getInput('publish-features').toLowerCase() === 'true';
const shouldPublishTemplate = core.getInput('publish-templates').toLowerCase() === 'true';
const shouldPublishTemplates = core.getInput('publish-templates').toLowerCase() === 'true';
const shouldGenerateDocumentation = core.getInput('generate-docs').toLowerCase() === 'true';
// Experimental
const shouldTagIndividualFeatures = core.getInput('tag-individual-features').toLowerCase() === 'true';
const shouldPublishToNPM = core.getInput('publish-to-npm').toLowerCase() === 'true';
const shouldPublishReleaseArtifacts = core.getInput('publish-release-artifacts').toLowerCase() === 'true';
const shouldPublishToOCI = core.getInput('publish-to-oci').toLowerCase() === 'true';
const opts = {
shouldTagIndividualFeatures,
shouldPublishToNPM,
shouldPublishReleaseArtifacts,
shouldPublishToOCI
};
const featuresBasePath = core.getInput('base-path-to-features');
const templatesBasePath = core.getInput('base-path-to-templates');
let featuresMetadata = undefined;
let templatesMetadata = undefined;
// -- Package Release Artifacts
if (shouldPublishFeatures) {
core.info('Publishing features...');
const featuresBasePath = core.getInput('base-path-to-features');
featuresMetadata = yield packageFeatures(featuresBasePath);
featuresMetadata = yield packageFeatures(featuresBasePath, opts);
}
if (shouldPublishTemplate) {
if (shouldPublishTemplates) {
core.info('Publishing templates...');
const basePathToDefinitions = core.getInput('base-path-to-templates');
templatesMetadata = undefined; // TODO
yield packageTemplates(basePathToDefinitions);
templatesMetadata = yield packageTemplates(templatesBasePath);
}
if (shouldGenerateDocumentation) {
core.info('Generating documentation...');
const featuresBasePath = core.getInput('base-path-to-features');
if (featuresBasePath) {
yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath);
}
else {
core.error("'base-path-to-features' input is required to generate documentation");
}
// TODO: base-path-to-templates
// -- Generate Documentation
if (shouldGenerateDocumentation && featuresBasePath) {
core.info('Generating documentation for features...');
yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath);
}
// TODO: Programmatically add feature/template info with relevant metadata for UX clients.
core.info('Generation metadata file: devcontainer-collection.json');
yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata);
if (shouldGenerateDocumentation && templatesBasePath) {
core.info('Generating documentation for templates...');
yield (0, generateDocs_1.generateTemplateDocumentation)(templatesBasePath);
}
// -- Programmatically add feature/template metadata to collections file.
core.info('Generating metadata file: devcontainer-collection.json');
yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata, opts);
});
}
function packageFeatures(basePath) {
function packageFeatures(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () {
try {
core.info(`Archiving all features in ${basePath}`);
const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath);
const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath, opts);
core.info('Packaging features has finished.');
return metadata;
}
......@@ -94,14 +103,17 @@ function packageFeatures(basePath) {
function packageTemplates(basePath) {
return __awaiter(this, void 0, void 0, function* () {
try {
core.info(`Archiving all templated in ${basePath}`);
yield (0, utils_1.getTemplatesAndPackage)(basePath);
core.info(`Archiving all templates in ${basePath}`);
const metadata = yield (0, utils_1.getTemplatesAndPackage)(basePath);
core.info('Packaging templates has finished.');
return metadata;
}
catch (error) {
if (error instanceof Error)
if (error instanceof Error) {
core.setFailed(error.message);
}
}
return;
});
}
run();
......@@ -35,11 +35,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod };
};
Object.defineProperty(exports, "__esModule", { value: true });
exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.addCollectionsMetadataFile = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0;
exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.pushCollectionsMetadataToOCI = exports.addCollectionsMetadataFile = exports.getGitHubMetadata = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0;
const github = __importStar(require("@actions/github"));
const tar = __importStar(require("tar"));
const fs = __importStar(require("fs"));
const core = __importStar(require("@actions/core"));
const child_process = __importStar(require("child_process"));
const util_1 = require("util");
const path_1 = __importDefault(require("path"));
exports.readLocalFile = (0, util_1.promisify)(fs.readFile);
......@@ -62,23 +63,83 @@ function tarDirectory(path, tgzName) {
});
}
exports.tarDirectory = tarDirectory;
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) {
function getGitHubMetadata() {
// Insert github repo metadata
const ref = github.context.ref;
let sourceInformation = {
owner: github.context.repo.owner,
repo: github.context.repo.repo,
ref,
sha: github.context.sha
};
// Add tag if parseable
if (ref.includes('refs/tags/')) {
const tag = ref.replace('refs/tags/', '');
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag });
}
return sourceInformation;
}
exports.getGitHubMetadata = getGitHubMetadata;
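// Rough illustration: for a run triggered by pushing the tag 'v1.0.1' to a
// hypothetical repo 'octo-org/features', getGitHubMetadata() returns roughly:
const exampleSourceInformation = {
    owner: 'octo-org',
    repo: 'features',
    ref: 'refs/tags/v1.0.1',
    sha: '0123456789abcdef0123456789abcdef01234567',
    tag: 'v1.0.1'
};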
function tagFeatureAtVersion(featureMetaData) {
return __awaiter(this, void 0, void 0, function* () {
const p = path_1.default.join('.', 'devcontainer-collection.json');
// Insert github repo metadata
const ref = github.context.ref;
let sourceInformation = {
source: 'github',
const featureId = featureMetaData.id;
const featureVersion = featureMetaData.version;
const tagName = `${featureId}_v${featureVersion}`;
// Get GITHUB_TOKEN from environment
const githubToken = process.env.GITHUB_TOKEN;
if (!githubToken) {
core.setFailed('GITHUB_TOKEN environment variable is not set.');
return;
}
// Setup Octokit client
const octokit = github.getOctokit(githubToken);
// Use octokit to get all tags for this repo
const tags = yield octokit.rest.repos.listTags({
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
// See if a tag for this release was already created.
const tagExists = tags.data.some(tag => tag.name === tagName);
if (tagExists) {
core.info(`Tag ${tagName} already exists. Skipping...`);
return;
}
// Create tag
const createdTag = yield octokit.rest.git.createTag({
tag: tagName,
message: `Feature ${featureId} version ${featureVersion}`,
object: github.context.sha,
type: 'commit',
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
if (createdTag.status === 201) {
core.info(`Tagged '${tagName}'`);
}
else {
core.setFailed(`Failed to tag '${tagName}'`);
return;
}
// Create reference to tag
const createdRef = yield octokit.rest.git.createRef({
owner: github.context.repo.owner,
repo: github.context.repo.repo,
ref,
sha: github.context.sha
};
// Add tag if parseable
if (ref.includes('refs/tags/')) {
const tag = ref.replace('refs/tags/', '');
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag });
ref: `refs/tags/${tagName}`,
sha: createdTag.data.sha
});
if (createdRef.status === 201) {
core.info(`Created reference for '${tagName}'`);
}
else {
core.setFailed(`Failed to create a reference for tag '${tagName}'`);
return;
}
});
}
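// For example (hypothetical values), tagFeatureAtVersion tags a feature
// { id: 'go', version: '1.0.0' } as 'go_v1.0.0':
const exampleFeature = { id: 'go', version: '1.0.0' };
const exampleTagName = `${exampleFeature.id}_v${exampleFeature.version}`; // => 'go_v1.0.0'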
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata, opts) {
return __awaiter(this, void 0, void 0, function* () {
const p = path_1.default.join('.', 'devcontainer-collection.json');
const sourceInformation = getGitHubMetadata();
const metadata = {
sourceInformation,
features: featuresMetadata || [],
......@@ -86,27 +147,148 @@ function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) {
};
// Write to the file
yield (0, exports.writeLocalFile)(p, JSON.stringify(metadata, undefined, 4));
if (opts.shouldPublishToOCI) {
pushCollectionsMetadataToOCI(p);
}
});
}
exports.addCollectionsMetadataFile = addCollectionsMetadataFile;
function getFeaturesAndPackage(basePath) {
function pushArtifactToOCI(version, featureName, artifactPath) {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
const versions = [version, '1.0', '1', 'latest']; // TODO: Generate semantic versions from 'version'
const sourceInfo = getGitHubMetadata();
yield Promise.all(versions.map((v) => __awaiter(this, void 0, void 0, function* () {
const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}/${featureName}:${v}`;
try {
const cmd = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${artifactPath}:application/vnd.devcontainers.layer.v1+tar`;
yield exec(cmd);
core.info(`Pushed artifact to '${ociRepo}'`);
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Failed to push '${ociRepo}': ${error.message}`);
}
})));
});
}
function pushCollectionsMetadataToOCI(collectionJsonPath) {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
const sourceInfo = getGitHubMetadata();
const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}:latest`;
try {
const cmd = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${collectionJsonPath}:application/vnd.devcontainers.collection.layer.v1+json`;
yield exec(cmd);
core.info(`Pushed collection metadata to '${ociRepo}'`);
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Failed to push collection metadata '${ociRepo}': ${error.message}`);
}
});
}
exports.pushCollectionsMetadataToOCI = pushCollectionsMetadataToOCI;
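// Sketch of the command assembled above, expanded for a hypothetical repo
// 'octo-org/features' (for illustration only, not a verified invocation):
//
//   oras push ghcr.io/octo-org/features:latest \
//     --manifest-config /dev/null:application/vnd.devcontainers \
//     ./devcontainer-collection.json:application/vnd.devcontainers.collection.layer.v1+json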
function loginToGHCR() {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
// Get GITHUB_TOKEN from environment
const githubToken = process.env.GITHUB_TOKEN;
if (!githubToken) {
core.setFailed('GITHUB_TOKEN environment variable is not set.');
return;
}
try {
yield exec(`oras login ghcr.io -u USERNAME -p ${githubToken}`);
core.info('Oras logged in successfully!');
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Oras login failed!`);
}
});
}
function getFeaturesAndPackage(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () {
const { shouldPublishToNPM, shouldTagIndividualFeatures, shouldPublishReleaseArtifacts, shouldPublishToOCI } = opts;
const featureDirs = fs.readdirSync(basePath);
let metadatas = [];
const exec = (0, util_1.promisify)(child_process.exec);
if (shouldPublishToOCI) {
yield loginToGHCR();
}
yield Promise.all(featureDirs.map((f) => __awaiter(this, void 0, void 0, function* () {
var _a;
core.info(`feature ==> ${f}`);
if (f !== '.' && f !== '..') {
if (!f.startsWith('.')) {
const featureFolder = path_1.default.join(basePath, f);
const archiveName = `${f}.tgz`;
yield tarDirectory(`${basePath}/${f}`, archiveName);
const featureJsonPath = path_1.default.join(featureFolder, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) {
core.error(`Feature ${f} is missing a devcontainer-feature.json`);
core.error(`Feature '${f}' is missing a devcontainer-feature.json`);
core.setFailed('All features must have a devcontainer-feature.json');
return;
}
const featureMetadata = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
if (!featureMetadata.id || !featureMetadata.version) {
core.error(`Feature '${f}' must define an id and a version`);
core.setFailed('Incomplete devcontainer-feature.json');
}
metadatas.push(featureMetadata);
const sourceInfo = getGitHubMetadata();
if (!sourceInfo.owner) {
core.setFailed('Could not determine repository owner.');
return;
}
const archiveName = `${f}.tgz`;
// ---- PUBLISH RELEASE ARTIFACTS (classic method) ----
if (shouldPublishReleaseArtifacts || shouldPublishToOCI) {
core.info(`** Tar'ing feature`);
yield tarDirectory(featureFolder, archiveName);
}
// ---- PUBLISH TO OCI ----
if (shouldPublishToOCI) {
core.info(`** Publishing to OCI`);
// TODO: CHECK IF THE FEATURE IS ALREADY PUBLISHED UNDER GIVEN TAG
yield pushArtifactToOCI(featureMetadata.version, f, archiveName);
}
// ---- TAG INDIVIDUAL FEATURES ----
if (shouldTagIndividualFeatures) {
core.info(`** Tagging individual feature`);
yield tagFeatureAtVersion(featureMetadata);
}
// ---- PUBLISH TO NPM ----
if (shouldPublishToNPM) {
core.info(`** Publishing to NPM`);
// Adds a package.json file to the feature folder
const packageJsonPath = path_1.default.join(featureFolder, 'package.json');
// if (!sourceInfo.tag) {
// core.error(`Feature ${f} is missing a tag! Cannot publish to NPM.`);
// core.setFailed('All features published to NPM must be tagged with a version');
// }
const packageJsonObject = {
name: `@${sourceInfo.owner}/${f}`,
version: featureMetadata.version,
description: `${(_a = featureMetadata.description) !== null && _a !== void 0 ? _a : 'My cool feature'}`,
author: `${sourceInfo.owner}`,
keywords: ['devcontainer-features']
};
yield (0, exports.writeLocalFile)(packageJsonPath, JSON.stringify(packageJsonObject, undefined, 4));
core.info(`Feature Folder is: ${featureFolder}`);
// Run npm pack, which 'tars' the folder
const packageName = yield exec(`npm pack ./${featureFolder}`);
if (packageName.stderr) {
core.error(`${packageName.stderr.toString()}`);
}
const publishOutput = yield exec(`npm publish --access public "${packageName.stdout.trim()}"`);
core.info(publishOutput.stdout);
if (publishOutput.stderr) {
core.error(`${publishOutput.stderr}`);
}
}
}
})));
if (metadatas.length === 0) {
......@@ -119,23 +301,29 @@ function getFeaturesAndPackage(basePath) {
exports.getFeaturesAndPackage = getFeaturesAndPackage;
function getTemplatesAndPackage(basePath) {
return __awaiter(this, void 0, void 0, function* () {
let archives = [];
fs.readdir(basePath, (err, files) => {
if (err) {
core.error(err.message);
core.setFailed(`failed to get list of templates: ${err.message}`);
return;
}
files.forEach(file => {
core.info(`template ==> ${file}`);
if (file !== '.' && file !== '..') {
const archiveName = `devcontainer-definition-${file}.tgz`;
tarDirectory(`${basePath}/${file}`, archiveName);
archives.push(archiveName);
const templateDirs = fs.readdirSync(basePath);
let metadatas = [];
yield Promise.all(templateDirs.map((t) => __awaiter(this, void 0, void 0, function* () {
core.info(`template ==> ${t}`);
if (!t.startsWith('.')) {
const templateFolder = path_1.default.join(basePath, t);
const archiveName = `devcontainer-template-${t}.tgz`;
// await tarDirectory(templateFolder, archiveName);
const templateJsonPath = path_1.default.join(templateFolder, 'devcontainer-template.json');
if (!fs.existsSync(templateJsonPath)) {
core.error(`Template '${t}' is missing a devcontainer-template.json`);
core.setFailed('All templates must have a devcontainer-template.json');
return;
}
});
});
return archives;
const templateMetadata = JSON.parse(fs.readFileSync(templateJsonPath, 'utf8'));
metadatas.push(templateMetadata);
}
})));
if (metadatas.length === 0) {
core.setFailed('No templates found');
return;
}
return metadatas;
});
}
exports.getTemplatesAndPackage = getTemplatesAndPackage;
name: "(Release) Release dev container features (v2)"
on:
push:
tags:
- "v*"
workflow_dispatch:
jobs:
deploy:
if: ${{ github.ref == 'refs/heads/main' }}
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Generate tgz
- name: Install Oras
run: |
curl -LO https://github.com/oras-project/oras/releases/download/v0.13.0/oras_0.13.0_linux_amd64.tar.gz
mkdir -p oras-install/
tar -zxf oras_0.13.0_*.tar.gz -C oras-install/
mv oras-install/oras /usr/local/bin/
rm -rf oras_0.13.0_*.tar.gz oras-install/
- name: "Publish features to OCI"
uses: ./.github/devcontainers-action # TODO: Once 'devcontainers/action' is published, use that.
with:
publish-features: "true"
publish-to-oci: "true"
base-path-to-features: "./src"
- name: Remove temporary devcontainer-cli # TODO: Temporary
run: rm -rf ./devcontainer-cli-0*
- name: Get or Create Release at current tag
uses: ncipollo/release-action@v1
with:
allowUpdates: true # Lets us upload our own artifact from previous step
artifactErrorsFailBuild: true
artifacts: "./*.tgz,devcontainer-collection.json"
token: ${{ secrets.GITHUB_TOKEN }}
env:
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
{
"id": "anaconda",
"version": "1.0.0",
"name": "Anaconda",
"options": {
"version": {
......@@ -14,9 +15,5 @@
"containerEnv": {
"CONDA_DIR": "/usr/local/conda",
"PATH": "${PATH}:${CONDA_DIR}/bin:"
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "aws-cli",
"version": "1.0.0",
"name": "AWS CLI",
"description": "Installs the AWS CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
"options": {
......@@ -14,9 +15,5 @@
},
"extensions": [
"AmazonWebServices.aws-toolkit-vscode"
],
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
]
}
{
"id": "azure-cli",
"version": "1.0.0",
"name": "Azure CLI",
"description": "Installs the Azure CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
"options": {
......@@ -14,9 +15,5 @@
},
"extensions": [
"ms-vscode.azurecli"
],
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
]
}
{
"id": "common-utils",
"name": "Common Debian Utilities",
"version": "1.0.0",
"description": "Installs a set of common command line utilities, Oh My Zsh!, and sets up a non-root user.",
"options": {
"install_Zsh": {
......@@ -55,9 +56,5 @@
},
"extensions": [
"ms-dotnettools.csharp"
],
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
]
}
{
"id": "desktop-lite",
"version": "1.0.0",
"name": "Light-weight Desktop",
"description": "Adds a lightweight Fluxbox based desktop to the container that can be accessed using a VNC viewer or the web. GUI-based commands executed from the built-in VS code terminal will open on the desktop automatically.",
"options": {
......@@ -50,9 +51,5 @@
"entrypoint": "/usr/local/share/desktop-init.sh",
"containerEnv": {
"DISPLAY": ":1"
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "docker-from-docker",
"version": "1.0.0",
"name": "Docker (Docker-from-Docker)",
"descripton": "Re-use the host docker socket, adding the Docker CLI to a container. Feature invokes a script to enable using a forwarded Docker socket within a container to run Docker commands.",
"options": {
......@@ -41,9 +42,5 @@
"target": "/var/run/docker-host.sock",
"type": "bind"
}
],
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
]
}
{
"id": "docker-in-docker",
"version": "1.0.0",
"name": "Docker (Docker-in-Docker)",
"description": "Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.",
"options": {
......@@ -42,9 +43,5 @@
"target": "/var/lib/docker",
"type": "volume"
}
],
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
]
}
{
"id": "dotnet",
"version": "1.0.0",
"name": "Dotnet CLI",
"description": "Installs the .NET CLI. Provides option of installing sdk or runtime, and option of versions to install. Uses latest version of .NET sdk as defaults to install.",
"options": {
......
{
"id": "git-lfs",
"version": "1.0.0",
"name": "Git Large File Support (LFS)",
"description": "Installs Git Large File Support (Git LFS) along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like git and curl.",
"options": {
......@@ -12,9 +13,5 @@
"default": "latest",
"description": "Select version of Git LFS to install"
}
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "git",
"version": "1.0.0",
"name": "Git (from source)",
"description": "Install an up-to-date version of Git, built from source as needed. Useful for when you want the latest and greatest features. Auto-detects latest stable version and installs needed dependencies.",
"options": {
......@@ -17,9 +18,5 @@
"default": true,
"description": "Install from PPA if available"
}
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "github-cli",
"version": "1.0.0",
"name": "GitHub CLI",
"description": "Installs the GitHub CLI. Auto-detects latest version and installs needed dependencies.",
"options": {
......@@ -12,9 +13,5 @@
"default": "latest",
"description": "Select version of the GitHub CLI, if not latest."
}
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "go",
"version": "1.0.0",
"name": "Go",
"description": "Installs Go and common Go utilities. Auto-detects latest version and installs needed dependencies.",
"options": {
......@@ -28,9 +29,5 @@
],
"securityOpt": [
"seccomp=unconfined"
],
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
]
}
{
"id": "hugo",
"version": "1.0.0",
"name": "Hugo",
"options": {
"version": {
......@@ -14,9 +15,5 @@
"containerEnv": {
"HUGO_DIR": "/usr/local/hugo",
"PATH": "${HUGO_DIR}/bin:${PATH}"
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "java",
"version": "1.0.0",
"name": "Java (via SDKMAN!)",
"description": "Installs Java, SDKMAN! (if not installed), and needed dependencies.",
"options": {
......
{
"id": "kubectl-helm-minikube",
"version": "1.0.0",
"name": "Kubectl, Helm, and Minkube",
"description": "Installs latest version of kubectl, Helm, and optionally minikube. Auto-detects latest versions and installs needed dependencies.",
"options": {
......@@ -41,9 +42,5 @@
"target": "/home/vscode/.minikube",
"type": "volume"
}
],
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
]
}
{
"id": "node",
"version": "1.0.0",
"name": "Node.js (via nvm) and yarn",
"description": "Installs Node.js, nvm, yarn, and needed dependencies.",
"options": {
......
{
"id": "oryx",
"version": "1.0.0",
"name": "Oryx",
"description": "Installs the oryx CLI",
"containerEnv": {
......
{
"id": "php",
"version": "1.0.0",
"name": "PHP",
"options": {
"version": {
......
{
"id": "powershell",
"version": "1.0.0",
"name": "PowerShell",
"description": "Installs PowerShell along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
"options": {
......@@ -13,9 +14,5 @@
"default": "latest",
"description": "Select or enter a version of PowerShell."
}
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "python",
"version": "1.0.0",
"name": "Python",
"description": "Installs the provided version of Python, as well as PIPX, and other common Python utilities. JupyterLab is conditionally installed with the python feature. Note: May require source code compilation.",
"options": {
......
{
"id": "ruby",
"version": "1.0.0",
"name": "Ruby (via rvm)",
"description": "Installs Ruby, rvm, rbenv, common Ruby utilities, and needed dependencies.",
"options": {
......
{
"id": "rust",
"version": "1.0.0",
"name": "Rust",
"description": "Installs Rust, common Rust utilities, and their required dependencies",
"options": {
......@@ -50,9 +51,5 @@
"**/target/**": true
},
"rust-analyzer.checkOnSave.command": "clippy"
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}
{
"id": "sshd",
"version": "1.0.0",
"name": "SSH server",
"description": "Adds a SSH server into a container so that you can use an external terminal, sftp, or SSHFS to interact with it.",
"options": {
......@@ -12,9 +13,5 @@
"description": "Currently unused."
}
},
"entrypoint": "/usr/local/share/ssh-init.sh",
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
"entrypoint": "/usr/local/share/ssh-init.sh"
}
{
"id": "terraform",
"version": "1.0.0",
"name": "Terraform, tflint, and TFGrunt",
"description": "Installs the Terraform CLI and optionally TFLint and Terragrunt. Auto-detects latest version and installs needed dependencies.",
"options": {
......@@ -42,9 +43,5 @@
"args": []
},
"azureTerraform.terminal": "integrated"
},
"install": {
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
}