Unverified Commit 542963cf authored by Samruddhi Khandale's avatar Samruddhi Khandale Committed by GitHub

Prep: republish features to ghcr.io (#78)

* syncing action

* modify release.yaml

* version update ; remove "install"

* add 'latest'

* add workflow condition
parent 69d3df5f
...@@ -22,15 +22,33 @@ inputs: ...@@ -22,15 +22,33 @@ inputs:
# 'features' options # 'features' options
base-path-to-features: base-path-to-features:
required: false required: false
default: './features/src' default: ''
description: "Relative path to the 'src' folder containing dev container 'feature(s)'" description: "Relative path to the 'src' folder containing dev container 'feature(s)'"
# 'template' options # 'template' options
base-path-to-templates: base-path-to-templates:
required: false required: false
default: './templates/src' default: ''
description: "Relative path to the folder containing dev container 'template(s)'" description: "Relative path to the folder containing dev container 'template(s)'"
# EXPERIMENTAL
tag-individual-features:
required: false
default: "false"
description: "Tag individual features"
publish-to-npm:
required: false
default: "false"
description: "Should publish features to NPM?"
publish-release-artifacts:
required: false
default: "false"
description: "Publish release artifacts (classic)"
publish-to-oci:
required: false
default: "false"
description: "Publish to OCI?"
runs: runs:
using: 'node16' using: 'node16'
main: 'dist/index.js' main: 'dist/index.js'
...@@ -39,95 +39,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge ...@@ -39,95 +39,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.generateFeaturesDocumentation = void 0; exports.generateTemplateDocumentation = exports.generateFeaturesDocumentation = void 0;
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
const github = __importStar(__nccwpck_require__(5438));
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const path = __importStar(__nccwpck_require__(1017)); const path = __importStar(__nccwpck_require__(1017));
function generateFeaturesDocumentation(basePath) { const utils_1 = __nccwpck_require__(918);
return __awaiter(this, void 0, void 0, function* () { const FEATURES_README_TEMPLATE = `
fs.readdir(basePath, (err, files) => { # #{Name}
if (err) {
core.error(err.message);
core.setFailed(`failed to generate 'features' documentation ${err.message}`);
return;
}
files.forEach(f => {
core.info(`Generating docs for feature '${f}'`);
if (f !== '.' && f !== '..') {
const readmePath = path.join(basePath, f, 'README.md');
// Reads in feature.json
const featureJsonPath = path.join(basePath, f, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) {
core.error(`devcontainer-feature.json not found at path '${featureJsonPath}'`);
return;
}
let featureJson = undefined;
try {
featureJson = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
}
catch (err) {
core.error(`Failed to parse ${featureJsonPath}: ${err}`);
return;
}
if (!featureJson || !(featureJson === null || featureJson === void 0 ? void 0 : featureJson.id)) {
core.error(`devcontainer-feature.json for feature '${f}' does not contain an 'id'`);
return;
}
const ref = github.context.ref;
const owner = github.context.repo.owner;
const repo = github.context.repo.repo;
// Add tag if parseable
let versionTag = 'latest';
if (ref.includes('refs/tags/')) {
versionTag = ref.replace('refs/tags/', '');
}
const generateOptionsMarkdown = () => {
const options = featureJson === null || featureJson === void 0 ? void 0 : featureJson.options;
if (!options) {
return '';
}
const keys = Object.keys(options);
const contents = keys
.map(k => {
const val = options[k];
return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
})
.join('\n');
return ('| Options Id | Description | Type | Default Value |\n' +
'|-----|-----|-----|-----|\n' +
contents);
};
const newReadme = README_TEMPLATE.replace('#{nwo}', `${owner}/${repo}`)
.replace('#{versionTag}', versionTag)
.replace('#{featureId}', featureJson.id)
.replace('#{featureName}', featureJson.name
? `${featureJson.name} (${featureJson.id})`
: `${featureJson.id}`)
.replace('#{featureDescription}', featureJson.description ? featureJson.description : '')
.replace('#{optionsTable}', generateOptionsMarkdown());
// Remove previous readme
if (fs.existsSync(readmePath)) {
fs.unlinkSync(readmePath);
}
// Write new readme
fs.writeFileSync(readmePath, newReadme);
}
});
});
});
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
const README_TEMPLATE = `
# #{featureName}
#{featureDescription} #{Description}
## Example Usage ## Example Usage
\`\`\`json \`\`\`json
"features": { "features": {
"#{nwo}/#{featureId}@#{versionTag}": { "#{Nwo}/#{Id}@#{VersionTag}": {
"version": "latest" "version": "latest"
} }
} }
...@@ -135,12 +61,108 @@ const README_TEMPLATE = ` ...@@ -135,12 +61,108 @@ const README_TEMPLATE = `
## Options ## Options
#{optionsTable} #{OptionsTable}
--- ---
_Note: This file was auto-generated from the [devcontainer-feature.json](./devcontainer-feature.json)._ _Note: This file was auto-generated from the [devcontainer-feature.json](#{RepoUrl})._
`;
const TEMPLATE_README_TEMPLATE = `
# #{Name}
#{Description}
## Options
#{OptionsTable}
`; `;
/**
 * Generates a README.md for every feature directory under `basePath`,
 * rendering the features README template from each directory's
 * devcontainer-feature.json.
 * @param {string} basePath - Path to the folder containing one subfolder per feature.
 * @returns {Promise<void>}
 */
function generateFeaturesDocumentation(basePath) {
    return __awaiter(this, void 0, void 0, function* () {
        yield _generateDocumentation(basePath, FEATURES_README_TEMPLATE, 'devcontainer-feature.json');
    });
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
/**
 * Generates a README.md for every template directory under `basePath`,
 * rendering the templates README template from each directory's
 * devcontainer-template.json.
 * @param {string} basePath - Path to the folder containing one subfolder per template.
 * @returns {Promise<void>}
 */
function generateTemplateDocumentation(basePath) {
    return __awaiter(this, void 0, void 0, function* () {
        yield _generateDocumentation(basePath, TEMPLATE_README_TEMPLATE, 'devcontainer-template.json');
    });
}
exports.generateTemplateDocumentation = generateTemplateDocumentation;
/**
 * Shared README generator for features and templates.
 *
 * Walks every non-hidden subdirectory of `basePath`, parses its metadata file
 * (`metadataFile`, i.e. devcontainer-feature.json or devcontainer-template.json),
 * and overwrites that directory's README.md with `readmeTemplate` after
 * substituting the `#{...}` placeholders. Directories with a missing or
 * unparseable metadata file, or one lacking an 'id', are reported via
 * core.error and skipped (the action is not failed).
 *
 * @param {string} basePath - Folder containing one subfolder per feature/template.
 * @param {string} readmeTemplate - Template string with #{...} placeholders.
 * @param {string} metadataFile - Metadata filename expected in each subfolder.
 * @returns {Promise<void>}
 */
function _generateDocumentation(basePath, readmeTemplate, metadataFile) {
    return __awaiter(this, void 0, void 0, function* () {
        const directories = fs.readdirSync(basePath);
        yield Promise.all(directories.map((f) => __awaiter(this, void 0, void 0, function* () {
            var _a, _b, _c;
            if (!f.startsWith('.')) {
                const readmePath = path.join(basePath, f, 'README.md');
                // Read in the metadata file (feature or template).
                const jsonPath = path.join(basePath, f, metadataFile);
                if (!fs.existsSync(jsonPath)) {
                    core.error(`${metadataFile} not found at path '${jsonPath}'`);
                    return;
                }
                let parsedJson = undefined;
                try {
                    parsedJson = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
                }
                catch (err) {
                    core.error(`Failed to parse ${jsonPath}: ${err}`);
                    return;
                }
                if (!parsedJson || !(parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.id)) {
                    core.error(`${metadataFile} for '${f}' does not contain an 'id'`);
                    return;
                }
                const srcInfo = (0, utils_1.getGitHubMetadata)();
                const ref = srcInfo.ref;
                const owner = srcInfo.owner;
                const repo = srcInfo.repo;
                // Derive a version tag from the git ref when it is a tag ref
                // (e.g. 'refs/tags/v1.2.3' -> 'v1.2.3'); otherwise 'latest'.
                let versionTag = 'latest';
                if (ref && ref.includes('refs/tags/')) {
                    versionTag = ref.replace('refs/tags/', '');
                }
                // Renders the metadata's 'options' object (if any) as a markdown table.
                const generateOptionsMarkdown = () => {
                    const options = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.options;
                    if (!options) {
                        return '';
                    }
                    const keys = Object.keys(options);
                    const contents = keys
                        .map(k => {
                        const val = options[k];
                        return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
                    })
                        .join('\n');
                    return '| Options Id | Description | Type | Default Value |\n' + '|-----|-----|-----|-----|\n' + contents;
                };
                // FIX: link to this collection type's actual metadata file.
                // Previously 'devcontainer-feature.json' was hard-coded here, which
                // produced wrong #{RepoUrl} links when generating TEMPLATE docs.
                let urlToConfig = `./${metadataFile}`;
                const basePathTrimmed = basePath.startsWith('./') ? basePath.substring(2) : basePath;
                if (srcInfo.owner && srcInfo.repo) {
                    urlToConfig = `https://github.com/${srcInfo.owner}/${srcInfo.repo}/blob/main/${basePathTrimmed}/${f}/${metadataFile}`;
                }
                const newReadme = readmeTemplate
                    // Templates & Features
                    .replace('#{Id}', parsedJson.id)
                    .replace('#{Name}', parsedJson.name ? `${parsedJson.name} (${parsedJson.id})` : `${parsedJson.id}`)
                    .replace('#{Description}', (_a = parsedJson.description) !== null && _a !== void 0 ? _a : '')
                    .replace('#{OptionsTable}', generateOptionsMarkdown())
                    // Features Only
                    .replace('#{Nwo}', `${owner}/${repo}`)
                    .replace('#{VersionTag}', versionTag)
                    // Templates Only
                    .replace('#{ManifestName}', (_c = (_b = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.image) === null || _b === void 0 ? void 0 : _b.manifest) !== null && _c !== void 0 ? _c : '')
                    .replace('#{RepoUrl}', urlToConfig);
                // Remove previous readme
                if (fs.existsSync(readmePath)) {
                    fs.unlinkSync(readmePath);
                }
                // Write new readme
                fs.writeFileSync(readmePath, newReadme);
            }
        })));
    });
}
/***/ }), /***/ }),
...@@ -195,42 +217,51 @@ function run() { ...@@ -195,42 +217,51 @@ function run() {
core.debug('Reading input parameters...'); core.debug('Reading input parameters...');
// Read inputs // Read inputs
const shouldPublishFeatures = core.getInput('publish-features').toLowerCase() === 'true'; const shouldPublishFeatures = core.getInput('publish-features').toLowerCase() === 'true';
const shouldPublishTemplate = core.getInput('publish-templates').toLowerCase() === 'true'; const shouldPublishTemplates = core.getInput('publish-templates').toLowerCase() === 'true';
const shouldGenerateDocumentation = core.getInput('generate-docs').toLowerCase() === 'true'; const shouldGenerateDocumentation = core.getInput('generate-docs').toLowerCase() === 'true';
// Experimental
const shouldTagIndividualFeatures = core.getInput('tag-individual-features').toLowerCase() === 'true';
const shouldPublishToNPM = core.getInput('publish-to-npm').toLowerCase() === 'true';
const shouldPublishReleaseArtifacts = core.getInput('publish-release-artifacts').toLowerCase() === 'true';
const shouldPublishToOCI = core.getInput('publish-to-oci').toLowerCase() === 'true';
const opts = {
shouldTagIndividualFeatures,
shouldPublishToNPM,
shouldPublishReleaseArtifacts,
shouldPublishToOCI
};
const featuresBasePath = core.getInput('base-path-to-features');
const templatesBasePath = core.getInput('base-path-to-templates');
let featuresMetadata = undefined; let featuresMetadata = undefined;
let templatesMetadata = undefined; let templatesMetadata = undefined;
// -- Package Release Artifacts
if (shouldPublishFeatures) { if (shouldPublishFeatures) {
core.info('Publishing features...'); core.info('Publishing features...');
const featuresBasePath = core.getInput('base-path-to-features'); featuresMetadata = yield packageFeatures(featuresBasePath, opts);
featuresMetadata = yield packageFeatures(featuresBasePath);
} }
if (shouldPublishTemplate) { if (shouldPublishTemplates) {
core.info('Publishing template...'); core.info('Publishing template...');
const basePathToDefinitions = core.getInput('base-path-to-templates'); templatesMetadata = yield packageTemplates(templatesBasePath);
templatesMetadata = undefined; // TODO
yield packageTemplates(basePathToDefinitions);
} }
if (shouldGenerateDocumentation) { // -- Generate Documentation
core.info('Generating documentation...'); if (shouldGenerateDocumentation && featuresBasePath) {
const featuresBasePath = core.getInput('base-path-to-features'); core.info('Generating documentation for features...');
if (featuresBasePath) { yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath);
yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath); }
} if (shouldGenerateDocumentation && templatesBasePath) {
else { core.info('Generating documentation for templates...');
core.error("'base-path-to-features' input is required to generate documentation"); yield (0, generateDocs_1.generateTemplateDocumentation)(templatesBasePath);
}
// TODO: base-path-to-templates
} }
// TODO: Programatically add feature/template fino with relevant metadata for UX clients. // -- Programatically add feature/template metadata to collections file.
core.info('Generation metadata file: devcontainer-collection.json'); core.info('Generating metadata file: devcontainer-collection.json');
yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata); yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata, opts);
}); });
} }
function packageFeatures(basePath) { function packageFeatures(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
try { try {
core.info(`Archiving all features in ${basePath}`); core.info(`Archiving all features in ${basePath}`);
const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath); const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath, opts);
core.info('Packaging features has finished.'); core.info('Packaging features has finished.');
return metadata; return metadata;
} }
...@@ -245,14 +276,17 @@ function packageFeatures(basePath) { ...@@ -245,14 +276,17 @@ function packageFeatures(basePath) {
function packageTemplates(basePath) { function packageTemplates(basePath) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
try { try {
core.info(`Archiving all templated in ${basePath}`); core.info(`Archiving all templates in ${basePath}`);
yield (0, utils_1.getTemplatesAndPackage)(basePath); const metadata = yield (0, utils_1.getTemplatesAndPackage)(basePath);
core.info('Packaging templates has finished.'); core.info('Packaging templates has finished.');
return metadata;
} }
catch (error) { catch (error) {
if (error instanceof Error) if (error instanceof Error) {
core.setFailed(error.message); core.setFailed(error.message);
}
} }
return;
}); });
} }
run(); run();
...@@ -301,11 +335,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) { ...@@ -301,11 +335,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.addCollectionsMetadataFile = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0; exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.pushCollectionsMetadataToOCI = exports.addCollectionsMetadataFile = exports.getGitHubMetadata = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0;
const github = __importStar(__nccwpck_require__(5438)); const github = __importStar(__nccwpck_require__(5438));
const tar = __importStar(__nccwpck_require__(4674)); const tar = __importStar(__nccwpck_require__(4674));
const fs = __importStar(__nccwpck_require__(7147)); const fs = __importStar(__nccwpck_require__(7147));
const core = __importStar(__nccwpck_require__(2186)); const core = __importStar(__nccwpck_require__(2186));
const child_process = __importStar(__nccwpck_require__(2081));
const util_1 = __nccwpck_require__(3837); const util_1 = __nccwpck_require__(3837);
const path_1 = __importDefault(__nccwpck_require__(1017)); const path_1 = __importDefault(__nccwpck_require__(1017));
exports.readLocalFile = (0, util_1.promisify)(fs.readFile); exports.readLocalFile = (0, util_1.promisify)(fs.readFile);
...@@ -328,23 +363,83 @@ function tarDirectory(path, tgzName) { ...@@ -328,23 +363,83 @@ function tarDirectory(path, tgzName) {
}); });
} }
exports.tarDirectory = tarDirectory; exports.tarDirectory = tarDirectory;
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) { function getGitHubMetadata() {
// Insert github repo metadata
const ref = github.context.ref;
let sourceInformation = {
owner: github.context.repo.owner,
repo: github.context.repo.repo,
ref,
sha: github.context.sha
};
// Add tag if parseable
if (ref.includes('refs/tags/')) {
const tag = ref.replace('refs/tags/', '');
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag });
}
return sourceInformation;
}
exports.getGitHubMetadata = getGitHubMetadata;
function tagFeatureAtVersion(featureMetaData) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const p = path_1.default.join('.', 'devcontainer-collection.json'); const featureId = featureMetaData.id;
// Insert github repo metadata const featureVersion = featureMetaData.version;
const ref = github.context.ref; const tagName = `${featureId}_v${featureVersion}`;
let sourceInformation = { // Get GITHUB_TOKEN from environment
source: 'github', const githubToken = process.env.GITHUB_TOKEN;
if (!githubToken) {
core.setFailed('GITHUB_TOKEN environment variable is not set.');
return;
}
// Setup Octokit client
const octokit = github.getOctokit(githubToken);
// Use octokit to get all tags for this repo
const tags = yield octokit.rest.repos.listTags({
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
// See if tags for this release was already created.
const tagExists = tags.data.some(tag => tag.name === tagName);
if (tagExists) {
core.info(`Tag ${tagName} already exists. Skipping...`);
return;
}
// Create tag
const createdTag = yield octokit.rest.git.createTag({
tag: tagName,
message: `Feature ${featureId} version ${featureVersion}`,
object: github.context.sha,
type: 'commit',
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
if (createdTag.status === 201) {
core.info(`Tagged '${tagName}'`);
}
else {
core.setFailed(`Failed to tag '${tagName}'`);
return;
}
// Create reference to tag
const createdRef = yield octokit.rest.git.createRef({
owner: github.context.repo.owner, owner: github.context.repo.owner,
repo: github.context.repo.repo, repo: github.context.repo.repo,
ref, ref: `refs/tags/${tagName}`,
sha: github.context.sha sha: createdTag.data.sha
}; });
// Add tag if parseable if (createdRef.status === 201) {
if (ref.includes('refs/tags/')) { core.info(`Created reference for '${tagName}'`);
const tag = ref.replace('refs/tags/', ''); }
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag }); else {
core.setFailed(`Failed to reference of tag '${tagName}'`);
return;
} }
});
}
// Writes the aggregate 'devcontainer-collection.json' at the repo root,
// combining GitHub source metadata with the packaged feature/template
// metadata, and (when opts.shouldPublishToOCI is set) pushes it to ghcr.io.
// NOTE: lines below still show both diff columns merged; left byte-identical.
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata, opts) {
    return __awaiter(this, void 0, void 0, function* () {
        const p = path_1.default.join('.', 'devcontainer-collection.json');
        const sourceInformation = getGitHubMetadata();
        const metadata = { const metadata = {
        sourceInformation, sourceInformation,
        features: featuresMetadata || [], features: featuresMetadata || [],
        ...@@ -352,27 +447,148 @@ function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) { ...@@ -352,27 +447,148 @@ function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) {
        }; };
        // Write to the file // Write to the file
        yield (0, exports.writeLocalFile)(p, JSON.stringify(metadata, undefined, 4)); yield (0, exports.writeLocalFile)(p, JSON.stringify(metadata, undefined, 4));
        if (opts.shouldPublishToOCI) {
            // NOTE(review): this async call is not yield-ed — it runs as a
            // floating promise, so push failures can be silently lost and the
            // action may finish before the push does. Consider
            // `yield pushCollectionsMetadataToOCI(p);` — confirm intent.
            pushCollectionsMetadataToOCI(p);
        }
    }); });
} }
exports.addCollectionsMetadataFile = addCollectionsMetadataFile; exports.addCollectionsMetadataFile = addCollectionsMetadataFile;
function getFeaturesAndPackage(basePath) { function pushArtifactToOCI(version, featureName, artifactPath) {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
const versions = [version, '1.0', '1', 'latest']; // TODO: Generate semantic versions from 'version'
const sourceInfo = getGitHubMetadata();
yield Promise.all(versions.map((v) => __awaiter(this, void 0, void 0, function* () {
const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}/${featureName}:${v}`;
try {
const cmd = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${artifactPath}:application/vnd.devcontainers.layer.v1+tar`;
yield exec(cmd);
core.info(`Pushed artifact to '${ociRepo}'`);
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Failed to push '${ociRepo}': ${error.message}`);
}
})));
});
}
/**
 * Publishes the generated devcontainer-collection.json to ghcr.io under
 * '<owner>/<repo>:latest' using the `oras` CLI. A push failure marks the
 * whole action as failed via core.setFailed.
 * @param {string} collectionJsonPath - Path to the collection metadata file.
 * @returns {Promise<void>}
 */
function pushCollectionsMetadataToOCI(collectionJsonPath) {
    return __awaiter(this, void 0, void 0, function* () {
        // Target repository is derived from the workflow's GitHub context.
        const sourceInfo = getGitHubMetadata();
        const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}:latest`;
        const run = (0, util_1.promisify)(child_process.exec);
        const pushCommand = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${collectionJsonPath}:application/vnd.devcontainers.collection.layer.v1+json`;
        try {
            yield run(pushCommand);
            core.info(`Pushed collection metadata to '${ociRepo}'`);
        }
        catch (error) {
            if (error instanceof Error) {
                core.setFailed(`Failed to push collection metadata '${ociRepo}': ${error.message}`);
            }
        }
    });
}
exports.pushCollectionsMetadataToOCI = pushCollectionsMetadataToOCI;
/**
 * Authenticates the `oras` CLI against ghcr.io with the GITHUB_TOKEN
 * environment variable. Marks the action failed when the token is absent
 * or the login command errors.
 * @returns {Promise<void>}
 */
function loginToGHCR() {
    return __awaiter(this, void 0, void 0, function* () {
        // Token is supplied by the workflow environment.
        const token = process.env.GITHUB_TOKEN;
        if (!token) {
            core.setFailed('GITHUB_TOKEN environment variable is not set.');
            return;
        }
        const run = (0, util_1.promisify)(child_process.exec);
        try {
            // NOTE(review): the token is interpolated onto the command line and
            // can be visible in process listings; `oras login --password-stdin`
            // would be safer — confirm before changing behavior.
            yield run(`oras login ghcr.io -u USERNAME -p ${token}`);
            core.info('Oras logged in successfully!');
        }
        catch (err) {
            if (err instanceof Error) {
                core.setFailed(` Oras login failed!`);
            }
        }
    });
}
function getFeaturesAndPackage(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const { shouldPublishToNPM, shouldTagIndividualFeatures, shouldPublishReleaseArtifacts, shouldPublishToOCI } = opts;
const featureDirs = fs.readdirSync(basePath); const featureDirs = fs.readdirSync(basePath);
let metadatas = []; let metadatas = [];
const exec = (0, util_1.promisify)(child_process.exec);
if (shouldPublishToOCI) {
yield loginToGHCR();
}
yield Promise.all(featureDirs.map((f) => __awaiter(this, void 0, void 0, function* () { yield Promise.all(featureDirs.map((f) => __awaiter(this, void 0, void 0, function* () {
var _a;
core.info(`feature ==> ${f}`); core.info(`feature ==> ${f}`);
if (f !== '.' && f !== '..') { if (!f.startsWith('.')) {
const featureFolder = path_1.default.join(basePath, f); const featureFolder = path_1.default.join(basePath, f);
const archiveName = `${f}.tgz`;
yield tarDirectory(`${basePath}/${f}`, archiveName);
const featureJsonPath = path_1.default.join(featureFolder, 'devcontainer-feature.json'); const featureJsonPath = path_1.default.join(featureFolder, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) { if (!fs.existsSync(featureJsonPath)) {
core.error(`Feature ${f} is missing a devcontainer-feature.json`); core.error(`Feature '${f}' is missing a devcontainer-feature.json`);
core.setFailed('All features must have a devcontainer-feature.json'); core.setFailed('All features must have a devcontainer-feature.json');
return; return;
} }
const featureMetadata = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8')); const featureMetadata = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
if (!featureMetadata.id || !featureMetadata.version) {
core.error(`Feature '${f}' is must defined an id and version`);
core.setFailed('Incomplete devcontainer-feature.json');
}
metadatas.push(featureMetadata); metadatas.push(featureMetadata);
const sourceInfo = getGitHubMetadata();
if (!sourceInfo.owner) {
core.setFailed('Could not determine repository owner.');
return;
}
const archiveName = `${f}.tgz`;
// ---- PUBLISH RELEASE ARTIFACTS (classic method) ----
if (shouldPublishReleaseArtifacts || shouldPublishToOCI) {
core.info(`** Tar'ing feature`);
yield tarDirectory(featureFolder, archiveName);
}
// ---- PUBLISH TO NPM ----
if (shouldPublishToOCI) {
core.info(`** Publishing to OCI`);
// TODO: CHECK IF THE FEATURE IS ALREADY PUBLISHED UNDER GIVEN TAG
yield pushArtifactToOCI(featureMetadata.version, f, archiveName);
}
// ---- TAG INDIVIDUAL FEATURES ----
if (shouldTagIndividualFeatures) {
core.info(`** Tagging individual feature`);
yield tagFeatureAtVersion(featureMetadata);
}
// ---- PUBLISH TO NPM ----
if (shouldPublishToNPM) {
core.info(`** Publishing to NPM`);
// Adds a package.json file to the feature folder
const packageJsonPath = path_1.default.join(featureFolder, 'package.json');
// if (!sourceInfo.tag) {
// core.error(`Feature ${f} is missing a tag! Cannot publish to NPM.`);
// core.setFailed('All features published to NPM must be tagged with a version');
// }
const packageJsonObject = {
name: `@${sourceInfo.owner}/${f}`,
version: featureMetadata.version,
description: `${(_a = featureMetadata.description) !== null && _a !== void 0 ? _a : 'My cool feature'}`,
author: `${sourceInfo.owner}`,
keywords: ['devcontainer-features']
};
yield (0, exports.writeLocalFile)(packageJsonPath, JSON.stringify(packageJsonObject, undefined, 4));
core.info(`Feature Folder is: ${featureFolder}`);
// Run npm pack, which 'tars' the folder
const packageName = yield exec(`npm pack ./${featureFolder}`);
if (packageName.stderr) {
core.error(`${packageName.stderr.toString()}`);
}
const publishOutput = yield exec(`npm publish --access public "${packageName.stdout.trim()}"`);
core.info(publishOutput.stdout);
if (publishOutput.stderr) {
core.error(`${publishOutput.stderr}`);
}
}
} }
}))); })));
if (metadatas.length === 0) { if (metadatas.length === 0) {
...@@ -385,23 +601,29 @@ function getFeaturesAndPackage(basePath) { ...@@ -385,23 +601,29 @@ function getFeaturesAndPackage(basePath) {
exports.getFeaturesAndPackage = getFeaturesAndPackage; exports.getFeaturesAndPackage = getFeaturesAndPackage;
function getTemplatesAndPackage(basePath) { function getTemplatesAndPackage(basePath) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
let archives = []; const templateDirs = fs.readdirSync(basePath);
fs.readdir(basePath, (err, files) => { let metadatas = [];
if (err) { yield Promise.all(templateDirs.map((t) => __awaiter(this, void 0, void 0, function* () {
core.error(err.message); core.info(`template ==> ${t}`);
core.setFailed(`failed to get list of templates: ${err.message}`); if (!t.startsWith('.')) {
return; const templateFolder = path_1.default.join(basePath, t);
} const archiveName = `devcontainer-template-${t}.tgz`;
files.forEach(file => { // await tarDirectory(templateFolder, archiveName);
core.info(`template ==> ${file}`); const templateJsonPath = path_1.default.join(templateFolder, 'devcontainer-template.json');
if (file !== '.' && file !== '..') { if (!fs.existsSync(templateJsonPath)) {
const archiveName = `devcontainer-definition-${file}.tgz`; core.error(`Template '${t}' is missing a devcontainer-template.json`);
tarDirectory(`${basePath}/${file}`, archiveName); core.setFailed('All templates must have a devcontainer-template.json');
archives.push(archiveName); return;
} }
}); const templateMetadata = JSON.parse(fs.readFileSync(templateJsonPath, 'utf8'));
}); metadatas.push(templateMetadata);
return archives; }
})));
if (metadatas.length === 0) {
core.setFailed('No templates found');
return;
}
return metadatas;
}); });
} }
exports.getTemplatesAndPackage = getTemplatesAndPackage; exports.getTemplatesAndPackage = getTemplatesAndPackage;
...@@ -823,6 +1045,23 @@ function getIDToken(aud) { ...@@ -823,6 +1045,23 @@ function getIDToken(aud) {
}); });
} }
exports.getIDToken = getIDToken; exports.getIDToken = getIDToken;
/**
* Summary exports
*/
var summary_1 = __nccwpck_require__(1327);
Object.defineProperty(exports, "summary", ({ enumerable: true, get: function () { return summary_1.summary; } }));
/**
* @deprecated use core.summary
*/
var summary_2 = __nccwpck_require__(1327);
Object.defineProperty(exports, "markdownSummary", ({ enumerable: true, get: function () { return summary_2.markdownSummary; } }));
/**
* Path exports
*/
var path_utils_1 = __nccwpck_require__(2981);
Object.defineProperty(exports, "toPosixPath", ({ enumerable: true, get: function () { return path_utils_1.toPosixPath; } }));
Object.defineProperty(exports, "toWin32Path", ({ enumerable: true, get: function () { return path_utils_1.toWin32Path; } }));
Object.defineProperty(exports, "toPlatformPath", ({ enumerable: true, get: function () { return path_utils_1.toPlatformPath; } }));
//# sourceMappingURL=core.js.map //# sourceMappingURL=core.js.map
/***/ }), /***/ }),
...@@ -892,8 +1131,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge ...@@ -892,8 +1131,8 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.OidcClient = void 0; exports.OidcClient = void 0;
const http_client_1 = __nccwpck_require__(9925); const http_client_1 = __nccwpck_require__(6255);
const auth_1 = __nccwpck_require__(3702); const auth_1 = __nccwpck_require__(5526);
const core_1 = __nccwpck_require__(2186); const core_1 = __nccwpck_require__(2186);
class OidcClient { class OidcClient {
static createHttpClient(allowRetry = true, maxRetry = 10) { static createHttpClient(allowRetry = true, maxRetry = 10) {
...@@ -960,97 +1199,452 @@ exports.OidcClient = OidcClient; ...@@ -960,97 +1199,452 @@ exports.OidcClient = OidcClient;
/***/ }), /***/ }),
/***/ 5278: /***/ 2981:
/***/ ((__unused_webpack_module, exports) => { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict"; "use strict";
// We use any as a valid input type var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
/* eslint-disable @typescript-eslint/no-explicit-any */ if (k2 === undefined) k2 = k;
Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } });
}) : (function(o, m, k, k2) {
if (k2 === undefined) k2 = k;
o[k2] = m[k];
}));
var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) {
Object.defineProperty(o, "default", { enumerable: true, value: v });
}) : function(o, v) {
o["default"] = v;
});
var __importStar = (this && this.__importStar) || function (mod) {
if (mod && mod.__esModule) return mod;
var result = {};
if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k);
__setModuleDefault(result, mod);
return result;
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandProperties = exports.toCommandValue = void 0; exports.toPlatformPath = exports.toWin32Path = exports.toPosixPath = void 0;
const path = __importStar(__nccwpck_require__(1017));
/** /**
* Sanitizes an input into a string so it can be passed into issueCommand safely * toPosixPath converts the given path to the posix form. On Windows, \\ will be
* @param input input to sanitize into a string * replaced with /.
*
* @param pth. Path to transform.
* @return string Posix path.
*/ */
function toCommandValue(input) { function toPosixPath(pth) {
if (input === null || input === undefined) { return pth.replace(/[\\]/g, '/');
return '';
}
else if (typeof input === 'string' || input instanceof String) {
return input;
}
return JSON.stringify(input);
} }
exports.toCommandValue = toCommandValue; exports.toPosixPath = toPosixPath;
/** /**
* toWin32Path converts the given path to the win32 form. On Linux, / will be
* replaced with \\.
* *
* @param annotationProperties * @param pth. Path to transform.
* @returns The command properties to send with the actual annotation command * @return string Win32 path.
* See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
*/ */
function toCommandProperties(annotationProperties) { function toWin32Path(pth) {
if (!Object.keys(annotationProperties).length) { return pth.replace(/[/]/g, '\\');
return {};
}
return {
title: annotationProperties.title,
file: annotationProperties.file,
line: annotationProperties.startLine,
endLine: annotationProperties.endLine,
col: annotationProperties.startColumn,
endColumn: annotationProperties.endColumn
};
} }
exports.toCommandProperties = toCommandProperties; exports.toWin32Path = toWin32Path;
//# sourceMappingURL=utils.js.map /**
* toPlatformPath converts the given path to a platform-specific path. It does
* this by replacing instances of / and \ with the platform-specific path
* separator.
*
* @param pth The path to platformize.
* @return string The platform-specific path.
*/
function toPlatformPath(pth) {
return pth.replace(/[/\\]/g, path.sep);
}
exports.toPlatformPath = toPlatformPath;
//# sourceMappingURL=path-utils.js.map
/***/ }), /***/ }),
/***/ 4087: /***/ 1327:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict"; "use strict";
// TypeScript-emitted helper: drives a generator (compiled async function) to
// completion, resolving the returned promise with the generator's final value
// and rejecting if the generator throws. P defaults to the global Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yielded values so `.then` can always be chained.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Context = void 0; exports.summary = exports.markdownSummary = exports.SUMMARY_DOCS_URL = exports.SUMMARY_ENV_VAR = void 0;
const fs_1 = __nccwpck_require__(7147);
const os_1 = __nccwpck_require__(2037); const os_1 = __nccwpck_require__(2037);
class Context { const fs_1 = __nccwpck_require__(7147);
const { access, appendFile, writeFile } = fs_1.promises;
exports.SUMMARY_ENV_VAR = 'GITHUB_STEP_SUMMARY';
exports.SUMMARY_DOCS_URL = 'https://docs.github.com/actions/using-workflows/workflow-commands-for-github-actions#adding-a-job-summary';
class Summary {
constructor() {
this._buffer = '';
}
/** /**
* Hydrate the context from the environment * Finds the summary file path from the environment, rejects if env var is not found or file does not exist
* Also checks r/w permissions.
*
* @returns step summary file path
*/ */
constructor() { filePath() {
var _a, _b, _c; return __awaiter(this, void 0, void 0, function* () {
this.payload = {}; if (this._filePath) {
if (process.env.GITHUB_EVENT_PATH) { return this._filePath;
if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
} }
else { const pathFromEnv = process.env[exports.SUMMARY_ENV_VAR];
const path = process.env.GITHUB_EVENT_PATH; if (!pathFromEnv) {
process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); throw new Error(`Unable to find environment variable for $${exports.SUMMARY_ENV_VAR}. Check if your runtime environment supports job summaries.`);
} }
} try {
this.eventName = process.env.GITHUB_EVENT_NAME; yield access(pathFromEnv, fs_1.constants.R_OK | fs_1.constants.W_OK);
this.sha = process.env.GITHUB_SHA; }
this.ref = process.env.GITHUB_REF; catch (_a) {
this.workflow = process.env.GITHUB_WORKFLOW; throw new Error(`Unable to access summary file: '${pathFromEnv}'. Check if the file has correct read/write permissions.`);
this.action = process.env.GITHUB_ACTION; }
this.actor = process.env.GITHUB_ACTOR; this._filePath = pathFromEnv;
this.job = process.env.GITHUB_JOB; return this._filePath;
this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); });
this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
} }
get issue() { /**
const payload = this.payload; * Wraps content in an HTML tag, adding any HTML attributes
return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); *
* @param {string} tag HTML tag to wrap
* @param {string | null} content content within the tag
* @param {[attribute: string]: string} attrs key-value list of HTML attributes to add
*
* @returns {string} content wrapped in HTML element
*/
wrap(tag, content, attrs = {}) {
const htmlAttrs = Object.entries(attrs)
.map(([key, value]) => ` ${key}="${value}"`)
.join('');
if (!content) {
return `<${tag}${htmlAttrs}>`;
}
return `<${tag}${htmlAttrs}>${content}</${tag}>`;
} }
get repo() { /**
if (process.env.GITHUB_REPOSITORY) { * Writes text in the buffer to the summary buffer file and empties buffer. Will append by default.
*
* @param {SummaryWriteOptions} [options] (optional) options for write operation
*
* @returns {Promise<Summary>} summary instance
*/
write(options) {
return __awaiter(this, void 0, void 0, function* () {
const overwrite = !!(options === null || options === void 0 ? void 0 : options.overwrite);
const filePath = yield this.filePath();
const writeFunc = overwrite ? writeFile : appendFile;
yield writeFunc(filePath, this._buffer, { encoding: 'utf8' });
return this.emptyBuffer();
});
}
/**
* Clears the summary buffer and wipes the summary file
*
* @returns {Summary} summary instance
*/
clear() {
return __awaiter(this, void 0, void 0, function* () {
return this.emptyBuffer().write({ overwrite: true });
});
}
    /**
     * Returns the current summary buffer as a string, without flushing it
     * to the summary file.
     *
     * @returns {string} string of summary buffer
     */
    stringify() {
        return this._buffer;
    }
    /**
     * If the summary buffer is empty
     *
     * @returns {boolean} true if the buffer is empty
     */
    isEmptyBuffer() {
        return this._buffer.length === 0;
    }
    /**
     * Resets the summary buffer without writing to summary file.
     * Unlike clear(), this only touches the in-memory buffer.
     *
     * @returns {Summary} summary instance
     */
    emptyBuffer() {
        this._buffer = '';
        return this;
    }
/**
* Adds raw text to the summary buffer
*
* @param {string} text content to add
* @param {boolean} [addEOL=false] (optional) append an EOL to the raw text (default: false)
*
* @returns {Summary} summary instance
*/
addRaw(text, addEOL = false) {
this._buffer += text;
return addEOL ? this.addEOL() : this;
}
    /**
     * Adds the operating system-specific end-of-line marker to the buffer
     * (os.EOL: '\n' on POSIX, '\r\n' on Windows).
     *
     * @returns {Summary} summary instance
     */
    addEOL() {
        return this.addRaw(os_1.EOL);
    }
/**
* Adds an HTML codeblock to the summary buffer
*
* @param {string} code content to render within fenced code block
* @param {string} lang (optional) language to syntax highlight code
*
* @returns {Summary} summary instance
*/
addCodeBlock(code, lang) {
const attrs = Object.assign({}, (lang && { lang }));
const element = this.wrap('pre', this.wrap('code', code), attrs);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML list to the summary buffer
*
* @param {string[]} items list of items to render
* @param {boolean} [ordered=false] (optional) if the rendered list should be ordered or not (default: false)
*
* @returns {Summary} summary instance
*/
addList(items, ordered = false) {
const tag = ordered ? 'ol' : 'ul';
const listItems = items.map(item => this.wrap('li', item)).join('');
const element = this.wrap(tag, listItems);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML table to the summary buffer
*
* @param {SummaryTableCell[]} rows table rows
*
* @returns {Summary} summary instance
*/
addTable(rows) {
const tableBody = rows
.map(row => {
const cells = row
.map(cell => {
if (typeof cell === 'string') {
return this.wrap('td', cell);
}
const { header, data, colspan, rowspan } = cell;
const tag = header ? 'th' : 'td';
const attrs = Object.assign(Object.assign({}, (colspan && { colspan })), (rowspan && { rowspan }));
return this.wrap(tag, data, attrs);
})
.join('');
return this.wrap('tr', cells);
})
.join('');
const element = this.wrap('table', tableBody);
return this.addRaw(element).addEOL();
}
/**
* Adds a collapsable HTML details element to the summary buffer
*
* @param {string} label text for the closed state
* @param {string} content collapsable content
*
* @returns {Summary} summary instance
*/
addDetails(label, content) {
const element = this.wrap('details', this.wrap('summary', label) + content);
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML image tag to the summary buffer
*
* @param {string} src path to the image you to embed
* @param {string} alt text description of the image
* @param {SummaryImageOptions} options (optional) addition image attributes
*
* @returns {Summary} summary instance
*/
addImage(src, alt, options) {
const { width, height } = options || {};
const attrs = Object.assign(Object.assign({}, (width && { width })), (height && { height }));
const element = this.wrap('img', null, Object.assign({ src, alt }, attrs));
return this.addRaw(element).addEOL();
}
/**
* Adds an HTML section heading element
*
* @param {string} text heading text
* @param {number | string} [level=1] (optional) the heading level, default: 1
*
* @returns {Summary} summary instance
*/
addHeading(text, level) {
const tag = `h${level}`;
const allowedTag = ['h1', 'h2', 'h3', 'h4', 'h5', 'h6'].includes(tag)
? tag
: 'h1';
const element = this.wrap(allowedTag, text);
return this.addRaw(element).addEOL();
}
    /**
     * Adds an HTML thematic break (<hr>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addSeparator() {
        // <hr> is a void element: null content emits only the opening tag.
        const element = this.wrap('hr', null);
        return this.addRaw(element).addEOL();
    }
    /**
     * Adds an HTML line break (<br>) to the summary buffer
     *
     * @returns {Summary} summary instance
     */
    addBreak() {
        // <br> is a void element: null content emits only the opening tag.
        const element = this.wrap('br', null);
        return this.addRaw(element).addEOL();
    }
/**
* Adds an HTML blockquote to the summary buffer
*
* @param {string} text quote text
* @param {string} cite (optional) citation url
*
* @returns {Summary} summary instance
*/
addQuote(text, cite) {
const attrs = Object.assign({}, (cite && { cite }));
const element = this.wrap('blockquote', text, attrs);
return this.addRaw(element).addEOL();
}
    /**
     * Adds an HTML anchor tag to the summary buffer
     *
     * @param {string} text link text/content
     * @param {string} href hyperlink target URL
     *
     * @returns {Summary} summary instance
     */
    addLink(text, href) {
        const element = this.wrap('a', text, { href });
        return this.addRaw(element).addEOL();
    }
}
// Module-level singleton: `markdownSummary` and `summary` alias the same instance.
const _summary = new Summary();
/**
 * @deprecated use `core.summary`
 */
exports.markdownSummary = _summary;
exports.summary = _summary;
//# sourceMappingURL=summary.js.map
/***/ }),
/***/ 5278:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
// We use any as a valid input type
/* eslint-disable @typescript-eslint/no-explicit-any */
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.toCommandProperties = exports.toCommandValue = void 0;
/**
 * Sanitizes an input into a string so it can be passed into issueCommand safely
 * @param input input to sanitize into a string: null/undefined become '',
 *        strings pass through unchanged, anything else is JSON-stringified
 */
function toCommandValue(input) {
    if (input === null || input === undefined) {
        return '';
    }
    const isStringLike = typeof input === 'string' || input instanceof String;
    return isStringLike ? input : JSON.stringify(input);
}
exports.toCommandValue = toCommandValue;
/**
 * Maps AnnotationProperties onto the property names expected by the runner's
 * annotation commands (startLine -> line, startColumn -> col).
 *
 * @param annotationProperties annotation metadata (title, file, line/column range)
 * @returns The command properties to send with the actual annotation command
 * See IssueCommandProperties: https://github.com/actions/runner/blob/main/src/Runner.Worker/ActionCommandManager.cs#L646
 */
function toCommandProperties(annotationProperties) {
    if (Object.keys(annotationProperties).length === 0) {
        return {};
    }
    const { title, file, startLine, endLine, startColumn, endColumn } = annotationProperties;
    return {
        title,
        file,
        line: startLine,
        endLine,
        col: startColumn,
        endColumn
    };
}
exports.toCommandProperties = toCommandProperties;
//# sourceMappingURL=utils.js.map
/***/ }),
/***/ 4087:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.Context = void 0;
const fs_1 = __nccwpck_require__(7147);
const os_1 = __nccwpck_require__(2037);
class Context {
    /**
     * Hydrate the context from the environment.
     *
     * Reads the GITHUB_* environment variables provided by the Actions runner
     * and, when GITHUB_EVENT_PATH points at an existing file, parses the
     * webhook payload JSON from it.
     */
    constructor() {
        var _a, _b, _c;
        this.payload = {};
        if (process.env.GITHUB_EVENT_PATH) {
            if (fs_1.existsSync(process.env.GITHUB_EVENT_PATH)) {
                this.payload = JSON.parse(fs_1.readFileSync(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' }));
            }
            else {
                // Event file advertised but missing: warn on stdout and keep an empty payload.
                const path = process.env.GITHUB_EVENT_PATH;
                process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`);
            }
        }
        this.eventName = process.env.GITHUB_EVENT_NAME;
        this.sha = process.env.GITHUB_SHA;
        this.ref = process.env.GITHUB_REF;
        this.workflow = process.env.GITHUB_WORKFLOW;
        this.action = process.env.GITHUB_ACTION;
        this.actor = process.env.GITHUB_ACTOR;
        this.job = process.env.GITHUB_JOB;
        this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10);
        this.runId = parseInt(process.env.GITHUB_RUN_ID, 10);
        // Fall back to the public github.com endpoints when the runner provides no URLs.
        this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`;
        this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`;
        this.graphqlUrl = (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`;
    }
    /**
     * owner/repo plus the issue or PR number, taken from payload.issue,
     * payload.pull_request, or the payload itself, in that order.
     */
    get issue() {
        const payload = this.payload;
        return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number });
    }
get repo() {
if (process.env.GITHUB_REPOSITORY) {
const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/');
return { owner, repo }; return { owner, repo };
} }
...@@ -1137,7 +1731,7 @@ var __importStar = (this && this.__importStar) || function (mod) { ...@@ -1137,7 +1731,7 @@ var __importStar = (this && this.__importStar) || function (mod) {
}; };
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0; exports.getApiBaseUrl = exports.getProxyAgent = exports.getAuthString = void 0;
const httpClient = __importStar(__nccwpck_require__(6341)); const httpClient = __importStar(__nccwpck_require__(6255));
function getAuthString(token, options) { function getAuthString(token, options) {
if (!token && !options.auth) { if (!token && !options.auth) {
throw new Error('Parameter token or opts.auth is required'); throw new Error('Parameter token or opts.auth is required');
...@@ -1222,7 +1816,95 @@ exports.getOctokitOptions = getOctokitOptions; ...@@ -1222,7 +1816,95 @@ exports.getOctokitOptions = getOctokitOptions;
/***/ }), /***/ }),
/***/ 6341: /***/ 5526:
/***/ (function(__unused_webpack_module, exports) {
"use strict";
// TypeScript-emitted helper: drives a generator (compiled async function) to
// completion, resolving the returned promise with the generator's final value
// and rejecting if the generator throws. P defaults to the global Promise.
var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
    // Wrap non-promise yielded values so `.then` can always be chained.
    function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
    return new (P || (P = Promise))(function (resolve, reject) {
        function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
        function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
        function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
        step((generator = generator.apply(thisArg, _arguments || [])).next());
    });
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.PersonalAccessTokenCredentialHandler = exports.BearerCredentialHandler = exports.BasicCredentialHandler = void 0;
class BasicCredentialHandler {
    /**
     * Attaches HTTP Basic credentials to outgoing requests.
     *
     * @param username user name for the Basic scheme
     * @param password password for the Basic scheme
     */
    constructor(username, password) {
        this.username = username;
        this.password = password;
    }
    /** Sets the Authorization header to `Basic <base64(username:password)>`. */
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        const credentials = `${this.username}:${this.password}`;
        options.headers['Authorization'] = 'Basic ' + Buffer.from(credentials).toString('base64');
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        // Pre-authorization only: responding to a 401 challenge is unsupported.
        return Promise.reject(new Error('not implemented'));
    }
}
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler {
    /**
     * Attaches a Bearer token to outgoing requests.
     *
     * @param token bearer token value
     */
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        options.headers['Authorization'] = 'Bearer ' + this.token;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return Promise.reject(new Error('not implemented'));
    }
}
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler {
    /**
     * Attaches a GitHub personal access token to outgoing requests using the
     * Basic scheme with the fixed "PAT" user name.
     *
     * @param token personal access token value
     */
    constructor(token) {
        this.token = token;
    }
    // currently implements pre-authorization
    // TODO: support preAuth = false where it hooks on 401
    prepareRequest(options) {
        if (!options.headers) {
            throw Error('The request has no headers');
        }
        const encoded = Buffer.from('PAT:' + this.token).toString('base64');
        options.headers['Authorization'] = 'Basic ' + encoded;
    }
    // This handler cannot handle 401
    canHandleAuthentication() {
        return false;
    }
    handleAuthentication() {
        return Promise.reject(new Error('not implemented'));
    }
}
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
//# sourceMappingURL=auth.js.map
/***/ }),
/***/ 6255:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict"; "use strict";
...@@ -1260,7 +1942,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); ...@@ -1260,7 +1942,7 @@ Object.defineProperty(exports, "__esModule", ({ value: true }));
exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0; exports.HttpClient = exports.isHttps = exports.HttpClientResponse = exports.HttpClientError = exports.getProxyUrl = exports.MediaTypes = exports.Headers = exports.HttpCodes = void 0;
const http = __importStar(__nccwpck_require__(3685)); const http = __importStar(__nccwpck_require__(3685));
const https = __importStar(__nccwpck_require__(5687)); const https = __importStar(__nccwpck_require__(5687));
const pm = __importStar(__nccwpck_require__(3466)); const pm = __importStar(__nccwpck_require__(9835));
const tunnel = __importStar(__nccwpck_require__(4294)); const tunnel = __importStar(__nccwpck_require__(4294));
var HttpCodes; var HttpCodes;
(function (HttpCodes) { (function (HttpCodes) {
...@@ -1834,7 +2516,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa ...@@ -1834,7 +2516,7 @@ const lowercaseKeys = (obj) => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCa
/***/ }), /***/ }),
/***/ 3466: /***/ 9835:
/***/ ((__unused_webpack_module, exports) => { /***/ ((__unused_webpack_module, exports) => {
"use strict"; "use strict";
...@@ -1902,860 +2584,184 @@ exports.checkBypass = checkBypass; ...@@ -1902,860 +2584,184 @@ exports.checkBypass = checkBypass;
/***/ }), /***/ }),
/***/ 3702: /***/ 334:
/***/ ((__unused_webpack_module, exports) => { /***/ ((__unused_webpack_module, exports) => {
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
class BasicCredentialHandler {
constructor(username, password) { const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
this.username = username; const REGEX_IS_INSTALLATION = /^ghs_/;
this.password = password; const REGEX_IS_USER_TO_SERVER = /^ghu_/;
} async function auth(token) {
prepareRequest(options) { const isApp = token.split(/\./).length === 3;
options.headers['Authorization'] = const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token);
'Basic ' + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token);
Buffer.from(this.username + ':' + this.password).toString('base64'); const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth";
} return {
// This handler cannot handle 401 type: "token",
canHandleAuthentication(response) { token: token,
return false; tokenType
} };
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
} }
exports.BasicCredentialHandler = BasicCredentialHandler;
class BearerCredentialHandler { /**
constructor(token) { * Prefix token for usage in the Authorization header
this.token = token; *
} * @param token OAuth token or JSON Web Token
// currently implements pre-authorization */
// TODO: support preAuth = false where it hooks on 401 function withAuthorizationPrefix(token) {
prepareRequest(options) { if (token.split(/\./).length === 3) {
options.headers['Authorization'] = 'Bearer ' + this.token; return `bearer ${token}`;
} }
// This handler cannot handle 401
canHandleAuthentication(response) { return `token ${token}`;
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
} }
exports.BearerCredentialHandler = BearerCredentialHandler;
class PersonalAccessTokenCredentialHandler { async function hook(token, request, route, parameters) {
constructor(token) { const endpoint = request.endpoint.merge(route, parameters);
this.token = token; endpoint.headers.authorization = withAuthorizationPrefix(token);
} return request(endpoint);
// currently implements pre-authorization
// TODO: support preAuth = false where it hooks on 401
prepareRequest(options) {
options.headers['Authorization'] =
'Basic ' + Buffer.from('PAT:' + this.token).toString('base64');
}
// This handler cannot handle 401
canHandleAuthentication(response) {
return false;
}
handleAuthentication(httpClient, requestInfo, objs) {
return null;
}
} }
exports.PersonalAccessTokenCredentialHandler = PersonalAccessTokenCredentialHandler;
/**
 * Builds an octokit auth strategy bound to a static token. The returned
 * function resolves the token's type; its `hook` property injects the
 * Authorization header into requests. A leading "token " or "bearer "
 * prefix on the input is stripped before binding.
 */
const createTokenAuth = function createTokenAuth(token) {
    if (!token) {
        throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
    }
    if (typeof token !== "string") {
        throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
    }
    const normalized = token.replace(/^(token|bearer) +/i, "");
    const authenticate = auth.bind(null, normalized);
    authenticate.hook = hook.bind(null, normalized);
    return authenticate;
};
exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
/***/ }), /***/ }),
/***/ 9925: /***/ 6762:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
const http = __nccwpck_require__(3685);
const https = __nccwpck_require__(5687); var universalUserAgent = __nccwpck_require__(5030);
const pm = __nccwpck_require__(6443); var beforeAfterHook = __nccwpck_require__(3682);
let tunnel; var request = __nccwpck_require__(6234);
var HttpCodes; var graphql = __nccwpck_require__(8467);
(function (HttpCodes) { var authToken = __nccwpck_require__(334);
HttpCodes[HttpCodes["OK"] = 200] = "OK";
HttpCodes[HttpCodes["MultipleChoices"] = 300] = "MultipleChoices"; function _objectWithoutPropertiesLoose(source, excluded) {
HttpCodes[HttpCodes["MovedPermanently"] = 301] = "MovedPermanently"; if (source == null) return {};
HttpCodes[HttpCodes["ResourceMoved"] = 302] = "ResourceMoved"; var target = {};
HttpCodes[HttpCodes["SeeOther"] = 303] = "SeeOther"; var sourceKeys = Object.keys(source);
HttpCodes[HttpCodes["NotModified"] = 304] = "NotModified"; var key, i;
HttpCodes[HttpCodes["UseProxy"] = 305] = "UseProxy";
HttpCodes[HttpCodes["SwitchProxy"] = 306] = "SwitchProxy"; for (i = 0; i < sourceKeys.length; i++) {
HttpCodes[HttpCodes["TemporaryRedirect"] = 307] = "TemporaryRedirect"; key = sourceKeys[i];
HttpCodes[HttpCodes["PermanentRedirect"] = 308] = "PermanentRedirect"; if (excluded.indexOf(key) >= 0) continue;
HttpCodes[HttpCodes["BadRequest"] = 400] = "BadRequest"; target[key] = source[key];
HttpCodes[HttpCodes["Unauthorized"] = 401] = "Unauthorized"; }
HttpCodes[HttpCodes["PaymentRequired"] = 402] = "PaymentRequired";
HttpCodes[HttpCodes["Forbidden"] = 403] = "Forbidden"; return target;
HttpCodes[HttpCodes["NotFound"] = 404] = "NotFound";
HttpCodes[HttpCodes["MethodNotAllowed"] = 405] = "MethodNotAllowed";
HttpCodes[HttpCodes["NotAcceptable"] = 406] = "NotAcceptable";
HttpCodes[HttpCodes["ProxyAuthenticationRequired"] = 407] = "ProxyAuthenticationRequired";
HttpCodes[HttpCodes["RequestTimeout"] = 408] = "RequestTimeout";
HttpCodes[HttpCodes["Conflict"] = 409] = "Conflict";
HttpCodes[HttpCodes["Gone"] = 410] = "Gone";
HttpCodes[HttpCodes["TooManyRequests"] = 429] = "TooManyRequests";
HttpCodes[HttpCodes["InternalServerError"] = 500] = "InternalServerError";
HttpCodes[HttpCodes["NotImplemented"] = 501] = "NotImplemented";
HttpCodes[HttpCodes["BadGateway"] = 502] = "BadGateway";
HttpCodes[HttpCodes["ServiceUnavailable"] = 503] = "ServiceUnavailable";
HttpCodes[HttpCodes["GatewayTimeout"] = 504] = "GatewayTimeout";
})(HttpCodes = exports.HttpCodes || (exports.HttpCodes = {}));
var Headers;
(function (Headers) {
Headers["Accept"] = "accept";
Headers["ContentType"] = "content-type";
})(Headers = exports.Headers || (exports.Headers = {}));
var MediaTypes;
(function (MediaTypes) {
MediaTypes["ApplicationJson"] = "application/json";
})(MediaTypes = exports.MediaTypes || (exports.MediaTypes = {}));
/**
* Returns the proxy URL, depending upon the supplied url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
function getProxyUrl(serverUrl) {
let proxyUrl = pm.getProxyUrl(new URL(serverUrl));
return proxyUrl ? proxyUrl.href : '';
}
exports.getProxyUrl = getProxyUrl;
const HttpRedirectCodes = [
HttpCodes.MovedPermanently,
HttpCodes.ResourceMoved,
HttpCodes.SeeOther,
HttpCodes.TemporaryRedirect,
HttpCodes.PermanentRedirect
];
const HttpResponseRetryCodes = [
HttpCodes.BadGateway,
HttpCodes.ServiceUnavailable,
HttpCodes.GatewayTimeout
];
const RetryableHttpVerbs = ['OPTIONS', 'GET', 'DELETE', 'HEAD'];
const ExponentialBackoffCeiling = 10;
const ExponentialBackoffTimeSlice = 5;
class HttpClientError extends Error {
constructor(message, statusCode) {
super(message);
this.name = 'HttpClientError';
this.statusCode = statusCode;
Object.setPrototypeOf(this, HttpClientError.prototype);
}
} }
exports.HttpClientError = HttpClientError;
class HttpClientResponse { function _objectWithoutProperties(source, excluded) {
constructor(message) { if (source == null) return {};
this.message = message;
} var target = _objectWithoutPropertiesLoose(source, excluded);
readBody() {
return new Promise(async (resolve, reject) => { var key, i;
let output = Buffer.alloc(0);
this.message.on('data', (chunk) => { if (Object.getOwnPropertySymbols) {
output = Buffer.concat([output, chunk]); var sourceSymbolKeys = Object.getOwnPropertySymbols(source);
});
this.message.on('end', () => { for (i = 0; i < sourceSymbolKeys.length; i++) {
resolve(output.toString()); key = sourceSymbolKeys[i];
}); if (excluded.indexOf(key) >= 0) continue;
}); if (!Object.prototype.propertyIsEnumerable.call(source, key)) continue;
target[key] = source[key];
} }
}
return target;
} }
exports.HttpClientResponse = HttpClientResponse;
function isHttps(requestUrl) { const VERSION = "3.6.0";
let parsedUrl = new URL(requestUrl);
return parsedUrl.protocol === 'https:'; const _excluded = ["authStrategy"];
} class Octokit {
exports.isHttps = isHttps; constructor(options = {}) {
class HttpClient { const hook = new beforeAfterHook.Collection();
constructor(userAgent, handlers, requestOptions) { const requestDefaults = {
this._ignoreSslError = false; baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
this._allowRedirects = true; headers: {},
this._allowRedirectDowngrade = false; request: Object.assign({}, options.request, {
this._maxRedirects = 50; // @ts-ignore internal usage only, no need to type
this._allowRetries = false; hook: hook.bind(null, "request")
this._maxRetries = 1; }),
this._keepAlive = false; mediaType: {
this._disposed = false; previews: [],
this.userAgent = userAgent; format: ""
this.handlers = handlers || []; }
this.requestOptions = requestOptions; }; // prepend default user agent with `options.userAgent` if set
if (requestOptions) {
if (requestOptions.ignoreSslError != null) { requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
this._ignoreSslError = requestOptions.ignoreSslError;
} if (options.baseUrl) {
this._socketTimeout = requestOptions.socketTimeout; requestDefaults.baseUrl = options.baseUrl;
if (requestOptions.allowRedirects != null) {
this._allowRedirects = requestOptions.allowRedirects;
}
if (requestOptions.allowRedirectDowngrade != null) {
this._allowRedirectDowngrade = requestOptions.allowRedirectDowngrade;
}
if (requestOptions.maxRedirects != null) {
this._maxRedirects = Math.max(requestOptions.maxRedirects, 0);
}
if (requestOptions.keepAlive != null) {
this._keepAlive = requestOptions.keepAlive;
}
if (requestOptions.allowRetries != null) {
this._allowRetries = requestOptions.allowRetries;
}
if (requestOptions.maxRetries != null) {
this._maxRetries = requestOptions.maxRetries;
}
}
}
options(requestUrl, additionalHeaders) {
return this.request('OPTIONS', requestUrl, null, additionalHeaders || {});
}
get(requestUrl, additionalHeaders) {
return this.request('GET', requestUrl, null, additionalHeaders || {});
}
del(requestUrl, additionalHeaders) {
return this.request('DELETE', requestUrl, null, additionalHeaders || {});
}
post(requestUrl, data, additionalHeaders) {
return this.request('POST', requestUrl, data, additionalHeaders || {});
}
patch(requestUrl, data, additionalHeaders) {
return this.request('PATCH', requestUrl, data, additionalHeaders || {});
} }
put(requestUrl, data, additionalHeaders) {
return this.request('PUT', requestUrl, data, additionalHeaders || {}); if (options.previews) {
requestDefaults.mediaType.previews = options.previews;
} }
head(requestUrl, additionalHeaders) {
return this.request('HEAD', requestUrl, null, additionalHeaders || {}); if (options.timeZone) {
requestDefaults.headers["time-zone"] = options.timeZone;
} }
sendStream(verb, requestUrl, stream, additionalHeaders) {
return this.request(verb, requestUrl, stream, additionalHeaders);
}
/**
* Gets a typed object from an endpoint
* Be aware that not found returns a null. Other errors (4xx, 5xx) reject the promise
*/
async getJson(requestUrl, additionalHeaders = {}) {
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
let res = await this.get(requestUrl, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async postJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.post(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async putJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.put(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
async patchJson(requestUrl, obj, additionalHeaders = {}) {
let data = JSON.stringify(obj, null, 2);
additionalHeaders[Headers.Accept] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.Accept, MediaTypes.ApplicationJson);
additionalHeaders[Headers.ContentType] = this._getExistingOrDefaultHeader(additionalHeaders, Headers.ContentType, MediaTypes.ApplicationJson);
let res = await this.patch(requestUrl, data, additionalHeaders);
return this._processResponse(res, this.requestOptions);
}
/**
* Makes a raw http request.
* All other methods such as get, post, patch, and request ultimately call this.
* Prefer get, del, post and patch
*/
    // Core request loop: dispatches the request, then handles (in order)
    // auth challenges (401), redirects (3xx), and retryable status codes,
    // with exponential backoff between retries.
    async request(verb, requestUrl, data, headers) {
        if (this._disposed) {
            throw new Error('Client has already been disposed.');
        }
        let parsedUrl = new URL(requestUrl);
        let info = this._prepareRequest(verb, parsedUrl, headers);
        // Only perform retries on reads since writes may not be idempotent.
        let maxTries = this._allowRetries && RetryableHttpVerbs.indexOf(verb) != -1
            ? this._maxRetries + 1
            : 1;
        let numTries = 0;
        let response;
        while (numTries < maxTries) {
            response = await this.requestRaw(info, data);
            // Check if it's an authentication challenge
            if (response &&
                response.message &&
                response.message.statusCode === HttpCodes.Unauthorized) {
                let authenticationHandler;
                // First registered handler that can handle the challenge wins.
                for (let i = 0; i < this.handlers.length; i++) {
                    if (this.handlers[i].canHandleAuthentication(response)) {
                        authenticationHandler = this.handlers[i];
                        break;
                    }
                }
                if (authenticationHandler) {
                    return authenticationHandler.handleAuthentication(this, info, data);
                }
                else {
                    // We have received an unauthorized response but have no handlers to handle it.
                    // Let the response return to the caller.
                    return response;
                }
            }
            let redirectsRemaining = this._maxRedirects;
            while (HttpRedirectCodes.indexOf(response.message.statusCode) != -1 &&
                this._allowRedirects &&
                redirectsRemaining > 0) {
                const redirectUrl = response.message.headers['location'];
                if (!redirectUrl) {
                    // if there's no location to redirect to, we won't
                    break;
                }
                let parsedRedirectUrl = new URL(redirectUrl);
                if (parsedUrl.protocol == 'https:' &&
                    parsedUrl.protocol != parsedRedirectUrl.protocol &&
                    !this._allowRedirectDowngrade) {
                    throw new Error('Redirect from HTTPS to HTTP protocol. This downgrade is not allowed for security reasons. If you want to allow this behavior, set the allowRedirectDowngrade option to true.');
                }
                // we need to finish reading the response before reassigning response
                // which will leak the open socket.
                await response.readBody();
                // strip authorization header if redirected to a different hostname
                if (parsedRedirectUrl.hostname !== parsedUrl.hostname) {
                    for (let header in headers) {
                        // header names are case insensitive
                        if (header.toLowerCase() === 'authorization') {
                            delete headers[header];
                        }
                    }
                }
                // let's make the request with the new redirectUrl
                info = this._prepareRequest(verb, parsedRedirectUrl, headers);
                response = await this.requestRaw(info, data);
                redirectsRemaining--;
            }
            if (HttpResponseRetryCodes.indexOf(response.message.statusCode) == -1) {
                // If not a retry code, return immediately instead of retrying
                return response;
            }
            numTries += 1;
            if (numTries < maxTries) {
                // Drain the body so the socket is released before backing off.
                await response.readBody();
                await this._performExponentialBackoff(numTries);
            }
        }
        return response;
    }
/**
* Needs to be called if keepAlive is set to true in request options.
*/
dispose() {
if (this._agent) {
this._agent.destroy();
}
this._disposed = true;
}
/**
* Raw request.
* @param info
* @param data
*/
requestRaw(info, data) {
return new Promise((resolve, reject) => {
let callbackForResult = function (err, res) {
if (err) {
reject(err);
}
resolve(res);
};
this.requestRawWithCallback(info, data, callbackForResult);
});
}
/**
* Raw request with callback.
* @param info
* @param data
* @param onResult
*/
    // Lowest-level dispatch: sets Content-Length for string bodies, wires
    // socket/timeout/error handlers, and guarantees `onResult` fires at most
    // once (success, error, or timeout — whichever happens first).
    requestRawWithCallback(info, data, onResult) {
        let socket;
        if (typeof data === 'string') {
            info.options.headers['Content-Length'] = Buffer.byteLength(data, 'utf8');
        }
        let callbackCalled = false;
        // Guard so competing events (response vs. timeout vs. error) cannot
        // invoke the caller's callback twice.
        let handleResult = (err, res) => {
            if (!callbackCalled) {
                callbackCalled = true;
                onResult(err, res);
            }
        };
        let req = info.httpModule.request(info.options, (msg) => {
            let res = new HttpClientResponse(msg);
            handleResult(null, res);
        });
        req.on('socket', sock => {
            socket = sock;
        });
        // If we ever get disconnected, we want the socket to timeout eventually
        req.setTimeout(this._socketTimeout || 3 * 60000, () => {
            if (socket) {
                socket.end();
            }
            handleResult(new Error('Request timeout: ' + info.options.path), null);
        });
        req.on('error', function (err) {
            // err has statusCode property
            // res should have headers
            handleResult(err, null);
        });
        if (data && typeof data === 'string') {
            req.write(data, 'utf8');
        }
        if (data && typeof data !== 'string') {
            // Stream body: end the request when the source stream closes.
            data.on('close', function () {
                req.end();
            });
            data.pipe(req);
        }
        else {
            req.end();
        }
    }
/**
* Gets an http agent. This function is useful when you need an http agent that handles
* routing through a proxy server - depending upon the url and proxy environment variables.
* @param serverUrl The server URL where the request will be sent. For example, https://api.github.com
*/
getAgent(serverUrl) {
let parsedUrl = new URL(serverUrl);
return this._getAgent(parsedUrl);
}
_prepareRequest(method, requestUrl, headers) {
const info = {};
info.parsedUrl = requestUrl;
const usingSsl = info.parsedUrl.protocol === 'https:';
info.httpModule = usingSsl ? https : http;
const defaultPort = usingSsl ? 443 : 80;
info.options = {};
info.options.host = info.parsedUrl.hostname;
info.options.port = info.parsedUrl.port
? parseInt(info.parsedUrl.port)
: defaultPort;
info.options.path =
(info.parsedUrl.pathname || '') + (info.parsedUrl.search || '');
info.options.method = method;
info.options.headers = this._mergeHeaders(headers);
if (this.userAgent != null) {
info.options.headers['user-agent'] = this.userAgent;
}
info.options.agent = this._getAgent(info.parsedUrl);
// gives handlers an opportunity to participate
if (this.handlers) {
this.handlers.forEach(handler => {
handler.prepareRequest(info.options);
});
}
return info;
}
_mergeHeaders(headers) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
if (this.requestOptions && this.requestOptions.headers) {
return Object.assign({}, lowercaseKeys(this.requestOptions.headers), lowercaseKeys(headers));
}
return lowercaseKeys(headers || {});
}
_getExistingOrDefaultHeader(additionalHeaders, header, _default) {
const lowercaseKeys = obj => Object.keys(obj).reduce((c, k) => ((c[k.toLowerCase()] = obj[k]), c), {});
let clientHeader;
if (this.requestOptions && this.requestOptions.headers) {
clientHeader = lowercaseKeys(this.requestOptions.headers)[header];
}
return additionalHeaders[header] || clientHeader || _default;
}
    // Select (and cache, when keepAlive is on) the agent for a URL:
    // a tunnel agent when a proxy applies, a private keep-alive agent,
    // or the global http/https agent as the fallback.
    _getAgent(parsedUrl) {
        let agent;
        let proxyUrl = pm.getProxyUrl(parsedUrl);
        let useProxy = proxyUrl && proxyUrl.hostname;
        // Reuse the cached proxy or direct agent when keep-alive is enabled.
        if (this._keepAlive && useProxy) {
            agent = this._proxyAgent;
        }
        if (this._keepAlive && !useProxy) {
            agent = this._agent;
        }
        // if agent is already assigned use that agent.
        if (!!agent) {
            return agent;
        }
        const usingSsl = parsedUrl.protocol === 'https:';
        let maxSockets = 100;
        if (!!this.requestOptions) {
            maxSockets = this.requestOptions.maxSockets || http.globalAgent.maxSockets;
        }
        if (useProxy) {
            // If using proxy, need tunnel
            // (lazily loaded so non-proxied usage never requires the module)
            if (!tunnel) {
                tunnel = __nccwpck_require__(4294);
            }
            const agentOptions = {
                maxSockets: maxSockets,
                keepAlive: this._keepAlive,
                proxy: {
                    ...((proxyUrl.username || proxyUrl.password) && {
                        proxyAuth: `${proxyUrl.username}:${proxyUrl.password}`
                    }),
                    host: proxyUrl.hostname,
                    port: proxyUrl.port
                }
            };
            let tunnelAgent;
            // Pick the tunnel flavor from the target protocol x proxy protocol.
            const overHttps = proxyUrl.protocol === 'https:';
            if (usingSsl) {
                tunnelAgent = overHttps ? tunnel.httpsOverHttps : tunnel.httpsOverHttp;
            }
            else {
                tunnelAgent = overHttps ? tunnel.httpOverHttps : tunnel.httpOverHttp;
            }
            agent = tunnelAgent(agentOptions);
            this._proxyAgent = agent;
        }
        // if reusing agent across request and tunneling agent isn't assigned create a new agent
        if (this._keepAlive && !agent) {
            const options = { keepAlive: this._keepAlive, maxSockets: maxSockets };
            agent = usingSsl ? new https.Agent(options) : new http.Agent(options);
            this._agent = agent;
        }
        // if not using private agent and tunnel agent isn't setup then use global agent
        if (!agent) {
            agent = usingSsl ? https.globalAgent : http.globalAgent;
        }
        if (usingSsl && this._ignoreSslError) {
            // we don't want to set NODE_TLS_REJECT_UNAUTHORIZED=0 since that will affect request for entire process
            // http.RequestOptions doesn't expose a way to modify RequestOptions.agent.options
            // we have to cast it to any and change it directly
            agent.options = Object.assign(agent.options || {}, {
                rejectUnauthorized: false
            });
        }
        return agent;
    }
_performExponentialBackoff(retryNumber) {
retryNumber = Math.min(ExponentialBackoffCeiling, retryNumber);
const ms = ExponentialBackoffTimeSlice * Math.pow(2, retryNumber);
return new Promise(resolve => setTimeout(() => resolve(), ms));
}
static dateTimeDeserializer(key, value) {
if (typeof value === 'string') {
let a = new Date(value);
if (!isNaN(a.valueOf())) {
return a;
}
}
return value;
}
    // Read and JSON-decode the response body into { statusCode, result, headers };
    // resolves with result=null for 404, rejects with HttpClientError for other
    // statuses > 299.
    async _processResponse(res, options) {
        return new Promise(async (resolve, reject) => {
            const statusCode = res.message.statusCode;
            const response = {
                statusCode: statusCode,
                result: null,
                headers: {}
            };
            // not found leads to null obj returned
            // NOTE(review): there is no `return` after this resolve — execution
            // continues and still reads the body; the later resolve/reject calls
            // are no-ops because the promise has already settled. This matches
            // upstream @actions/http-client behavior — verify before changing.
            if (statusCode == HttpCodes.NotFound) {
                resolve(response);
            }
            let obj;
            let contents;
            // get the result from the body
            try {
                contents = await res.readBody();
                if (contents && contents.length > 0) {
                    if (options && options.deserializeDates) {
                        obj = JSON.parse(contents, HttpClient.dateTimeDeserializer);
                    }
                    else {
                        obj = JSON.parse(contents);
                    }
                    response.result = obj;
                }
                response.headers = res.message.headers;
            }
            catch (err) {
                // Invalid resource (contents not json); leaving result obj null
            }
            // note that 3xx redirects are handled by the http layer.
            if (statusCode > 299) {
                let msg;
                // if exception/error in body, attempt to get better error
                if (obj && obj.message) {
                    msg = obj.message;
                }
                else if (contents && contents.length > 0) {
                    // it may be the case that the exception is in the body message as string
                    msg = contents;
                }
                else {
                    msg = 'Failed request: (' + statusCode + ')';
                }
                let err = new HttpClientError(msg, statusCode);
                err.result = response.result;
                reject(err);
            }
            else {
                resolve(response);
            }
        });
    }
}
exports.HttpClient = HttpClient;
this.request = request.request.defaults(requestDefaults);
this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
this.log = Object.assign({
debug: () => {},
info: () => {},
warn: console.warn.bind(console),
error: console.error.bind(console)
}, options.log);
this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
// is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
// (2) If only `options.auth` is set, use the default token authentication strategy.
// (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
// TODO: type `options.auth` based on `options.authStrategy`.
/***/ }), if (!options.authStrategy) {
if (!options.auth) {
/***/ 6443: // (1)
/***/ ((__unused_webpack_module, exports) => { this.auth = async () => ({
type: "unauthenticated"
});
} else {
// (2)
const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
"use strict"; hook.wrap("request", auth.hook);
this.auth = auth;
Object.defineProperty(exports, "__esModule", ({ value: true })); }
function getProxyUrl(reqUrl) { } else {
let usingSsl = reqUrl.protocol === 'https:'; const {
let proxyUrl; authStrategy
if (checkBypass(reqUrl)) { } = options,
return proxyUrl; otherOptions = _objectWithoutProperties(options, _excluded);
}
let proxyVar;
if (usingSsl) {
proxyVar = process.env['https_proxy'] || process.env['HTTPS_PROXY'];
}
else {
proxyVar = process.env['http_proxy'] || process.env['HTTP_PROXY'];
}
if (proxyVar) {
proxyUrl = new URL(proxyVar);
}
return proxyUrl;
}
exports.getProxyUrl = getProxyUrl;
function checkBypass(reqUrl) {
    // Decide whether `reqUrl` should bypass the proxy, based on the
    // comma-separated no_proxy/NO_PROXY environment variable. Matching is
    // case-insensitive against "HOST" and "HOST:PORT".
    if (!reqUrl.hostname) {
        return false;
    }
    const noProxy = process.env['no_proxy'] || process.env['NO_PROXY'] || '';
    if (!noProxy) {
        return false;
    }
    // Effective request port: explicit port, else the protocol default.
    let reqPort;
    if (reqUrl.port) {
        reqPort = Number(reqUrl.port);
    }
    else if (reqUrl.protocol === 'http:') {
        reqPort = 80;
    }
    else if (reqUrl.protocol === 'https:') {
        reqPort = 443;
    }
    const candidates = [reqUrl.hostname.toUpperCase()];
    if (typeof reqPort === 'number') {
        candidates.push(`${candidates[0]}:${reqPort}`);
    }
    for (const rawEntry of noProxy.split(',')) {
        const entry = rawEntry.trim().toUpperCase();
        if (entry && candidates.indexOf(entry) !== -1) {
            return true;
        }
    }
    return false;
}
exports.checkBypass = checkBypass;
/***/ }),
/***/ 334:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const REGEX_IS_INSTALLATION_LEGACY = /^v1\./;
const REGEX_IS_INSTALLATION = /^ghs_/;
const REGEX_IS_USER_TO_SERVER = /^ghu_/;
// Classify a token for @octokit/auth-token: three dot-separated parts mean a
// JWT (app token); "v1."/"ghs_" prefixes mean installation; "ghu_" means
// user-to-server; anything else is treated as a classic OAuth token.
async function auth(token) {
    let tokenType;
    if (token.split(/\./).length === 3) {
        tokenType = "app";
    }
    else if (REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token)) {
        tokenType = "installation";
    }
    else if (REGEX_IS_USER_TO_SERVER.test(token)) {
        tokenType = "user-to-server";
    }
    else {
        tokenType = "oauth";
    }
    return {
        type: "token",
        token: token,
        tokenType
    };
}
/**
 * Prefix token for usage in the Authorization header
 *
 * @param token OAuth token or JSON Web Token
 */
function withAuthorizationPrefix(token) {
    // JWTs (three dot-separated segments) use the "bearer" scheme; everything
    // else uses the classic "token" scheme.
    const isJwt = token.split(/\./).length === 3;
    return isJwt ? `bearer ${token}` : `token ${token}`;
}
// Request hook: merge route/parameters into endpoint options, attach the
// Authorization header for `token`, then dispatch via `request`.
async function hook(token, request, route, parameters) {
    const endpoint = request.endpoint.merge(route, parameters);
    endpoint.headers.authorization = withAuthorizationPrefix(token);
    return request(endpoint);
}
// Build the token auth strategy: validates the token, strips an optional
// "token " / "bearer " prefix (the correct prefix is re-added per request),
// and returns an auth function carrying a request `hook`.
const createTokenAuth = function createTokenAuth(token) {
    if (!token) {
        throw new Error("[@octokit/auth-token] No token passed to createTokenAuth");
    }
    if (typeof token !== "string") {
        throw new Error("[@octokit/auth-token] Token passed to createTokenAuth is not a string");
    }
    const normalized = token.replace(/^(token|bearer) +/i, "");
    const authenticate = auth.bind(null, normalized);
    authenticate.hook = hook.bind(null, normalized);
    return authenticate;
};
exports.createTokenAuth = createTokenAuth;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 6762:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
var universalUserAgent = __nccwpck_require__(5030);
var beforeAfterHook = __nccwpck_require__(3682);
var request = __nccwpck_require__(6234);
var graphql = __nccwpck_require__(8467);
var authToken = __nccwpck_require__(334);
// Shallow-copy own enumerable string-keyed properties of `source`, skipping
// any key listed in `excluded`. A nullish source yields an empty object.
function _objectWithoutPropertiesLoose(source, excluded) {
    if (source == null) return {};
    const target = {};
    for (const key of Object.keys(source)) {
        if (excluded.indexOf(key) >= 0) continue;
        target[key] = source[key];
    }
    return target;
}
// Like the loose variant, but also copies own *enumerable* symbol-keyed
// properties (when the runtime supports symbols).
function _objectWithoutProperties(source, excluded) {
    if (source == null) return {};
    const target = _objectWithoutPropertiesLoose(source, excluded);
    if (Object.getOwnPropertySymbols) {
        for (const sym of Object.getOwnPropertySymbols(source)) {
            if (excluded.indexOf(sym) >= 0) continue;
            if (!Object.prototype.propertyIsEnumerable.call(source, sym)) continue;
            target[sym] = source[sym];
        }
    }
    return target;
}
const VERSION = "3.6.0";
// Option keys split off in the Octokit constructor via _objectWithoutProperties.
const _excluded = ["authStrategy"];
class Octokit {
constructor(options = {}) {
const hook = new beforeAfterHook.Collection();
const requestDefaults = {
baseUrl: request.request.endpoint.DEFAULTS.baseUrl,
headers: {},
request: Object.assign({}, options.request, {
// @ts-ignore internal usage only, no need to type
hook: hook.bind(null, "request")
}),
mediaType: {
previews: [],
format: ""
}
}; // prepend default user agent with `options.userAgent` if set
requestDefaults.headers["user-agent"] = [options.userAgent, `octokit-core.js/${VERSION} ${universalUserAgent.getUserAgent()}`].filter(Boolean).join(" ");
if (options.baseUrl) {
requestDefaults.baseUrl = options.baseUrl;
}
if (options.previews) {
requestDefaults.mediaType.previews = options.previews;
}
if (options.timeZone) {
requestDefaults.headers["time-zone"] = options.timeZone;
}
this.request = request.request.defaults(requestDefaults);
this.graphql = graphql.withCustomRequest(this.request).defaults(requestDefaults);
this.log = Object.assign({
debug: () => {},
info: () => {},
warn: console.warn.bind(console),
error: console.error.bind(console)
}, options.log);
this.hook = hook; // (1) If neither `options.authStrategy` nor `options.auth` are set, the `octokit` instance
// is unauthenticated. The `this.auth()` method is a no-op and no request hook is registered.
// (2) If only `options.auth` is set, use the default token authentication strategy.
// (3) If `options.authStrategy` is set then use it and pass in `options.auth`. Always pass own request as many strategies accept a custom request instance.
// TODO: type `options.auth` based on `options.authStrategy`.
if (!options.authStrategy) {
if (!options.auth) {
// (1)
this.auth = async () => ({
type: "unauthenticated"
});
} else {
// (2)
const auth = authToken.createTokenAuth(options.auth); // @ts-ignore ¯\_(ツ)_/¯
hook.wrap("request", auth.hook);
this.auth = auth;
}
} else {
const {
authStrategy
} = options,
otherOptions = _objectWithoutProperties(options, _excluded);
const auth = authStrategy(Object.assign({ const auth = authStrategy(Object.assign({
request: this.request, request: this.request,
...@@ -3231,7 +3237,7 @@ exports.endpoint = endpoint; ...@@ -3231,7 +3237,7 @@ exports.endpoint = endpoint;
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
var request = __nccwpck_require__(3758); var request = __nccwpck_require__(6234);
var universalUserAgent = __nccwpck_require__(5030); var universalUserAgent = __nccwpck_require__(5030);
const VERSION = "4.8.0"; const VERSION = "4.8.0";
...@@ -3349,234 +3355,37 @@ exports.withCustomRequest = withCustomRequest; ...@@ -3349,234 +3355,37 @@ exports.withCustomRequest = withCustomRequest;
/***/ }), /***/ }),
/***/ 3758: /***/ 4193:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { /***/ ((__unused_webpack_module, exports) => {
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } const VERSION = "2.21.2";
var endpoint = __nccwpck_require__(9440); function ownKeys(object, enumerableOnly) {
var universalUserAgent = __nccwpck_require__(5030); var keys = Object.keys(object);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);
const VERSION = "5.6.2"; if (Object.getOwnPropertySymbols) {
var symbols = Object.getOwnPropertySymbols(object);
enumerableOnly && (symbols = symbols.filter(function (sym) {
return Object.getOwnPropertyDescriptor(object, sym).enumerable;
})), keys.push.apply(keys, symbols);
}
function getBufferResponse(response) { return keys;
return response.arrayBuffer();
} }
function fetchWrapper(requestOptions) { function _objectSpread2(target) {
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; for (var i = 1; i < arguments.length; i++) {
var source = null != arguments[i] ? arguments[i] : {};
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { i % 2 ? ownKeys(Object(source), !0).forEach(function (key) {
requestOptions.body = JSON.stringify(requestOptions.body); _defineProperty(target, key, source[key]);
} }) : Object.getOwnPropertyDescriptors ? Object.defineProperties(target, Object.getOwnPropertyDescriptors(source)) : ownKeys(Object(source)).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
let headers = {}; });
let status;
let url;
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
return fetch(requestOptions.url, Object.assign({
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
redirect: requestOptions.redirect
}, // `requestOptions.request.agent` type is incompatible
// see https://github.com/octokit/types.ts/pull/264
requestOptions.request)).then(async response => {
url = response.url;
status = response.status;
for (const keyAndValue of response.headers) {
headers[keyAndValue[0]] = keyAndValue[1];
}
if ("deprecation" in headers) {
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
const deprecationLink = matches && matches.pop();
log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
}
if (status === 204 || status === 205) {
return;
} // GitHub API returns 200 for HEAD requests
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
throw new requestError.RequestError(response.statusText, status, {
response: {
url,
status,
headers,
data: undefined
},
request: requestOptions
});
}
if (status === 304) {
throw new requestError.RequestError("Not modified", status, {
response: {
url,
status,
headers,
data: await getResponseData(response)
},
request: requestOptions
});
}
if (status >= 400) {
const data = await getResponseData(response);
const error = new requestError.RequestError(toErrorMessage(data), status, {
response: {
url,
status,
headers,
data
},
request: requestOptions
});
throw error;
}
return getResponseData(response);
}).then(data => {
return {
status,
url,
headers,
data
};
}).catch(error => {
if (error instanceof requestError.RequestError) throw error;
throw new requestError.RequestError(error.message, 500, {
request: requestOptions
});
});
}
// Decode a fetch Response body according to its content-type:
// JSON for application/json, text for text/* (or missing/utf-8 types),
// otherwise a raw ArrayBuffer via getBufferResponse.
async function getResponseData(response) {
    const contentType = response.headers.get("content-type");
    if (/application\/json/.test(contentType)) {
        return response.json();
    }
    const isTextual = !contentType || /^text\/|charset=utf-8$/.test(contentType);
    return isTextual ? response.text() : getBufferResponse(response);
}
// Extract a human-readable message from an error response payload: the raw
// string, `message` (optionally with serialized `errors` appended), or a
// JSON dump as the last resort.
function toErrorMessage(data) {
    if (typeof data === "string") return data;
    // istanbul ignore else - just in case
    if ("message" in data) {
        return Array.isArray(data.errors)
            ? `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`
            : data.message;
    }
    // istanbul ignore next - just in case
    return `Unknown error: ${JSON.stringify(data)}`;
}
// Merge `newDefaults` into the endpoint and return a request API bound to it.
// The returned function carries `endpoint` and a chainable `defaults`.
function withDefaults(oldEndpoint, newDefaults) {
    const endpoint = oldEndpoint.defaults(newDefaults);
    // Attach the standard `endpoint`/`defaults` properties to a request fn.
    const decorate = (fn) =>
        Object.assign(fn, {
            endpoint,
            defaults: withDefaults.bind(null, endpoint)
        });
    const newApi = function (route, parameters) {
        const endpointOptions = endpoint.merge(route, parameters);
        if (endpointOptions.request && endpointOptions.request.hook) {
            // A hook is configured: hand it a re-dispatchable `request` function.
            const request = decorate((r, p) => fetchWrapper(endpoint.parse(endpoint.merge(r, p))));
            return endpointOptions.request.hook(request, endpointOptions);
        }
        // No hook: parse and dispatch directly.
        return fetchWrapper(endpoint.parse(endpointOptions));
    };
    return decorate(newApi);
}
// Default exported `request`: the base endpoint defaults plus the standard
// octokit-request user-agent header.
const request = withDefaults(endpoint.endpoint, {
    headers: {
        "user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
    }
});
exports.request = request;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 4193:
/***/ ((__unused_webpack_module, exports) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
const VERSION = "2.17.0";
// All own string keys of `object`, followed by its own symbol keys.
// With `enumerableOnly`, non-enumerable symbol keys are filtered out
// (string keys from Object.keys are always enumerable).
function ownKeys(object, enumerableOnly) {
    const keys = Object.keys(object);
    if (Object.getOwnPropertySymbols) {
        let symbols = Object.getOwnPropertySymbols(object);
        if (enumerableOnly) {
            symbols = symbols.filter(sym => Object.getOwnPropertyDescriptor(object, sym).enumerable);
        }
        keys.push(...symbols);
    }
    return keys;
}
function _objectSpread2(target) {
for (var i = 1; i < arguments.length; i++) {
var source = arguments[i] != null ? arguments[i] : {};
if (i % 2) {
ownKeys(Object(source), true).forEach(function (key) {
_defineProperty(target, key, source[key]);
});
} else if (Object.getOwnPropertyDescriptors) {
Object.defineProperties(target, Object.getOwnPropertyDescriptors(source));
} else {
ownKeys(Object(source)).forEach(function (key) {
Object.defineProperty(target, key, Object.getOwnPropertyDescriptor(source, key));
});
}
} }
return target; return target;
...@@ -3726,7 +3535,7 @@ const composePaginateRest = Object.assign(paginate, { ...@@ -3726,7 +3535,7 @@ const composePaginateRest = Object.assign(paginate, {
iterator iterator
}); });
const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/actions/runners/downloads", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/runners/downloads", "GET /orgs/{org}/actions/secrets", "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/blocks", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/events", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/projects", "GET 
/orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/team-sync/group-mappings", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runners/downloads", "GET /repos/{owner}/{repo}/actions/runs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/autolinks", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", 
"GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET 
/repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /scim/v2/enterprises/{enterprise}/Groups", "GET /scim/v2/enterprises/{enterprise}/Users", "GET /scim/v2/organizations/{org}/Users", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET /teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/team-sync/group-mappings", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET 
/users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"]; const paginatingEndpoints = ["GET /app/hook/deliveries", "GET /app/installations", "GET /applications/grants", "GET /authorizations", "GET /enterprises/{enterprise}/actions/permissions/organizations", "GET /enterprises/{enterprise}/actions/runner-groups", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/organizations", "GET /enterprises/{enterprise}/actions/runner-groups/{runner_group_id}/runners", "GET /enterprises/{enterprise}/actions/runners", "GET /enterprises/{enterprise}/audit-log", "GET /enterprises/{enterprise}/secret-scanning/alerts", "GET /enterprises/{enterprise}/settings/billing/advanced-security", "GET /events", "GET /gists", "GET /gists/public", "GET /gists/starred", "GET /gists/{gist_id}/comments", "GET /gists/{gist_id}/commits", "GET /gists/{gist_id}/forks", "GET /installation/repositories", "GET /issues", "GET /licenses", "GET /marketplace_listing/plans", "GET /marketplace_listing/plans/{plan_id}/accounts", "GET /marketplace_listing/stubbed/plans", "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", "GET /networks/{owner}/{repo}/events", "GET /notifications", "GET /organizations", "GET /orgs/{org}/actions/cache/usage-by-repository", "GET /orgs/{org}/actions/permissions/repositories", "GET /orgs/{org}/actions/runner-groups", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/repositories", "GET /orgs/{org}/actions/runner-groups/{runner_group_id}/runners", "GET /orgs/{org}/actions/runners", "GET /orgs/{org}/actions/secrets", "GET 
/orgs/{org}/actions/secrets/{secret_name}/repositories", "GET /orgs/{org}/audit-log", "GET /orgs/{org}/blocks", "GET /orgs/{org}/code-scanning/alerts", "GET /orgs/{org}/codespaces", "GET /orgs/{org}/credential-authorizations", "GET /orgs/{org}/dependabot/secrets", "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", "GET /orgs/{org}/events", "GET /orgs/{org}/external-groups", "GET /orgs/{org}/failed_invitations", "GET /orgs/{org}/hooks", "GET /orgs/{org}/hooks/{hook_id}/deliveries", "GET /orgs/{org}/installations", "GET /orgs/{org}/invitations", "GET /orgs/{org}/invitations/{invitation_id}/teams", "GET /orgs/{org}/issues", "GET /orgs/{org}/members", "GET /orgs/{org}/migrations", "GET /orgs/{org}/migrations/{migration_id}/repositories", "GET /orgs/{org}/outside_collaborators", "GET /orgs/{org}/packages", "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", "GET /orgs/{org}/projects", "GET /orgs/{org}/public_members", "GET /orgs/{org}/repos", "GET /orgs/{org}/secret-scanning/alerts", "GET /orgs/{org}/settings/billing/advanced-security", "GET /orgs/{org}/team-sync/groups", "GET /orgs/{org}/teams", "GET /orgs/{org}/teams/{team_slug}/discussions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", "GET /orgs/{org}/teams/{team_slug}/invitations", "GET /orgs/{org}/teams/{team_slug}/members", "GET /orgs/{org}/teams/{team_slug}/projects", "GET /orgs/{org}/teams/{team_slug}/repos", "GET /orgs/{org}/teams/{team_slug}/teams", "GET /projects/columns/{column_id}/cards", "GET /projects/{project_id}/collaborators", "GET /projects/{project_id}/columns", "GET /repos/{owner}/{repo}/actions/artifacts", "GET /repos/{owner}/{repo}/actions/caches", "GET /repos/{owner}/{repo}/actions/runners", "GET /repos/{owner}/{repo}/actions/runs", "GET 
/repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", "GET /repos/{owner}/{repo}/actions/secrets", "GET /repos/{owner}/{repo}/actions/workflows", "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", "GET /repos/{owner}/{repo}/assignees", "GET /repos/{owner}/{repo}/branches", "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", "GET /repos/{owner}/{repo}/code-scanning/alerts", "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", "GET /repos/{owner}/{repo}/code-scanning/analyses", "GET /repos/{owner}/{repo}/codespaces", "GET /repos/{owner}/{repo}/codespaces/devcontainers", "GET /repos/{owner}/{repo}/codespaces/secrets", "GET /repos/{owner}/{repo}/collaborators", "GET /repos/{owner}/{repo}/comments", "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/commits", "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", "GET /repos/{owner}/{repo}/commits/{ref}/status", "GET /repos/{owner}/{repo}/commits/{ref}/statuses", "GET /repos/{owner}/{repo}/contributors", "GET /repos/{owner}/{repo}/dependabot/secrets", "GET /repos/{owner}/{repo}/deployments", "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", "GET /repos/{owner}/{repo}/environments", "GET /repos/{owner}/{repo}/events", "GET /repos/{owner}/{repo}/forks", "GET /repos/{owner}/{repo}/git/matching-refs/{ref}", "GET /repos/{owner}/{repo}/hooks", "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", "GET /repos/{owner}/{repo}/invitations", "GET /repos/{owner}/{repo}/issues", "GET /repos/{owner}/{repo}/issues/comments", "GET 
/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/issues/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", "GET /repos/{owner}/{repo}/issues/{issue_number}/events", "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", "GET /repos/{owner}/{repo}/keys", "GET /repos/{owner}/{repo}/labels", "GET /repos/{owner}/{repo}/milestones", "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", "GET /repos/{owner}/{repo}/notifications", "GET /repos/{owner}/{repo}/pages/builds", "GET /repos/{owner}/{repo}/projects", "GET /repos/{owner}/{repo}/pulls", "GET /repos/{owner}/{repo}/pulls/comments", "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", "GET /repos/{owner}/{repo}/releases", "GET /repos/{owner}/{repo}/releases/{release_id}/assets", "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", "GET /repos/{owner}/{repo}/secret-scanning/alerts", "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", "GET /repos/{owner}/{repo}/stargazers", "GET /repos/{owner}/{repo}/subscribers", "GET /repos/{owner}/{repo}/tags", "GET /repos/{owner}/{repo}/teams", "GET /repos/{owner}/{repo}/topics", "GET /repositories", "GET /repositories/{repository_id}/environments/{environment_name}/secrets", "GET /search/code", "GET /search/commits", "GET /search/issues", "GET /search/labels", "GET /search/repositories", "GET /search/topics", "GET /search/users", "GET /teams/{team_id}/discussions", "GET 
/teams/{team_id}/discussions/{discussion_number}/comments", "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", "GET /teams/{team_id}/discussions/{discussion_number}/reactions", "GET /teams/{team_id}/invitations", "GET /teams/{team_id}/members", "GET /teams/{team_id}/projects", "GET /teams/{team_id}/repos", "GET /teams/{team_id}/teams", "GET /user/blocks", "GET /user/codespaces", "GET /user/codespaces/secrets", "GET /user/emails", "GET /user/followers", "GET /user/following", "GET /user/gpg_keys", "GET /user/installations", "GET /user/installations/{installation_id}/repositories", "GET /user/issues", "GET /user/keys", "GET /user/marketplace_purchases", "GET /user/marketplace_purchases/stubbed", "GET /user/memberships/orgs", "GET /user/migrations", "GET /user/migrations/{migration_id}/repositories", "GET /user/orgs", "GET /user/packages", "GET /user/packages/{package_type}/{package_name}/versions", "GET /user/public_emails", "GET /user/repos", "GET /user/repository_invitations", "GET /user/starred", "GET /user/subscriptions", "GET /user/teams", "GET /users", "GET /users/{username}/events", "GET /users/{username}/events/orgs/{org}", "GET /users/{username}/events/public", "GET /users/{username}/followers", "GET /users/{username}/following", "GET /users/{username}/gists", "GET /users/{username}/gpg_keys", "GET /users/{username}/keys", "GET /users/{username}/orgs", "GET /users/{username}/packages", "GET /users/{username}/projects", "GET /users/{username}/received_events", "GET /users/{username}/received_events/public", "GET /users/{username}/repos", "GET /users/{username}/starred", "GET /users/{username}/subscriptions"];
function isPaginatingEndpoint(arg) { function isPaginatingEndpoint(arg) {
if (typeof arg === "string") { if (typeof arg === "string") {
...@@ -3822,6 +3631,8 @@ function _defineProperty(obj, key, value) { ...@@ -3822,6 +3631,8 @@ function _defineProperty(obj, key, value) {
const Endpoints = { const Endpoints = {
actions: { actions: {
addCustomLabelsToSelfHostedRunnerForOrg: ["POST /orgs/{org}/actions/runners/{runner_id}/labels"],
addCustomLabelsToSelfHostedRunnerForRepo: ["POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"], approveWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve"],
cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"], cancelWorkflowRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel"],
...@@ -3833,6 +3644,8 @@ const Endpoints = { ...@@ -3833,6 +3644,8 @@ const Endpoints = {
createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"],
createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"], createRemoveTokenForRepo: ["POST /repos/{owner}/{repo}/actions/runners/remove-token"],
createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"], createWorkflowDispatch: ["POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches"],
deleteActionsCacheById: ["DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}"],
deleteActionsCacheByKey: ["DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}"],
deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], deleteArtifact: ["DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], deleteEnvironmentSecret: ["DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"],
...@@ -3849,11 +3662,19 @@ const Endpoints = { ...@@ -3849,11 +3662,19 @@ const Endpoints = {
downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"], downloadWorkflowRunLogs: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs"],
enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"], enableSelectedRepositoryGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories/{repository_id}"],
enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"], enableWorkflow: ["PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable"],
getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"],
getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"],
getActionsCacheUsageByRepoForOrg: ["GET /orgs/{org}/actions/cache/usage-by-repository"],
getActionsCacheUsageForEnterprise: ["GET /enterprises/{enterprise}/actions/cache/usage"],
getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"],
getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"], getAllowedActionsOrganization: ["GET /orgs/{org}/actions/permissions/selected-actions"],
getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"], getAllowedActionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/selected-actions"],
getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"],
getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"], getEnvironmentPublicKey: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key"],
getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"], getEnvironmentSecret: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}"],
getGithubActionsDefaultWorkflowPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/workflow"],
getGithubActionsDefaultWorkflowPermissionsOrganization: ["GET /orgs/{org}/actions/permissions/workflow"],
getGithubActionsDefaultWorkflowPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions/workflow"],
getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"], getGithubActionsPermissionsOrganization: ["GET /orgs/{org}/actions/permissions"],
getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"], getGithubActionsPermissionsRepository: ["GET /repos/{owner}/{repo}/actions/permissions"],
getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"],
...@@ -3869,6 +3690,7 @@ const Endpoints = { ...@@ -3869,6 +3690,7 @@ const Endpoints = {
getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"],
getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"], getSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}"],
getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"],
getWorkflowAccessToRepository: ["GET /repos/{owner}/{repo}/actions/permissions/access"],
getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"],
getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"], getWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}"],
getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"], getWorkflowRunUsage: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing"],
...@@ -3877,6 +3699,8 @@ const Endpoints = { ...@@ -3877,6 +3699,8 @@ const Endpoints = {
listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"], listEnvironmentSecrets: ["GET /repositories/{repository_id}/environments/{environment_name}/secrets"],
listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"], listJobsForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs"],
listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"], listJobsForWorkflowRunAttempt: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs"],
listLabelsForSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}/labels"],
listLabelsForSelfHostedRunnerForRepo: ["GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], listOrgSecrets: ["GET /orgs/{org}/actions/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"],
listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"],
...@@ -3889,14 +3713,27 @@ const Endpoints = { ...@@ -3889,14 +3713,27 @@ const Endpoints = {
listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"], listWorkflowRunArtifacts: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts"],
listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"], listWorkflowRuns: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs"],
listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"],
reRunJobForWorkflowRun: ["POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun"],
reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"],
reRunWorkflowFailedJobs: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs"],
removeAllCustomLabelsFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels"],
removeAllCustomLabelsFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
removeCustomLabelFromSelfHostedRunnerForOrg: ["DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}"],
removeCustomLabelFromSelfHostedRunnerForRepo: ["DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}"],
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"], removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}"],
reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"], reviewPendingDeploymentsForRun: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments"],
setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"], setAllowedActionsOrganization: ["PUT /orgs/{org}/actions/permissions/selected-actions"],
setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"], setAllowedActionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/selected-actions"],
setCustomLabelsForSelfHostedRunnerForOrg: ["PUT /orgs/{org}/actions/runners/{runner_id}/labels"],
setCustomLabelsForSelfHostedRunnerForRepo: ["PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels"],
setGithubActionsDefaultWorkflowPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/workflow"],
setGithubActionsDefaultWorkflowPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions/workflow"],
setGithubActionsDefaultWorkflowPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/workflow"],
setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"], setGithubActionsPermissionsOrganization: ["PUT /orgs/{org}/actions/permissions"],
setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"], setGithubActionsPermissionsRepository: ["PUT /repos/{owner}/{repo}/actions/permissions"],
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"], setSelectedReposForOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}/repositories"],
setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"] setSelectedRepositoriesEnabledGithubActionsOrganization: ["PUT /orgs/{org}/actions/permissions/repositories"],
setWorkflowAccessToRepository: ["PUT /repos/{owner}/{repo}/actions/permissions/access"]
}, },
activity: { activity: {
checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"], checkRepoIsStarredByAuthenticatedUser: ["GET /user/starred/{owner}/{repo}"],
...@@ -3937,16 +3774,6 @@ const Endpoints = { ...@@ -3937,16 +3774,6 @@ const Endpoints = {
}], }],
addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"], addRepoToInstallationForAuthenticatedUser: ["PUT /user/installations/{installation_id}/repositories/{repository_id}"],
checkToken: ["POST /applications/{client_id}/token"], checkToken: ["POST /applications/{client_id}/token"],
createContentAttachment: ["POST /content_references/{content_reference_id}/attachments", {
mediaType: {
previews: ["corsair"]
}
}],
createContentAttachmentForRepo: ["POST /repos/{owner}/{repo}/content_references/{content_reference_id}/attachments", {
mediaType: {
previews: ["corsair"]
}
}],
createFromManifest: ["POST /app-manifests/{code}/conversions"], createFromManifest: ["POST /app-manifests/{code}/conversions"],
createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"], createInstallationAccessToken: ["POST /app/installations/{installation_id}/access_tokens"],
deleteAuthorization: ["DELETE /applications/{client_id}/grant"], deleteAuthorization: ["DELETE /applications/{client_id}/grant"],
...@@ -3988,6 +3815,8 @@ const Endpoints = { ...@@ -3988,6 +3815,8 @@ const Endpoints = {
billing: { billing: {
getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"],
getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"], getGithubActionsBillingUser: ["GET /users/{username}/settings/billing/actions"],
getGithubAdvancedSecurityBillingGhe: ["GET /enterprises/{enterprise}/settings/billing/advanced-security"],
getGithubAdvancedSecurityBillingOrg: ["GET /orgs/{org}/settings/billing/advanced-security"],
getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"],
getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"], getGithubPackagesBillingUser: ["GET /users/{username}/settings/billing/packages"],
getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"], getSharedStorageBillingOrg: ["GET /orgs/{org}/settings/billing/shared-storage"],
...@@ -4017,6 +3846,7 @@ const Endpoints = { ...@@ -4017,6 +3846,7 @@ const Endpoints = {
getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"], getAnalysis: ["GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}"],
getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"],
listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"], listAlertInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances"],
listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"],
listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, { listAlertsInstances: ["GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", {}, {
renamed: ["codeScanning", "listAlertInstances"] renamed: ["codeScanning", "listAlertInstances"]
...@@ -4029,16 +3859,80 @@ const Endpoints = { ...@@ -4029,16 +3859,80 @@ const Endpoints = {
getAllCodesOfConduct: ["GET /codes_of_conduct"], getAllCodesOfConduct: ["GET /codes_of_conduct"],
getConductCode: ["GET /codes_of_conduct/{key}"] getConductCode: ["GET /codes_of_conduct/{key}"]
}, },
codespaces: {
addRepositoryForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
codespaceMachinesForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/machines"],
createForAuthenticatedUser: ["POST /user/codespaces"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
createOrUpdateSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}"],
createWithPrForAuthenticatedUser: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces"],
createWithRepoForAuthenticatedUser: ["POST /repos/{owner}/{repo}/codespaces"],
deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"],
deleteFromOrganization: ["DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
deleteSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}"],
exportForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/exports"],
getExportDetailsForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}/exports/{export_id}"],
getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"],
getPublicKeyForAuthenticatedUser: ["GET /user/codespaces/secrets/public-key"],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/codespaces/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}"],
getSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}"],
listDevcontainersInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/devcontainers"],
listForAuthenticatedUser: ["GET /user/codespaces"],
listInOrganization: ["GET /orgs/{org}/codespaces", {}, {
renamedParameters: {
org_id: "org"
}
}],
listInRepositoryForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"],
listRepositoriesForSecretForAuthenticatedUser: ["GET /user/codespaces/secrets/{secret_name}/repositories"],
listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"],
removeRepositoryForSecretForAuthenticatedUser: ["DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}"],
repoMachinesForAuthenticatedUser: ["GET /repos/{owner}/{repo}/codespaces/machines"],
setRepositoriesForSecretForAuthenticatedUser: ["PUT /user/codespaces/secrets/{secret_name}/repositories"],
startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"],
stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"],
stopInOrganization: ["POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop"],
updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"]
},
dependabot: {
addSelectedRepoToOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
createOrUpdateOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}"],
createOrUpdateRepoSecret: ["PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"],
deleteRepoSecret: ["DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"],
getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"],
getRepoPublicKey: ["GET /repos/{owner}/{repo}/dependabot/secrets/public-key"],
getRepoSecret: ["GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}"],
listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"],
listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"],
listSelectedReposForOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories"],
removeSelectedRepoFromOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}"],
setSelectedReposForOrgSecret: ["PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories"]
},
dependencyGraph: {
createRepositorySnapshot: ["POST /repos/{owner}/{repo}/dependency-graph/snapshots"],
diffRange: ["GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}"]
},
emojis: { emojis: {
get: ["GET /emojis"] get: ["GET /emojis"]
}, },
enterpriseAdmin: { enterpriseAdmin: {
addCustomLabelsToSelfHostedRunnerForEnterprise: ["POST /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], disableSelectedOrganizationGithubActionsEnterprise: ["DELETE /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"], enableSelectedOrganizationGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations/{org_id}"],
getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"], getAllowedActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/selected-actions"],
getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"], getGithubActionsPermissionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions"],
getServerStatistics: ["GET /enterprise-installation/{enterprise_or_org}/server-statistics"],
listLabelsForSelfHostedRunnerForEnterprise: ["GET /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"], listSelectedOrganizationsEnabledGithubActionsEnterprise: ["GET /enterprises/{enterprise}/actions/permissions/organizations"],
removeAllCustomLabelsFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
removeCustomLabelFromSelfHostedRunnerForEnterprise: ["DELETE /enterprises/{enterprise}/actions/runners/{runner_id}/labels/{name}"],
setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"], setAllowedActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/selected-actions"],
setCustomLabelsForSelfHostedRunnerForEnterprise: ["PUT /enterprises/{enterprise}/actions/runners/{runner_id}/labels"],
setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"], setGithubActionsPermissionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions"],
setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"] setSelectedOrganizationsEnabledGithubActionsEnterprise: ["PUT /enterprises/{enterprise}/actions/permissions/organizations"]
}, },
...@@ -4209,6 +4103,7 @@ const Endpoints = { ...@@ -4209,6 +4103,7 @@ const Endpoints = {
list: ["GET /organizations"], list: ["GET /organizations"],
listAppInstallations: ["GET /orgs/{org}/installations"], listAppInstallations: ["GET /orgs/{org}/installations"],
listBlockedUsers: ["GET /orgs/{org}/blocks"], listBlockedUsers: ["GET /orgs/{org}/blocks"],
listCustomRoles: ["GET /organizations/{organization_id}/custom_roles"],
listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], listFailedInvitations: ["GET /orgs/{org}/failed_invitations"],
listForAuthenticatedUser: ["GET /user/orgs"], listForAuthenticatedUser: ["GET /user/orgs"],
listForUser: ["GET /users/{username}/orgs"], listForUser: ["GET /users/{username}/orgs"],
...@@ -4337,12 +4232,14 @@ const Endpoints = { ...@@ -4337,12 +4232,14 @@ const Endpoints = {
deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"], deleteForIssue: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}"],
deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"], deleteForIssueComment: ["DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}"],
deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"], deleteForPullRequestComment: ["DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}"],
deleteForRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}"],
deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"], deleteForTeamDiscussion: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}"],
deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"], deleteForTeamDiscussionComment: ["DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}"],
listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"], listForCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}/reactions"],
listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"],
listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"], listForIssueComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions"],
listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"], listForPullRequestReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions"],
listForRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}/reactions"],
listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"], listForTeamDiscussionCommentInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions"],
listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"] listForTeamDiscussionInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions"]
}, },
...@@ -4366,6 +4263,7 @@ const Endpoints = { ...@@ -4366,6 +4263,7 @@ const Endpoints = {
}], }],
checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"],
checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"], checkVulnerabilityAlerts: ["GET /repos/{owner}/{repo}/vulnerability-alerts"],
codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"],
compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"],
compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"], compareCommitsWithBasehead: ["GET /repos/{owner}/{repo}/compare/{basehead}"],
createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], createAutolink: ["POST /repos/{owner}/{repo}/autolinks"],
...@@ -4383,6 +4281,7 @@ const Endpoints = { ...@@ -4383,6 +4281,7 @@ const Endpoints = {
createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"],
createPagesSite: ["POST /repos/{owner}/{repo}/pages"], createPagesSite: ["POST /repos/{owner}/{repo}/pages"],
createRelease: ["POST /repos/{owner}/{repo}/releases"], createRelease: ["POST /repos/{owner}/{repo}/releases"],
createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"],
createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"], createUsingTemplate: ["POST /repos/{template_owner}/{template_repo}/generate"],
createWebhook: ["POST /repos/{owner}/{repo}/hooks"], createWebhook: ["POST /repos/{owner}/{repo}/hooks"],
declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, { declineInvitation: ["DELETE /user/repository_invitations/{invitation_id}", {}, {
...@@ -4405,6 +4304,7 @@ const Endpoints = { ...@@ -4405,6 +4304,7 @@ const Endpoints = {
deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"], deletePullRequestReviewProtection: ["DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews"],
deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"],
deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"], deleteReleaseAsset: ["DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}"],
deleteTagProtection: ["DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}"],
deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"],
disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"], disableAutomatedSecurityFixes: ["DELETE /repos/{owner}/{repo}/automated-security-fixes"],
disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"], disableLfsForRepo: ["DELETE /repos/{owner}/{repo}/lfs"],
...@@ -4423,11 +4323,7 @@ const Endpoints = { ...@@ -4423,11 +4323,7 @@ const Endpoints = {
getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], getAdminBranchProtection: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"],
getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"], getAllStatusCheckContexts: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts"],
getAllTopics: ["GET /repos/{owner}/{repo}/topics", { getAllTopics: ["GET /repos/{owner}/{repo}/topics"],
mediaType: {
previews: ["mercy"]
}
}],
getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"], getAppsWithAccessToProtectedBranch: ["GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps"],
getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"],
getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"],
...@@ -4493,6 +4389,7 @@ const Endpoints = { ...@@ -4493,6 +4389,7 @@ const Endpoints = {
listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"], listPullRequestsAssociatedWithCommit: ["GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls"],
listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"], listReleaseAssets: ["GET /repos/{owner}/{repo}/releases/{release_id}/assets"],
listReleases: ["GET /repos/{owner}/{repo}/releases"], listReleases: ["GET /repos/{owner}/{repo}/releases"],
listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"],
listTags: ["GET /repos/{owner}/{repo}/tags"], listTags: ["GET /repos/{owner}/{repo}/tags"],
listTeams: ["GET /repos/{owner}/{repo}/teams"], listTeams: ["GET /repos/{owner}/{repo}/teams"],
listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"], listWebhookDeliveries: ["GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries"],
...@@ -4516,11 +4413,7 @@ const Endpoints = { ...@@ -4516,11 +4413,7 @@ const Endpoints = {
mapToData: "users" mapToData: "users"
}], }],
renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"],
replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics", { replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"],
mediaType: {
previews: ["mercy"]
}
}],
requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"],
setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"], setAdminBranchProtection: ["POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins"],
setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, { setAppAccessRestrictions: ["PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", {}, {
...@@ -4561,17 +4454,15 @@ const Endpoints = { ...@@ -4561,17 +4454,15 @@ const Endpoints = {
issuesAndPullRequests: ["GET /search/issues"], issuesAndPullRequests: ["GET /search/issues"],
labels: ["GET /search/labels"], labels: ["GET /search/labels"],
repos: ["GET /search/repositories"], repos: ["GET /search/repositories"],
topics: ["GET /search/topics", { topics: ["GET /search/topics"],
mediaType: {
previews: ["mercy"]
}
}],
users: ["GET /search/users"] users: ["GET /search/users"]
}, },
secretScanning: { secretScanning: {
getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"], getAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"],
listAlertsForEnterprise: ["GET /enterprises/{enterprise}/secret-scanning/alerts"],
listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"],
listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"],
listLocationsForAlert: ["GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations"],
updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"] updateAlert: ["PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}"]
}, },
teams: { teams: {
...@@ -4687,1185 +4578,1842 @@ const Endpoints = { ...@@ -4687,1185 +4578,1842 @@ const Endpoints = {
} }
}; };
const VERSION = "5.13.0"; const VERSION = "5.16.2";
/**
 * Build the `octokit.rest.*` method tree from the Endpoints route map.
 *
 * @param {object} octokit - Octokit instance providing `request.defaults()`.
 * @param {object} endpointsMap - { scope: { methodName: [route, defaults?, decorations?] } }.
 * @returns {object} { scope: { methodName: requestFn } } where each requestFn
 *   is either a plain `octokit.request.defaults(...)` function or a decorated
 *   wrapper (deprecations, renames, `mapToData`).
 */
function endpointsToMethods(octokit, endpointsMap) {
  const result = {};
  for (const [scope, endpoints] of Object.entries(endpointsMap)) {
    // Lazily create the per-scope container on first use.
    const scopeMethods = (result[scope] = result[scope] || {});
    for (const [methodName, endpoint] of Object.entries(endpoints)) {
      // Each endpoint tuple is ["METHOD /path", defaults?, decorations?].
      const [route, defaults, decorations] = endpoint;
      const [method, url] = route.split(" ");
      const endpointDefaults = Object.assign({
        method,
        url
      }, defaults);
      // Decorated endpoints get a wrapper that logs deprecations / remaps
      // parameters before delegating; plain ones bind the defaults directly.
      scopeMethods[methodName] = decorations
        ? decorate(octokit, scope, methodName, endpointDefaults, decorations)
        : octokit.request.defaults(endpointDefaults);
    }
  }
  return result;
}
/**
 * Wrap a request function with endpoint "decorations": deprecation warnings,
 * method renames, parameter renames, and the `mapToData` body remapping.
 * Returns a callable that also carries all properties of the underlying
 * `request.defaults(...)` function (e.g. `.endpoint`).
 */
function decorate(octokit, scope, methodName, defaults, decorations) {
  const requestWithDefaults = octokit.request.defaults(defaults);
  /* istanbul ignore next */
  function withDecorations(...args) {
    // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData`
    if (decorations.mapToData) {
      // Move the named parameter into `data` (request body) and clear the
      // original key so it is not also sent as a regular parameter.
      options = Object.assign({}, options, {
        data: options[decorations.mapToData],
        [decorations.mapToData]: undefined
      });
      return requestWithDefaults(options);
    }
    if (decorations.renamed) {
      // Warn-only: the request still goes through under the old name.
      const [newScope, newMethodName] = decorations.renamed;
      octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
    }
    if (decorations.deprecated) {
      octokit.log.warn(decorations.deprecated);
    }
    if (decorations.renamedParameters) {
      // Rewrite deprecated parameter names to their aliases (without
      // clobbering an alias the caller already supplied), warning once per
      // deprecated name actually used.
      // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
      const options = requestWithDefaults.endpoint.merge(...args);
      for (const [name, alias] of Object.entries(decorations.renamedParameters)) {
        if (name in options) {
          octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`);
          if (!(alias in options)) {
            options[alias] = options[name];
          }
          delete options[name];
        }
      }
      return requestWithDefaults(options);
    } // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488
    return requestWithDefaults(...args);
  }
  return Object.assign(withDecorations, requestWithDefaults);
}
/**
 * Octokit plugin entry point: exposes the generated endpoint methods
 * under `octokit.rest.*` only (no top-level scopes).
 *
 * @param {object} octokit - The Octokit instance being extended.
 * @returns {{ rest: object }} the generated method tree.
 */
function restEndpointMethods(octokit) {
  return {
    rest: endpointsToMethods(octokit, Endpoints)
  };
}
restEndpointMethods.VERSION = VERSION;
/**
 * Legacy plugin entry point: exposes the generated endpoint methods both at
 * the top level (`octokit.repos.*`, ...) and under `octokit.rest.*`.
 *
 * @param {object} octokit - The Octokit instance being extended.
 * @returns {object} all scopes spread at the top level plus a `rest` alias.
 */
function legacyRestEndpointMethods(octokit) {
  const methods = endpointsToMethods(octokit, Endpoints);
  // Spread every scope onto the returned object, then attach the same tree
  // again under `rest` for forward compatibility.
  const legacyApi = _objectSpread2(_objectSpread2({}, methods), {}, {
    rest: methods
  });
  return legacyApi;
}
legacyRestEndpointMethods.VERSION = VERSION;
exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
exports.restEndpointMethods = restEndpointMethods;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 537:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var deprecation = __nccwpck_require__(8932);
var once = _interopDefault(__nccwpck_require__(1223));
const logOnceCode = once(deprecation => console.warn(deprecation));
const logOnceHeaders = once(deprecation => console.warn(deprecation));
/**
* Error with extra properties to help with debugging
*/
/**
 * HTTP error raised for failed GitHub API requests.
 *
 * Carries `status`, a redacted copy of the originating `request`, and (when
 * available) the full `response`. The legacy `code` and `headers` accessors
 * are kept as deprecated getters that log once via the module-level
 * `logOnceCode` / `logOnceHeaders` wrappers.
 */
class RequestError extends Error {
  constructor(message, statusCode, options) {
    super(message); // Maintains proper stack trace (only available on V8)
    /* istanbul ignore next */
    if (Error.captureStackTrace) {
      Error.captureStackTrace(this, this.constructor);
    }
    this.name = "HttpError";
    this.status = statusCode;
    let headers;
    // Prefer explicitly-passed headers; a `response` (checked next) wins.
    if ("headers" in options && typeof options.headers !== "undefined") {
      headers = options.headers;
    }
    if ("response" in options) {
      this.response = options.response;
      headers = options.response.headers;
    } // redact request credentials without mutating original request options

    const requestCopy = Object.assign({}, options.request);
    if (options.request.headers.authorization) {
      // Keep the scheme (e.g. "token", "Bearer") but hide the credential.
      requestCopy.headers = Object.assign({}, options.request.headers, {
        authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]")
      });
    }
    requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit
    // see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications
    .replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended
    // see https://developer.github.com/v3/#oauth2-token-sent-in-a-header
    .replace(/\baccess_token=\w+/g, "access_token=[REDACTED]");
    this.request = requestCopy; // deprecations

    // `error.code` is deprecated in favor of `error.status`; reading it logs
    // a one-time deprecation warning.
    Object.defineProperty(this, "code", {
      get() {
        logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`."));
        return statusCode;
      }
    });
    // `error.headers` is deprecated in favor of `error.response.headers`.
    Object.defineProperty(this, "headers", {
      get() {
        logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`."));
        return headers || {};
      }
    });
  }
}
exports.RequestError = RequestError;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 6234:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; }
var endpoint = __nccwpck_require__(9440);
var universalUserAgent = __nccwpck_require__(5030);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);
const VERSION = "5.6.3";
/**
 * Read a fetch Response body as a raw ArrayBuffer.
 * Used as the fallback when the content-type is neither JSON nor text.
 */
function getBufferResponse(response) {
  return response.arrayBuffer();
}
/**
 * Execute a request via fetch (custom `request.fetch` or node-fetch) and
 * normalize the outcome:
 *  - 204/205 and successful HEAD resolve with `undefined` data
 *  - failed HEAD, 304, and >=400 reject with a RequestError carrying the
 *    parsed response
 *  - other statuses resolve with `{ status, url, headers, data }`
 *  - network-level failures are wrapped in a RequestError with status 500
 */
function fetchWrapper(requestOptions) {
  const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
  // Plain-object / array bodies are serialized; other bodies (strings,
  // streams, buffers) pass through untouched.
  if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
    requestOptions.body = JSON.stringify(requestOptions.body);
  }
  let headers = {};
  let status;
  let url;
  const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
  return fetch(requestOptions.url, Object.assign({
    method: requestOptions.method,
    body: requestOptions.body,
    headers: requestOptions.headers,
    redirect: requestOptions.redirect
  }, // `requestOptions.request.agent` type is incompatible
  // see https://github.com/octokit/types.ts/pull/264
  requestOptions.request)).then(async response => {
    url = response.url;
    status = response.status;
    // Flatten the Headers iterable into a plain lower-cased-key object.
    for (const keyAndValue of response.headers) {
      headers[keyAndValue[0]] = keyAndValue[1];
    }
    if ("deprecation" in headers) {
      // Surface GitHub's endpoint-deprecation headers (sunset date plus an
      // optional link from the `link` header) as a warning.
      const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/);
      const deprecationLink = matches && matches.pop();
      log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
    }
    if (status === 204 || status === 205) {
      return;
    } // GitHub API returns 200 for HEAD requests

    if (requestOptions.method === "HEAD") {
      if (status < 400) {
        return;
      }
      // HEAD has no body, so the error carries `data: undefined`.
      throw new requestError.RequestError(response.statusText, status, {
        response: {
          url,
          status,
          headers,
          data: undefined
        },
        request: requestOptions
      });
    }
    if (status === 304) {
      throw new requestError.RequestError("Not modified", status, {
        response: {
          url,
          status,
          headers,
          data: await getResponseData(response)
        },
        request: requestOptions
      });
    }
    if (status >= 400) {
      const data = await getResponseData(response);
      const error = new requestError.RequestError(toErrorMessage(data), status, {
        response: {
          url,
          status,
          headers,
          data
        },
        request: requestOptions
      });
      throw error;
    }
    return getResponseData(response);
  }).then(data => {
    return {
      status,
      url,
      headers,
      data
    };
  }).catch(error => {
    // RequestErrors thrown above pass through; anything else (DNS failure,
    // aborted socket, ...) is wrapped with a synthetic 500 status.
    if (error instanceof requestError.RequestError) throw error;
    throw new requestError.RequestError(error.message, 500, {
      request: requestOptions
    });
  });
}
/**
 * Parse a fetch Response body according to its content-type header:
 * JSON content yields a parsed object, textual (or missing) content yields a
 * string, and anything else falls back to an ArrayBuffer.
 */
async function getResponseData(response) {
  const contentType = response.headers.get("content-type");
  if (/application\/json/.test(contentType)) {
    return response.json();
  }
  // A missing content-type is treated as text, matching GitHub API behavior.
  const isTextual = !contentType || /^text\/|charset=utf-8$/.test(contentType);
  return isTextual ? response.text() : getBufferResponse(response);
}
function toErrorMessage(data) {
if (typeof data === "string") return data; // istanbul ignore else - just in case
if ("message" in data) {
if (Array.isArray(data.errors)) {
return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
}
function endpointsToMethods(octokit, endpointsMap) { return data.message;
const newMethods = {}; } // istanbul ignore next - just in case
for (const [scope, endpoints] of Object.entries(endpointsMap)) {
for (const [methodName, endpoint] of Object.entries(endpoints)) {
const [route, defaults, decorations] = endpoint;
const [method, url] = route.split(/ /);
const endpointDefaults = Object.assign({
method,
url
}, defaults);
if (!newMethods[scope]) { return `Unknown error: ${JSON.stringify(data)}`;
newMethods[scope] = {}; }
}
const scopeMethods = newMethods[scope]; function withDefaults(oldEndpoint, newDefaults) {
const endpoint = oldEndpoint.defaults(newDefaults);
if (decorations) { const newApi = function (route, parameters) {
scopeMethods[methodName] = decorate(octokit, scope, methodName, endpointDefaults, decorations); const endpointOptions = endpoint.merge(route, parameters);
continue;
}
scopeMethods[methodName] = octokit.request.defaults(endpointDefaults); if (!endpointOptions.request || !endpointOptions.request.hook) {
return fetchWrapper(endpoint.parse(endpointOptions));
} }
const request = (route, parameters) => {
return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters)));
};
Object.assign(request, {
endpoint,
defaults: withDefaults.bind(null, endpoint)
});
return endpointOptions.request.hook(request, endpointOptions);
};
return Object.assign(newApi, {
endpoint,
defaults: withDefaults.bind(null, endpoint)
});
}
const request = withDefaults(endpoint.endpoint, {
headers: {
"user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}`
} }
});
return newMethods; exports.request = request;
//# sourceMappingURL=index.js.map
/***/ }),
/***/ 3682:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
var register = __nccwpck_require__(4670)
var addHook = __nccwpck_require__(5549)
var removeHook = __nccwpck_require__(6819)
// bind with array of arguments: https://stackoverflow.com/a/21792913
var bind = Function.bind
var bindable = bind.bind(bind)
function bindApi (hook, state, name) {
var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state])
hook.api = { remove: removeHookRef }
hook.remove = removeHookRef
;['before', 'error', 'after', 'wrap'].forEach(function (kind) {
var args = name ? [state, kind, name] : [state, kind]
hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args)
})
} }
function decorate(octokit, scope, methodName, defaults, decorations) { function HookSingular () {
const requestWithDefaults = octokit.request.defaults(defaults); var singularHookName = 'h'
/* istanbul ignore next */ var singularHookState = {
registry: {}
}
var singularHook = register.bind(null, singularHookState, singularHookName)
bindApi(singularHook, singularHookState, singularHookName)
return singularHook
}
function withDecorations(...args) { function HookCollection () {
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 var state = {
let options = requestWithDefaults.endpoint.merge(...args); // There are currently no other decorations than `.mapToData` registry: {}
}
if (decorations.mapToData) { var hook = register.bind(null, state)
options = Object.assign({}, options, { bindApi(hook, state)
data: options[decorations.mapToData],
[decorations.mapToData]: undefined
});
return requestWithDefaults(options);
}
if (decorations.renamed) { return hook
const [newScope, newMethodName] = decorations.renamed; }
octokit.log.warn(`octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()`);
}
if (decorations.deprecated) { var collectionHookDeprecationMessageDisplayed = false
octokit.log.warn(decorations.deprecated); function Hook () {
} if (!collectionHookDeprecationMessageDisplayed) {
console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4')
collectionHookDeprecationMessageDisplayed = true
}
return HookCollection()
}
if (decorations.renamedParameters) { Hook.Singular = HookSingular.bind()
// @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 Hook.Collection = HookCollection.bind()
const options = requestWithDefaults.endpoint.merge(...args);
for (const [name, alias] of Object.entries(decorations.renamedParameters)) { module.exports = Hook
if (name in options) { // expose constructors as a named property for TypeScript
octokit.log.warn(`"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". Use "${alias}" instead`); module.exports.Hook = Hook
module.exports.Singular = Hook.Singular
module.exports.Collection = Hook.Collection
if (!(alias in options)) {
options[alias] = options[name];
}
delete options[name]; /***/ }),
}
}
return requestWithDefaults(options); /***/ 5549:
} // @ts-ignore https://github.com/microsoft/TypeScript/issues/25488 /***/ ((module) => {
module.exports = addHook;
return requestWithDefaults(...args); function addHook(state, kind, name, hook) {
var orig = hook;
if (!state.registry[name]) {
state.registry[name] = [];
} }
return Object.assign(withDecorations, requestWithDefaults); if (kind === "before") {
} hook = function (method, options) {
return Promise.resolve()
.then(orig.bind(null, options))
.then(method.bind(null, options));
};
}
function restEndpointMethods(octokit) { if (kind === "after") {
const api = endpointsToMethods(octokit, Endpoints); hook = function (method, options) {
return { var result;
rest: api return Promise.resolve()
}; .then(method.bind(null, options))
.then(function (result_) {
result = result_;
return orig(result, options);
})
.then(function () {
return result;
});
};
}
if (kind === "error") {
hook = function (method, options) {
return Promise.resolve()
.then(method.bind(null, options))
.catch(function (error) {
return orig(error, options);
});
};
}
state.registry[name].push({
hook: hook,
orig: orig,
});
} }
restEndpointMethods.VERSION = VERSION;
function legacyRestEndpointMethods(octokit) {
const api = endpointsToMethods(octokit, Endpoints); /***/ }),
return _objectSpread2(_objectSpread2({}, api), {}, {
rest: api /***/ 4670:
/***/ ((module) => {
module.exports = register;
function register(state, name, method, options) {
if (typeof method !== "function") {
throw new Error("method for before hook must be a function");
}
if (!options) {
options = {};
}
if (Array.isArray(name)) {
return name.reverse().reduce(function (callback, name) {
return register.bind(null, state, name, callback, options);
}, method)();
}
return Promise.resolve().then(function () {
if (!state.registry[name]) {
return method(options);
}
return state.registry[name].reduce(function (method, registered) {
return registered.hook.bind(null, method, options);
}, method)();
}); });
} }
legacyRestEndpointMethods.VERSION = VERSION;
exports.legacyRestEndpointMethods = legacyRestEndpointMethods;
exports.restEndpointMethods = restEndpointMethods; /***/ }),
//# sourceMappingURL=index.js.map
/***/ 6819:
/***/ ((module) => {
module.exports = removeHook;
function removeHook(state, name, method) {
if (!state.registry[name]) {
return;
}
var index = state.registry[name]
.map(function (registered) {
return registered.orig;
})
.indexOf(method);
if (index === -1) {
return;
}
state.registry[name].splice(index, 1);
}
/***/ }), /***/ }),
/***/ 537: /***/ 9051:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; "use strict";
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
Object.defineProperty(exports, "__esModule", ({ value: true })); /* istanbul ignore next */
const LCHOWN = fs.lchown ? 'lchown' : 'chown'
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } /* istanbul ignore next */
const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync'
var deprecation = __nccwpck_require__(8932); /* istanbul ignore next */
var once = _interopDefault(__nccwpck_require__(1223)); const needEISDIRHandled = fs.lchown &&
!process.version.match(/v1[1-9]+\./) &&
!process.version.match(/v10\.[6-9]/)
const logOnceCode = once(deprecation => console.warn(deprecation)); const lchownSync = (path, uid, gid) => {
const logOnceHeaders = once(deprecation => console.warn(deprecation)); try {
/** return fs[LCHOWNSYNC](path, uid, gid)
* Error with extra properties to help with debugging } catch (er) {
*/ if (er.code !== 'ENOENT')
throw er
}
}
class RequestError extends Error { /* istanbul ignore next */
constructor(message, statusCode, options) { const chownSync = (path, uid, gid) => {
super(message); // Maintains proper stack trace (only available on V8) try {
return fs.chownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'ENOENT')
throw er
}
}
/* istanbul ignore next */ /* istanbul ignore next */
const handleEISDIR =
needEISDIRHandled ? (path, uid, gid, cb) => er => {
// Node prior to v10 had a very questionable implementation of
// fs.lchown, which would always try to call fs.open on a directory
// Fall back to fs.chown in those cases.
if (!er || er.code !== 'EISDIR')
cb(er)
else
fs.chown(path, uid, gid, cb)
}
: (_, __, ___, cb) => cb
if (Error.captureStackTrace) { /* istanbul ignore next */
Error.captureStackTrace(this, this.constructor); const handleEISDirSync =
needEISDIRHandled ? (path, uid, gid) => {
try {
return lchownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'EISDIR')
throw er
chownSync(path, uid, gid)
} }
}
: (path, uid, gid) => lchownSync(path, uid, gid)
this.name = "HttpError"; // fs.readdir could only accept an options object as of node v6
this.status = statusCode; const nodeVersion = process.version
let headers; let readdir = (path, options, cb) => fs.readdir(path, options, cb)
let readdirSync = (path, options) => fs.readdirSync(path, options)
/* istanbul ignore next */
if (/^v4\./.test(nodeVersion))
readdir = (path, options, cb) => fs.readdir(path, cb)
if ("headers" in options && typeof options.headers !== "undefined") { const chown = (cpath, uid, gid, cb) => {
headers = options.headers; fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
} // Skip ENOENT error
cb(er && er.code !== 'ENOENT' ? er : null)
}))
}
if ("response" in options) { const chownrKid = (p, child, uid, gid, cb) => {
this.response = options.response; if (typeof child === 'string')
headers = options.response.headers; return fs.lstat(path.resolve(p, child), (er, stats) => {
} // redact request credentials without mutating original request options // Skip ENOENT error
if (er)
return cb(er.code !== 'ENOENT' ? er : null)
stats.name = child
chownrKid(p, stats, uid, gid, cb)
})
if (child.isDirectory()) {
chownr(path.resolve(p, child.name), uid, gid, er => {
if (er)
return cb(er)
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
})
} else {
const cpath = path.resolve(p, child.name)
chown(cpath, uid, gid, cb)
}
}
const requestCopy = Object.assign({}, options.request);
if (options.request.headers.authorization) { const chownr = (p, uid, gid, cb) => {
requestCopy.headers = Object.assign({}, options.request.headers, { readdir(p, { withFileTypes: true }, (er, children) => {
authorization: options.request.headers.authorization.replace(/ .*$/, " [REDACTED]") // any error other than ENOTDIR or ENOTSUP means it's not readable,
}); // or doesn't exist. give up.
if (er) {
if (er.code === 'ENOENT')
return cb()
else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
return cb(er)
} }
if (er || !children.length)
return chown(p, uid, gid, cb)
requestCopy.url = requestCopy.url // client_id & client_secret can be passed as URL query parameters to increase rate limit let len = children.length
// see https://developer.github.com/v3/#increasing-the-unauthenticated-rate-limit-for-oauth-applications let errState = null
.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]") // OAuth tokens can be passed as URL query parameters, although it is not recommended const then = er => {
// see https://developer.github.com/v3/#oauth2-token-sent-in-a-header if (errState)
.replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); return
this.request = requestCopy; // deprecations if (er)
return cb(errState = er)
if (-- len === 0)
return chown(p, uid, gid, cb)
}
Object.defineProperty(this, "code", { children.forEach(child => chownrKid(p, child, uid, gid, then))
get() { })
logOnceCode(new deprecation.Deprecation("[@octokit/request-error] `error.code` is deprecated, use `error.status`.")); }
return statusCode;
}
}); const chownrKidSync = (p, child, uid, gid) => {
Object.defineProperty(this, "headers", { if (typeof child === 'string') {
get() { try {
logOnceHeaders(new deprecation.Deprecation("[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`.")); const stats = fs.lstatSync(path.resolve(p, child))
return headers || {}; stats.name = child
} child = stats
} catch (er) {
if (er.code === 'ENOENT')
return
else
throw er
}
}
}); if (child.isDirectory())
chownrSync(path.resolve(p, child.name), uid, gid)
handleEISDirSync(path.resolve(p, child.name), uid, gid)
}
const chownrSync = (p, uid, gid) => {
let children
try {
children = readdirSync(p, { withFileTypes: true })
} catch (er) {
if (er.code === 'ENOENT')
return
else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
return handleEISDirSync(p, uid, gid)
else
throw er
} }
if (children && children.length)
children.forEach(child => chownrKidSync(p, child, uid, gid))
return handleEISDirSync(p, uid, gid)
} }
exports.RequestError = RequestError; module.exports = chownr
//# sourceMappingURL=index.js.map chownr.sync = chownrSync
/***/ }), /***/ }),
/***/ 6234: /***/ 8932:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { /***/ ((__unused_webpack_module, exports) => {
"use strict"; "use strict";
Object.defineProperty(exports, "__esModule", ({ value: true })); Object.defineProperty(exports, "__esModule", ({ value: true }));
function _interopDefault (ex) { return (ex && (typeof ex === 'object') && 'default' in ex) ? ex['default'] : ex; } class Deprecation extends Error {
constructor(message) {
var endpoint = __nccwpck_require__(9440); super(message); // Maintains proper stack trace (only available on V8)
var universalUserAgent = __nccwpck_require__(5030);
var isPlainObject = __nccwpck_require__(3287);
var nodeFetch = _interopDefault(__nccwpck_require__(467));
var requestError = __nccwpck_require__(537);
const VERSION = "5.6.3";
function getBufferResponse(response) {
return response.arrayBuffer();
}
function fetchWrapper(requestOptions) {
const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console;
if (isPlainObject.isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) {
requestOptions.body = JSON.stringify(requestOptions.body);
}
let headers = {}; /* istanbul ignore next */
let status;
let url;
const fetch = requestOptions.request && requestOptions.request.fetch || nodeFetch;
return fetch(requestOptions.url, Object.assign({
method: requestOptions.method,
body: requestOptions.body,
headers: requestOptions.headers,
redirect: requestOptions.redirect
}, // `requestOptions.request.agent` type is incompatible
// see https://github.com/octokit/types.ts/pull/264
requestOptions.request)).then(async response => {
url = response.url;
status = response.status;
for (const keyAndValue of response.headers) { if (Error.captureStackTrace) {
headers[keyAndValue[0]] = keyAndValue[1]; Error.captureStackTrace(this, this.constructor);
} }
if ("deprecation" in headers) { this.name = 'Deprecation';
const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); }
const deprecationLink = matches && matches.pop();
log.warn(`[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}`);
}
if (status === 204 || status === 205) { }
return;
} // GitHub API returns 200 for HEAD requests
exports.Deprecation = Deprecation;
if (requestOptions.method === "HEAD") {
if (status < 400) {
return;
}
throw new requestError.RequestError(response.statusText, status, { /***/ }),
response: {
url,
status,
headers,
data: undefined
},
request: requestOptions
});
}
if (status === 304) { /***/ 7714:
throw new requestError.RequestError("Not modified", status, { /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
response: {
url,
status,
headers,
data: await getResponseData(response)
},
request: requestOptions
});
}
if (status >= 400) { "use strict";
const data = await getResponseData(response);
const error = new requestError.RequestError(toErrorMessage(data), status, {
response: {
url,
status,
headers,
data
},
request: requestOptions
});
throw error;
}
return getResponseData(response); const MiniPass = __nccwpck_require__(2505)
}).then(data => { const EE = (__nccwpck_require__(2361).EventEmitter)
return { const fs = __nccwpck_require__(7147)
status,
url,
headers,
data
};
}).catch(error => {
if (error instanceof requestError.RequestError) throw error;
throw new requestError.RequestError(error.message, 500, {
request: requestOptions
});
});
}
async function getResponseData(response) { let writev = fs.writev
const contentType = response.headers.get("content-type"); /* istanbul ignore next */
if (!writev) {
// This entire block can be removed if support for earlier than Node.js
// 12.9.0 is not needed.
const binding = process.binding('fs')
const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
if (/application\/json/.test(contentType)) { writev = (fd, iovec, pos, cb) => {
return response.json(); const done = (er, bw) => cb(er, bw, iovec)
const req = new FSReqWrap()
req.oncomplete = done
binding.writeBuffers(fd, iovec, pos, req)
} }
}
if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { const _autoClose = Symbol('_autoClose')
return response.text(); const _close = Symbol('_close')
} const _ended = Symbol('_ended')
const _fd = Symbol('_fd')
const _finished = Symbol('_finished')
const _flags = Symbol('_flags')
const _flush = Symbol('_flush')
const _handleChunk = Symbol('_handleChunk')
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')
const _errored = Symbol('_errored')
return getBufferResponse(response); class ReadStream extends MiniPass {
} constructor (path, opt) {
opt = opt || {}
super(opt)
function toErrorMessage(data) { this.readable = true
if (typeof data === "string") return data; // istanbul ignore else - just in case this.writable = false
if ("message" in data) { if (typeof path !== 'string')
if (Array.isArray(data.errors)) { throw new TypeError('path must be a string')
return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}`;
}
return data.message; this[_errored] = false
} // istanbul ignore next - just in case this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_path] = path
this[_readSize] = opt.readSize || 16*1024*1024
this[_reading] = false
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
this[_remain] = this[_size]
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
if (typeof this[_fd] === 'number')
this[_read]()
else
this[_open]()
}
return `Unknown error: ${JSON.stringify(data)}`; get fd () { return this[_fd] }
} get path () { return this[_path] }
function withDefaults(oldEndpoint, newDefaults) { write () {
const endpoint = oldEndpoint.defaults(newDefaults); throw new TypeError('this is a readable stream')
}
const newApi = function (route, parameters) { end () {
const endpointOptions = endpoint.merge(route, parameters); throw new TypeError('this is a readable stream')
}
if (!endpointOptions.request || !endpointOptions.request.hook) { [_open] () {
return fetchWrapper(endpoint.parse(endpointOptions)); fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd))
} }
const request = (route, parameters) => { [_onopen] (er, fd) {
return fetchWrapper(endpoint.parse(endpoint.merge(route, parameters))); if (er)
}; this[_onerror](er)
else {
this[_fd] = fd
this.emit('open', fd)
this[_read]()
}
}
Object.assign(request, { [_makeBuf] () {
endpoint, return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain]))
defaults: withDefaults.bind(null, endpoint) }
});
return endpointOptions.request.hook(request, endpointOptions);
};
return Object.assign(newApi, { [_read] () {
endpoint, if (!this[_reading]) {
defaults: withDefaults.bind(null, endpoint) this[_reading] = true
}); const buf = this[_makeBuf]()
} /* istanbul ignore if */
if (buf.length === 0)
return process.nextTick(() => this[_onread](null, 0, buf))
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
this[_onread](er, br, buf))
}
}
const request = withDefaults(endpoint.endpoint, { [_onread] (er, br, buf) {
headers: { this[_reading] = false
"user-agent": `octokit-request.js/${VERSION} ${universalUserAgent.getUserAgent()}` if (er)
this[_onerror](er)
else if (this[_handleChunk](br, buf))
this[_read]()
} }
});
exports.request = request; [_close] () {
//# sourceMappingURL=index.js.map if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd]
this[_fd] = null
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
}
}
[_onerror] (er) {
this[_reading] = true
this[_close]()
this.emit('error', er)
}
/***/ }), [_handleChunk] (br, buf) {
let ret = false
// no effect if infinite
this[_remain] -= br
if (br > 0)
ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
/***/ 3682: if (br === 0 || this[_remain] <= 0) {
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { ret = false
this[_close]()
super.end()
}
var register = __nccwpck_require__(4670) return ret
var addHook = __nccwpck_require__(5549) }
var removeHook = __nccwpck_require__(6819)
// bind with array of arguments: https://stackoverflow.com/a/21792913 emit (ev, data) {
var bind = Function.bind switch (ev) {
var bindable = bind.bind(bind) case 'prefinish':
case 'finish':
break
function bindApi (hook, state, name) { case 'drain':
var removeHookRef = bindable(removeHook, null).apply(null, name ? [state, name] : [state]) if (typeof this[_fd] === 'number')
hook.api = { remove: removeHookRef } this[_read]()
hook.remove = removeHookRef break
;['before', 'error', 'after', 'wrap'].forEach(function (kind) { case 'error':
var args = name ? [state, kind, name] : [state, kind] if (this[_errored])
hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args) return
}) this[_errored] = true
} return super.emit(ev, data)
function HookSingular () { default:
var singularHookName = 'h' return super.emit(ev, data)
var singularHookState = { }
registry: {}
} }
var singularHook = register.bind(null, singularHookState, singularHookName)
bindApi(singularHook, singularHookState, singularHookName)
return singularHook
} }
function HookCollection () { class ReadStreamSync extends ReadStream {
var state = { [_open] () {
registry: {} let threw = true
try {
this[_onopen](null, fs.openSync(this[_path], 'r'))
threw = false
} finally {
if (threw)
this[_close]()
}
} }
var hook = register.bind(null, state) [_read] () {
bindApi(hook, state) let threw = true
try {
return hook if (!this[_reading]) {
} this[_reading] = true
do {
const buf = this[_makeBuf]()
/* istanbul ignore next */
const br = buf.length === 0 ? 0
: fs.readSync(this[_fd], buf, 0, buf.length, null)
if (!this[_handleChunk](br, buf))
break
} while (true)
this[_reading] = false
}
threw = false
} finally {
if (threw)
this[_close]()
}
}
var collectionHookDeprecationMessageDisplayed = false [_close] () {
function Hook () { if (this[_autoClose] && typeof this[_fd] === 'number') {
if (!collectionHookDeprecationMessageDisplayed) { const fd = this[_fd]
console.warn('[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4') this[_fd] = null
collectionHookDeprecationMessageDisplayed = true fs.closeSync(fd)
this.emit('close')
}
} }
return HookCollection()
} }
Hook.Singular = HookSingular.bind() class WriteStream extends EE {
Hook.Collection = HookCollection.bind() constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = false
this.writable = true
this[_errored] = false
this[_writing] = false
this[_ended] = false
this[_needDrain] = false
this[_queue] = []
this[_path] = path
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
this[_pos] = typeof opt.start === 'number' ? opt.start : null
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
module.exports = Hook // truncating makes no sense when writing into the middle
// expose constructors as a named property for TypeScript const defaultFlag = this[_pos] !== null ? 'r+' : 'w'
module.exports.Hook = Hook this[_defaultFlag] = opt.flags === undefined
module.exports.Singular = Hook.Singular this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags
module.exports.Collection = Hook.Collection
if (this[_fd] === null)
this[_open]()
}
/***/ }), emit (ev, data) {
if (ev === 'error') {
if (this[_errored])
return
this[_errored] = true
}
return super.emit(ev, data)
}
/***/ 5549:
/***/ ((module) => {
module.exports = addHook; get fd () { return this[_fd] }
get path () { return this[_path] }
function addHook(state, kind, name, hook) { [_onerror] (er) {
var orig = hook; this[_close]()
if (!state.registry[name]) { this[_writing] = true
state.registry[name] = []; this.emit('error', er)
} }
if (kind === "before") { [_open] () {
hook = function (method, options) { fs.open(this[_path], this[_flags], this[_mode],
return Promise.resolve() (er, fd) => this[_onopen](er, fd))
.then(orig.bind(null, options))
.then(method.bind(null, options));
};
} }
if (kind === "after") { [_onopen] (er, fd) {
hook = function (method, options) { if (this[_defaultFlag] &&
var result; this[_flags] === 'r+' &&
return Promise.resolve() er && er.code === 'ENOENT') {
.then(method.bind(null, options)) this[_flags] = 'w'
.then(function (result_) { this[_open]()
result = result_; } else if (er)
return orig(result, options); this[_onerror](er)
}) else {
.then(function () { this[_fd] = fd
return result; this.emit('open', fd)
}); this[_flush]()
}; }
} }
if (kind === "error") { end (buf, enc) {
hook = function (method, options) { if (buf)
return Promise.resolve() this.write(buf, enc)
.then(method.bind(null, options))
.catch(function (error) { this[_ended] = true
return orig(error, options);
}); // synthetic after-write logic, where drain/finish live
}; if (!this[_writing] && !this[_queue].length &&
typeof this[_fd] === 'number')
this[_onwrite](null, 0)
return this
} }
state.registry[name].push({ write (buf, enc) {
hook: hook, if (typeof buf === 'string')
orig: orig, buf = Buffer.from(buf, enc)
});
}
if (this[_ended]) {
this.emit('error', new Error('write() after end()'))
return false
}
/***/ }), if (this[_fd] === null || this[_writing] || this[_queue].length) {
this[_queue].push(buf)
this[_needDrain] = true
return false
}
/***/ 4670: this[_writing] = true
/***/ ((module) => { this[_write](buf)
return true
}
module.exports = register; [_write] (buf) {
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) =>
this[_onwrite](er, bw))
}
function register(state, name, method, options) { [_onwrite] (er, bw) {
if (typeof method !== "function") { if (er)
throw new Error("method for before hook must be a function"); this[_onerror](er)
else {
if (this[_pos] !== null)
this[_pos] += bw
if (this[_queue].length)
this[_flush]()
else {
this[_writing] = false
if (this[_ended] && !this[_finished]) {
this[_finished] = true
this[_close]()
this.emit('finish')
} else if (this[_needDrain]) {
this[_needDrain] = false
this.emit('drain')
}
}
}
}
[_flush] () {
if (this[_queue].length === 0) {
if (this[_ended])
this[_onwrite](null, 0)
} else if (this[_queue].length === 1)
this[_write](this[_queue].pop())
else {
const iovec = this[_queue]
this[_queue] = []
writev(this[_fd], iovec, this[_pos],
(er, bw) => this[_onwrite](er, bw))
}
} }
if (!options) { [_close] () {
options = {}; if (this[_autoClose] && typeof this[_fd] === 'number') {
const fd = this[_fd]
this[_fd] = null
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
}
} }
}
if (Array.isArray(name)) { class WriteStreamSync extends WriteStream {
return name.reverse().reduce(function (callback, name) { [_open] () {
return register.bind(null, state, name, callback, options); let fd
}, method)(); // only wrap in a try{} block if we know we'll retry, to avoid
// the rethrow obscuring the error's source frame in most cases.
if (this[_defaultFlag] && this[_flags] === 'r+') {
try {
fd = fs.openSync(this[_path], this[_flags], this[_mode])
} catch (er) {
if (er.code === 'ENOENT') {
this[_flags] = 'w'
return this[_open]()
} else
throw er
}
} else
fd = fs.openSync(this[_path], this[_flags], this[_mode])
this[_onopen](null, fd)
} }
return Promise.resolve().then(function () { [_close] () {
if (!state.registry[name]) { if (this[_autoClose] && typeof this[_fd] === 'number') {
return method(options); const fd = this[_fd]
this[_fd] = null
fs.closeSync(fd)
this.emit('close')
} }
}
return state.registry[name].reduce(function (method, registered) { [_write] (buf) {
return registered.hook.bind(null, method, options); // throw the original, but try to close if it fails
}, method)(); let threw = true
}); try {
this[_onwrite](null,
fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos]))
threw = false
} finally {
if (threw)
try { this[_close]() } catch (_) {}
}
}
} }
exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync
/***/ }), exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync
/***/ 6819:
/***/ ((module) => {
module.exports = removeHook;
function removeHook(state, name, method) { /***/ }),
if (!state.registry[name]) {
return;
}
var index = state.registry[name] /***/ 2505:
.map(function (registered) { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
return registered.orig;
})
.indexOf(method);
if (index === -1) { "use strict";
return;
}
state.registry[name].splice(index, 1); const proc = typeof process === 'object' && process ? process : {
stdout: null,
stderr: null,
} }
const EE = __nccwpck_require__(2361)
const Stream = __nccwpck_require__(2781)
const SD = (__nccwpck_require__(1576).StringDecoder)
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async')
/***/ }), const defer = fn => Promise.resolve().then(fn)
/***/ 9051:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; // TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|| Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
|| Symbol('iterator not implemented')
const fs = __nccwpck_require__(7147) // events that mean 'the stream is over'
const path = __nccwpck_require__(1017) // these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev =>
ev === 'end' ||
ev === 'finish' ||
ev === 'prefinish'
/* istanbul ignore next */ const isArrayBuffer = b => b instanceof ArrayBuffer ||
const LCHOWN = fs.lchown ? 'lchown' : 'chown' typeof b === 'object' &&
/* istanbul ignore next */ b.constructor &&
const LCHOWNSYNC = fs.lchownSync ? 'lchownSync' : 'chownSync' b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0
/* istanbul ignore next */ const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
const needEISDIRHandled = fs.lchown &&
!process.version.match(/v1[1-9]+\./) &&
!process.version.match(/v10\.[6-9]/)
const lchownSync = (path, uid, gid) => { class Pipe {
try { constructor (src, dest, opts) {
return fs[LCHOWNSYNC](path, uid, gid) this.src = src
} catch (er) { this.dest = dest
if (er.code !== 'ENOENT') this.opts = opts
throw er this.ondrain = () => src[RESUME]()
dest.on('drain', this.ondrain)
} }
} unpipe () {
this.dest.removeListener('drain', this.ondrain)
/* istanbul ignore next */ }
const chownSync = (path, uid, gid) => { // istanbul ignore next - only here for the prototype
try { proxyErrors () {}
return fs.chownSync(path, uid, gid) end () {
} catch (er) { this.unpipe()
if (er.code !== 'ENOENT') if (this.opts.end)
throw er this.dest.end()
} }
} }
/* istanbul ignore next */ class PipeProxyErrors extends Pipe {
const handleEISDIR = unpipe () {
needEISDIRHandled ? (path, uid, gid, cb) => er => { this.src.removeListener('error', this.proxyErrors)
// Node prior to v10 had a very questionable implementation of super.unpipe()
// fs.lchown, which would always try to call fs.open on a directory
// Fall back to fs.chown in those cases.
if (!er || er.code !== 'EISDIR')
cb(er)
else
fs.chown(path, uid, gid, cb)
} }
: (_, __, ___, cb) => cb constructor (src, dest, opts) {
super(src, dest, opts)
/* istanbul ignore next */ this.proxyErrors = er => dest.emit('error', er)
const handleEISDirSync = src.on('error', this.proxyErrors)
needEISDIRHandled ? (path, uid, gid) => {
try {
return lchownSync(path, uid, gid)
} catch (er) {
if (er.code !== 'EISDIR')
throw er
chownSync(path, uid, gid)
}
} }
: (path, uid, gid) => lchownSync(path, uid, gid)
// fs.readdir could only accept an options object as of node v6
const nodeVersion = process.version
let readdir = (path, options, cb) => fs.readdir(path, options, cb)
let readdirSync = (path, options) => fs.readdirSync(path, options)
/* istanbul ignore next */
if (/^v4\./.test(nodeVersion))
readdir = (path, options, cb) => fs.readdir(path, cb)
const chown = (cpath, uid, gid, cb) => {
fs[LCHOWN](cpath, uid, gid, handleEISDIR(cpath, uid, gid, er => {
// Skip ENOENT error
cb(er && er.code !== 'ENOENT' ? er : null)
}))
} }
const chownrKid = (p, child, uid, gid, cb) => { module.exports = class Minipass extends Stream {
if (typeof child === 'string') constructor (options) {
return fs.lstat(path.resolve(p, child), (er, stats) => { super()
// Skip ENOENT error this[FLOWING] = false
if (er) // whether we're explicitly paused
return cb(er.code !== 'ENOENT' ? er : null) this[PAUSED] = false
stats.name = child this.pipes = []
chownrKid(p, stats, uid, gid, cb) this.buffer = []
}) this[OBJECTMODE] = options && options.objectMode || false
if (this[OBJECTMODE])
if (child.isDirectory()) { this[ENCODING] = null
chownr(path.resolve(p, child.name), uid, gid, er => { else
if (er) this[ENCODING] = options && options.encoding || null
return cb(er) if (this[ENCODING] === 'buffer')
const cpath = path.resolve(p, child.name) this[ENCODING] = null
chown(cpath, uid, gid, cb) this[ASYNC] = options && !!options.async || false
}) this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
} else { this[EOF] = false
const cpath = path.resolve(p, child.name) this[EMITTED_END] = false
chown(cpath, uid, gid, cb) this[EMITTING_END] = false
this[CLOSED] = false
this[EMITTED_ERROR] = null
this.writable = true
this.readable = true
this[BUFFERLENGTH] = 0
this[DESTROYED] = false
} }
}
const chownr = (p, uid, gid, cb) => {
readdir(p, { withFileTypes: true }, (er, children) => {
// any error other than ENOTDIR or ENOTSUP means it's not readable,
// or doesn't exist. give up.
if (er) {
if (er.code === 'ENOENT')
return cb()
else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP')
return cb(er)
}
if (er || !children.length)
return chown(p, uid, gid, cb)
let len = children.length get bufferLength () { return this[BUFFERLENGTH] }
let errState = null
const then = er => {
if (errState)
return
if (er)
return cb(errState = er)
if (-- len === 0)
return chown(p, uid, gid, cb)
}
children.forEach(child => chownrKid(p, child, uid, gid, then)) get encoding () { return this[ENCODING] }
}) set encoding (enc) {
} if (this[OBJECTMODE])
throw new Error('cannot set encoding in objectMode')
const chownrKidSync = (p, child, uid, gid) => { if (this[ENCODING] && enc !== this[ENCODING] &&
if (typeof child === 'string') { (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
try { throw new Error('cannot change encoding')
const stats = fs.lstatSync(path.resolve(p, child))
stats.name = child if (this[ENCODING] !== enc) {
child = stats this[DECODER] = enc ? new SD(enc) : null
} catch (er) { if (this.buffer.length)
if (er.code === 'ENOENT') this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
return
else
throw er
} }
this[ENCODING] = enc
} }
if (child.isDirectory()) setEncoding (enc) {
chownrSync(path.resolve(p, child.name), uid, gid) this.encoding = enc
}
handleEISDirSync(path.resolve(p, child.name), uid, gid) get objectMode () { return this[OBJECTMODE] }
} set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
const chownrSync = (p, uid, gid) => { get ['async'] () { return this[ASYNC] }
let children set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
try {
children = readdirSync(p, { withFileTypes: true })
} catch (er) {
if (er.code === 'ENOENT')
return
else if (er.code === 'ENOTDIR' || er.code === 'ENOTSUP')
return handleEISDirSync(p, uid, gid)
else
throw er
}
if (children && children.length) write (chunk, encoding, cb) {
children.forEach(child => chownrKidSync(p, child, uid, gid)) if (this[EOF])
throw new Error('write after end')
return handleEISDirSync(p, uid, gid) if (this[DESTROYED]) {
} this.emit('error', Object.assign(
new Error('Cannot call write after a stream was destroyed'),
{ code: 'ERR_STREAM_DESTROYED' }
))
return true
}
module.exports = chownr if (typeof encoding === 'function')
chownr.sync = chownrSync cb = encoding, encoding = 'utf8'
if (!encoding)
encoding = 'utf8'
/***/ }), const fn = this[ASYNC] ? defer : f => f()
/***/ 8932: // convert array buffers and typed array views into buffers
/***/ ((__unused_webpack_module, exports) => { // at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
// anything else switches us into object mode
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk))
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk))
chunk = Buffer.from(chunk)
else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set
this.objectMode = true
}
"use strict"; // handle object mode up front, since it's simpler
// this yields better performance, fewer checks later.
if (this[OBJECTMODE]) {
/* istanbul ignore if - maybe impossible? */
if (this.flowing && this[BUFFERLENGTH] !== 0)
this[FLUSH](true)
if (this.flowing)
this.emit('data', chunk)
else
this[BUFFERPUSH](chunk)
Object.defineProperty(exports, "__esModule", ({ value: true })); if (this[BUFFERLENGTH] !== 0)
this.emit('readable')
class Deprecation extends Error { if (cb)
constructor(message) { fn(cb)
super(message); // Maintains proper stack trace (only available on V8)
/* istanbul ignore next */ return this.flowing
}
if (Error.captureStackTrace) { // at this point the chunk is a buffer or string
Error.captureStackTrace(this, this.constructor); // don't buffer it up or send it to the decoder
if (!chunk.length) {
if (this[BUFFERLENGTH] !== 0)
this.emit('readable')
if (cb)
fn(cb)
return this.flowing
} }
this.name = 'Deprecation'; // fast-path writing strings of same encoding to a stream with
} // an empty buffer, skipping the buffer/decoder dance
if (typeof chunk === 'string' &&
// unless it is a string already ready for us to use
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
chunk = Buffer.from(chunk, encoding)
}
} if (Buffer.isBuffer(chunk) && this[ENCODING])
chunk = this[DECODER].write(chunk)
exports.Deprecation = Deprecation; // Note: flushing CAN potentially switch us into not-flowing mode
if (this.flowing && this[BUFFERLENGTH] !== 0)
this[FLUSH](true)
if (this.flowing)
this.emit('data', chunk)
else
this[BUFFERPUSH](chunk)
/***/ }), if (this[BUFFERLENGTH] !== 0)
this.emit('readable')
/***/ 7714: if (cb)
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { fn(cb)
"use strict"; return this.flowing
}
const MiniPass = __nccwpck_require__(1077) read (n) {
const EE = (__nccwpck_require__(2361).EventEmitter) if (this[DESTROYED])
const fs = __nccwpck_require__(7147) return null
let writev = fs.writev if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
/* istanbul ignore next */ this[MAYBE_EMIT_END]()
if (!writev) { return null
// This entire block can be removed if support for earlier than Node.js }
// 12.9.0 is not needed.
const binding = process.binding('fs')
const FSReqWrap = binding.FSReqWrap || binding.FSReqCallback
writev = (fd, iovec, pos, cb) => { if (this[OBJECTMODE])
const done = (er, bw) => cb(er, bw, iovec) n = null
const req = new FSReqWrap()
req.oncomplete = done if (this.buffer.length > 1 && !this[OBJECTMODE]) {
binding.writeBuffers(fd, iovec, pos, req) if (this.encoding)
this.buffer = [this.buffer.join('')]
else
this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
}
const ret = this[READ](n || null, this.buffer[0])
this[MAYBE_EMIT_END]()
return ret
} }
}
const _autoClose = Symbol('_autoClose') [READ] (n, chunk) {
const _close = Symbol('_close') if (n === chunk.length || n === null)
const _ended = Symbol('_ended') this[BUFFERSHIFT]()
const _fd = Symbol('_fd') else {
const _finished = Symbol('_finished') this.buffer[0] = chunk.slice(n)
const _flags = Symbol('_flags') chunk = chunk.slice(0, n)
const _flush = Symbol('_flush') this[BUFFERLENGTH] -= n
const _handleChunk = Symbol('_handleChunk') }
const _makeBuf = Symbol('_makeBuf')
const _mode = Symbol('_mode')
const _needDrain = Symbol('_needDrain')
const _onerror = Symbol('_onerror')
const _onopen = Symbol('_onopen')
const _onread = Symbol('_onread')
const _onwrite = Symbol('_onwrite')
const _open = Symbol('_open')
const _path = Symbol('_path')
const _pos = Symbol('_pos')
const _queue = Symbol('_queue')
const _read = Symbol('_read')
const _readSize = Symbol('_readSize')
const _reading = Symbol('_reading')
const _remain = Symbol('_remain')
const _size = Symbol('_size')
const _write = Symbol('_write')
const _writing = Symbol('_writing')
const _defaultFlag = Symbol('_defaultFlag')
const _errored = Symbol('_errored')
class ReadStream extends MiniPass { this.emit('data', chunk)
constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = true if (!this.buffer.length && !this[EOF])
this.emit('drain')
return chunk
}
end (chunk, encoding, cb) {
if (typeof chunk === 'function')
cb = chunk, chunk = null
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (chunk)
this.write(chunk, encoding)
if (cb)
this.once('end', cb)
this[EOF] = true
this.writable = false this.writable = false
if (typeof path !== 'string') // if we haven't written anything, then go ahead and emit,
throw new TypeError('path must be a string') // even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED])
this[MAYBE_EMIT_END]()
return this
}
this[_errored] = false // don't let the internal resume be overwritten
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null [RESUME] () {
this[_path] = path if (this[DESTROYED])
this[_readSize] = opt.readSize || 16*1024*1024 return
this[_reading] = false
this[_size] = typeof opt.size === 'number' ? opt.size : Infinity
this[_remain] = this[_size]
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
if (typeof this[_fd] === 'number') this[PAUSED] = false
this[_read]() this[FLOWING] = true
this.emit('resume')
if (this.buffer.length)
this[FLUSH]()
else if (this[EOF])
this[MAYBE_EMIT_END]()
else else
this[_open]() this.emit('drain')
} }
get fd () { return this[_fd] } resume () {
get path () { return this[_path] } return this[RESUME]()
}
write () { pause () {
throw new TypeError('this is a readable stream') this[FLOWING] = false
this[PAUSED] = true
} }
end () { get destroyed () {
throw new TypeError('this is a readable stream') return this[DESTROYED]
} }
[_open] () { get flowing () {
fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)) return this[FLOWING]
} }
[_onopen] (er, fd) { get paused () {
if (er) return this[PAUSED]
this[_onerror](er)
else {
this[_fd] = fd
this.emit('open', fd)
this[_read]()
}
} }
[_makeBuf] () { [BUFFERPUSH] (chunk) {
return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])) if (this[OBJECTMODE])
this[BUFFERLENGTH] += 1
else
this[BUFFERLENGTH] += chunk.length
this.buffer.push(chunk)
} }
[_read] () { [BUFFERSHIFT] () {
if (!this[_reading]) { if (this.buffer.length) {
this[_reading] = true if (this[OBJECTMODE])
const buf = this[_makeBuf]() this[BUFFERLENGTH] -= 1
/* istanbul ignore if */ else
if (buf.length === 0) this[BUFFERLENGTH] -= this.buffer[0].length
return process.nextTick(() => this[_onread](null, 0, buf))
fs.read(this[_fd], buf, 0, buf.length, null, (er, br, buf) =>
this[_onread](er, br, buf))
} }
return this.buffer.shift()
} }
[_onread] (er, br, buf) { [FLUSH] (noDrain) {
this[_reading] = false do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
if (er)
this[_onerror](er)
else if (this[_handleChunk](br, buf))
this[_read]()
}
[_close] () { if (!noDrain && !this.buffer.length && !this[EOF])
if (this[_autoClose] && typeof this[_fd] === 'number') { this.emit('drain')
const fd = this[_fd]
this[_fd] = null
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close'))
}
} }
[_onerror] (er) { [FLUSHCHUNK] (chunk) {
this[_reading] = true return chunk ? (this.emit('data', chunk), this.flowing) : false
this[_close]()
this.emit('error', er)
} }
[_handleChunk] (br, buf) { pipe (dest, opts) {
let ret = false if (this[DESTROYED])
// no effect if infinite return
this[_remain] -= br
if (br > 0)
ret = super.write(br < buf.length ? buf.slice(0, br) : buf)
if (br === 0 || this[_remain] <= 0) { const ended = this[EMITTED_END]
ret = false opts = opts || {}
this[_close]() if (dest === proc.stdout || dest === proc.stderr)
super.end() opts.end = false
else
opts.end = opts.end !== false
opts.proxyErrors = !!opts.proxyErrors
// piping an ended stream ends immediately
if (ended) {
if (opts.end)
dest.end()
} else {
this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
: new PipeProxyErrors(this, dest, opts))
if (this[ASYNC])
defer(() => this[RESUME]())
else
this[RESUME]()
} }
return ret return dest
} }
emit (ev, data) { unpipe (dest) {
switch (ev) { const p = this.pipes.find(p => p.dest === dest)
case 'prefinish': if (p) {
case 'finish': this.pipes.splice(this.pipes.indexOf(p), 1)
break p.unpipe()
case 'drain':
if (typeof this[_fd] === 'number')
this[_read]()
break
case 'error':
if (this[_errored])
return
this[_errored] = true
return super.emit(ev, data)
default:
return super.emit(ev, data)
} }
} }
}
class ReadStreamSync extends ReadStream { addListener (ev, fn) {
[_open] () { return this.on(ev, fn)
let threw = true
try {
this[_onopen](null, fs.openSync(this[_path], 'r'))
threw = false
} finally {
if (threw)
this[_close]()
}
} }
[_read] () { on (ev, fn) {
let threw = true const ret = super.on(ev, fn)
try { if (ev === 'data' && !this.pipes.length && !this.flowing)
if (!this[_reading]) { this[RESUME]()
this[_reading] = true else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
do { super.emit('readable')
const buf = this[_makeBuf]() else if (isEndish(ev) && this[EMITTED_END]) {
/* istanbul ignore next */ super.emit(ev)
const br = buf.length === 0 ? 0 this.removeAllListeners(ev)
: fs.readSync(this[_fd], buf, 0, buf.length, null) } else if (ev === 'error' && this[EMITTED_ERROR]) {
if (!this[_handleChunk](br, buf)) if (this[ASYNC])
break defer(() => fn.call(this, this[EMITTED_ERROR]))
} while (true) else
this[_reading] = false fn.call(this, this[EMITTED_ERROR])
}
threw = false
} finally {
if (threw)
this[_close]()
} }
return ret
} }
[_close] () { get emittedEnd () {
if (this[_autoClose] && typeof this[_fd] === 'number') { return this[EMITTED_END]
const fd = this[_fd]
this[_fd] = null
fs.closeSync(fd)
this.emit('close')
}
} }
}
class WriteStream extends EE {
constructor (path, opt) {
opt = opt || {}
super(opt)
this.readable = false
this.writable = true
this[_errored] = false
this[_writing] = false
this[_ended] = false
this[_needDrain] = false
this[_queue] = []
this[_path] = path
this[_fd] = typeof opt.fd === 'number' ? opt.fd : null
this[_mode] = opt.mode === undefined ? 0o666 : opt.mode
this[_pos] = typeof opt.start === 'number' ? opt.start : null
this[_autoClose] = typeof opt.autoClose === 'boolean' ?
opt.autoClose : true
// truncating makes no sense when writing into the middle [MAYBE_EMIT_END] () {
const defaultFlag = this[_pos] !== null ? 'r+' : 'w' if (!this[EMITTING_END] &&
this[_defaultFlag] = opt.flags === undefined !this[EMITTED_END] &&
this[_flags] = this[_defaultFlag] ? defaultFlag : opt.flags !this[DESTROYED] &&
this.buffer.length === 0 &&
if (this[_fd] === null) this[EOF]) {
this[_open]() this[EMITTING_END] = true
this.emit('end')
this.emit('prefinish')
this.emit('finish')
if (this[CLOSED])
this.emit('close')
this[EMITTING_END] = false
}
} }
emit (ev, data) { emit (ev, data, ...extra) {
if (ev === 'error') { // error and close are only events allowed after calling destroy()
if (this[_errored]) if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
return
else if (ev === 'data') {
return !data ? false
: this[ASYNC] ? defer(() => this[EMITDATA](data))
: this[EMITDATA](data)
} else if (ev === 'end') {
return this[EMITEND]()
} else if (ev === 'close') {
this[CLOSED] = true
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return return
this[_errored] = true const ret = super.emit('close')
this.removeAllListeners('close')
return ret
} else if (ev === 'error') {
this[EMITTED_ERROR] = data
const ret = super.emit('error', data)
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'resume') {
const ret = super.emit('resume')
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev)
this.removeAllListeners(ev)
return ret
} }
return super.emit(ev, data)
}
// Some other unknown event
get fd () { return this[_fd] } const ret = super.emit(ev, data, ...extra)
get path () { return this[_path] } this[MAYBE_EMIT_END]()
return ret
[_onerror] (er) {
this[_close]()
this[_writing] = true
this.emit('error', er)
}
[_open] () {
fs.open(this[_path], this[_flags], this[_mode],
(er, fd) => this[_onopen](er, fd))
} }
[_onopen] (er, fd) { [EMITDATA] (data) {
if (this[_defaultFlag] && for (const p of this.pipes) {
this[_flags] === 'r+' && if (p.dest.write(data) === false)
er && er.code === 'ENOENT') { this.pause()
this[_flags] = 'w'
this[_open]()
} else if (er)
this[_onerror](er)
else {
this[_fd] = fd
this.emit('open', fd)
this[_flush]()
} }
const ret = super.emit('data', data)
this[MAYBE_EMIT_END]()
return ret
} }
end (buf, enc) { [EMITEND] () {
if (buf) if (this[EMITTED_END])
this.write(buf, enc) return
this[_ended] = true
// synthetic after-write logic, where drain/finish live this[EMITTED_END] = true
if (!this[_writing] && !this[_queue].length && this.readable = false
typeof this[_fd] === 'number') if (this[ASYNC])
this[_onwrite](null, 0) defer(() => this[EMITEND2]())
return this else
this[EMITEND2]()
} }
write (buf, enc) { [EMITEND2] () {
if (typeof buf === 'string') if (this[DECODER]) {
buf = Buffer.from(buf, enc) const data = this[DECODER].end()
if (data) {
if (this[_ended]) { for (const p of this.pipes) {
this.emit('error', new Error('write() after end()')) p.dest.write(data)
return false }
super.emit('data', data)
}
} }
if (this[_fd] === null || this[_writing] || this[_queue].length) { for (const p of this.pipes) {
this[_queue].push(buf) p.end()
this[_needDrain] = true
return false
} }
const ret = super.emit('end')
this.removeAllListeners('end')
return ret
}
this[_writing] = true // const all = await stream.collect()
this[_write](buf) collect () {
return true const buf = []
if (!this[OBJECTMODE])
buf.dataLength = 0
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise()
this.on('data', c => {
buf.push(c)
if (!this[OBJECTMODE])
buf.dataLength += c.length
})
return p.then(() => buf)
} }
[_write] (buf) { // const data = await stream.concat()
fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => concat () {
this[_onwrite](er, bw)) return this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
} }
[_onwrite] (er, bw) { // stream.promise().then(() => done, er => emitted error)
if (er) promise () {
this[_onerror](er) return new Promise((resolve, reject) => {
else { this.on(DESTROYED, () => reject(new Error('stream destroyed')))
if (this[_pos] !== null) this.on('error', er => reject(er))
this[_pos] += bw this.on('end', () => resolve())
if (this[_queue].length) })
this[_flush]() }
else {
this[_writing] = false
if (this[_ended] && !this[_finished]) { // for await (let chunk of stream)
this[_finished] = true [ASYNCITERATOR] () {
this[_close]() const next = () => {
this.emit('finish') const res = this.read()
} else if (this[_needDrain]) { if (res !== null)
this[_needDrain] = false return Promise.resolve({ done: false, value: res })
this.emit('drain')
} if (this[EOF])
return Promise.resolve({ done: true })
let resolve = null
let reject = null
const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
resolve({ done: true })
} }
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)
this.once('end', onend)
this.once('data', ondata)
})
} }
return { next }
} }
[_flush] () { // for (let chunk of stream)
if (this[_queue].length === 0) { [ITERATOR] () {
if (this[_ended]) const next = () => {
this[_onwrite](null, 0) const value = this.read()
} else if (this[_queue].length === 1) const done = value === null
this[_write](this[_queue].pop()) return { value, done }
else {
const iovec = this[_queue]
this[_queue] = []
writev(this[_fd], iovec, this[_pos],
(er, bw) => this[_onwrite](er, bw))
} }
return { next }
} }
[_close] () { destroy (er) {
if (this[_autoClose] && typeof this[_fd] === 'number') { if (this[DESTROYED]) {
const fd = this[_fd] if (er)
this[_fd] = null this.emit('error', er)
fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')) else
this.emit(DESTROYED)
return this
} }
}
}
class WriteStreamSync extends WriteStream { this[DESTROYED] = true
[_open] () {
let fd
// only wrap in a try{} block if we know we'll retry, to avoid
// the rethrow obscuring the error's source frame in most cases.
if (this[_defaultFlag] && this[_flags] === 'r+') {
try {
fd = fs.openSync(this[_path], this[_flags], this[_mode])
} catch (er) {
if (er.code === 'ENOENT') {
this[_flags] = 'w'
return this[_open]()
} else
throw er
}
} else
fd = fs.openSync(this[_path], this[_flags], this[_mode])
this[_onopen](null, fd) // throw away all buffered data, it's never coming out
} this.buffer.length = 0
this[BUFFERLENGTH] = 0
[_close] () { if (typeof this.close === 'function' && !this[CLOSED])
if (this[_autoClose] && typeof this[_fd] === 'number') { this.close()
const fd = this[_fd]
this[_fd] = null if (er)
fs.closeSync(fd) this.emit('error', er)
this.emit('close') else // if no error to emit, still reject pending promises
} this.emit(DESTROYED)
return this
} }
[_write] (buf) { static isStream (s) {
// throw the original, but try to close if it fails return !!s && (s instanceof Minipass || s instanceof Stream ||
let threw = true s instanceof EE && (
try { typeof s.pipe === 'function' || // readable
this[_onwrite](null, (typeof s.write === 'function' && typeof s.end === 'function') // writable
fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])) ))
threw = false
} finally {
if (threw)
try { this[_close]() } catch (_) {}
}
} }
} }
exports.ReadStream = ReadStream
exports.ReadStreamSync = ReadStreamSync
exports.WriteStream = WriteStream
exports.WriteStreamSync = WriteStreamSync
/***/ }), /***/ }),
...@@ -5915,1041 +6463,1135 @@ exports.isPlainObject = isPlainObject; ...@@ -5915,1041 +6463,1135 @@ exports.isPlainObject = isPlainObject;
/***/ }), /***/ }),
/***/ 1077: /***/ 6769:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
// Update with any zlib constants that are added or changed in the future.
// Node v6 didn't export this, so we just hard code the version and rely
// on all the other hard-coded values from zlib v4736. When node v6
// support drops, we can just export the realZlibConstants object.
const realZlibConstants = (__nccwpck_require__(9796).constants) ||
/* istanbul ignore next */ { ZLIB_VERNUM: 4736 }
module.exports = Object.freeze(Object.assign(Object.create(null), {
Z_NO_FLUSH: 0,
Z_PARTIAL_FLUSH: 1,
Z_SYNC_FLUSH: 2,
Z_FULL_FLUSH: 3,
Z_FINISH: 4,
Z_BLOCK: 5,
Z_OK: 0,
Z_STREAM_END: 1,
Z_NEED_DICT: 2,
Z_ERRNO: -1,
Z_STREAM_ERROR: -2,
Z_DATA_ERROR: -3,
Z_MEM_ERROR: -4,
Z_BUF_ERROR: -5,
Z_VERSION_ERROR: -6,
Z_NO_COMPRESSION: 0,
Z_BEST_SPEED: 1,
Z_BEST_COMPRESSION: 9,
Z_DEFAULT_COMPRESSION: -1,
Z_FILTERED: 1,
Z_HUFFMAN_ONLY: 2,
Z_RLE: 3,
Z_FIXED: 4,
Z_DEFAULT_STRATEGY: 0,
DEFLATE: 1,
INFLATE: 2,
GZIP: 3,
GUNZIP: 4,
DEFLATERAW: 5,
INFLATERAW: 6,
UNZIP: 7,
BROTLI_DECODE: 8,
BROTLI_ENCODE: 9,
Z_MIN_WINDOWBITS: 8,
Z_MAX_WINDOWBITS: 15,
Z_DEFAULT_WINDOWBITS: 15,
Z_MIN_CHUNK: 64,
Z_MAX_CHUNK: Infinity,
Z_DEFAULT_CHUNK: 16384,
Z_MIN_MEMLEVEL: 1,
Z_MAX_MEMLEVEL: 9,
Z_DEFAULT_MEMLEVEL: 8,
Z_MIN_LEVEL: -1,
Z_MAX_LEVEL: 9,
Z_DEFAULT_LEVEL: -1,
BROTLI_OPERATION_PROCESS: 0,
BROTLI_OPERATION_FLUSH: 1,
BROTLI_OPERATION_FINISH: 2,
BROTLI_OPERATION_EMIT_METADATA: 3,
BROTLI_MODE_GENERIC: 0,
BROTLI_MODE_TEXT: 1,
BROTLI_MODE_FONT: 2,
BROTLI_DEFAULT_MODE: 0,
BROTLI_MIN_QUALITY: 0,
BROTLI_MAX_QUALITY: 11,
BROTLI_DEFAULT_QUALITY: 11,
BROTLI_MIN_WINDOW_BITS: 10,
BROTLI_MAX_WINDOW_BITS: 24,
BROTLI_LARGE_MAX_WINDOW_BITS: 30,
BROTLI_DEFAULT_WINDOW: 22,
BROTLI_MIN_INPUT_BLOCK_BITS: 16,
BROTLI_MAX_INPUT_BLOCK_BITS: 24,
BROTLI_PARAM_MODE: 0,
BROTLI_PARAM_QUALITY: 1,
BROTLI_PARAM_LGWIN: 2,
BROTLI_PARAM_LGBLOCK: 3,
BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
BROTLI_PARAM_SIZE_HINT: 5,
BROTLI_PARAM_LARGE_WINDOW: 6,
BROTLI_PARAM_NPOSTFIX: 7,
BROTLI_PARAM_NDIRECT: 8,
BROTLI_DECODER_RESULT_ERROR: 0,
BROTLI_DECODER_RESULT_SUCCESS: 1,
BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
BROTLI_DECODER_NO_ERROR: 0,
BROTLI_DECODER_SUCCESS: 1,
BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
BROTLI_DECODER_ERROR_UNREACHABLE: -31,
}, realZlibConstants))
/***/ }),
/***/ 3486:
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict"; "use strict";
const proc = typeof process === 'object' && process ? process : {
stdout: null,
stderr: null,
}
const EE = __nccwpck_require__(2361)
const Stream = __nccwpck_require__(2781)
const Yallist = __nccwpck_require__(665)
const SD = (__nccwpck_require__(1576).StringDecoder)
const EOF = Symbol('EOF')
const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
const EMITTING_END = Symbol('emittingEnd')
const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
// TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|| Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
|| Symbol('iterator not implemented')
// events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev =>
ev === 'end' ||
ev === 'finish' ||
ev === 'prefinish'
const isArrayBuffer = b => b instanceof ArrayBuffer ||
typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0
const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
module.exports = class Minipass extends Stream {
constructor (options) {
super()
this[FLOWING] = false
// whether we're explicitly paused
this[PAUSED] = false
this.pipes = new Yallist()
this.buffer = new Yallist()
this[OBJECTMODE] = options && options.objectMode || false
if (this[OBJECTMODE])
this[ENCODING] = null
else
this[ENCODING] = options && options.encoding || null
if (this[ENCODING] === 'buffer')
this[ENCODING] = null
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
this[EOF] = false
this[EMITTED_END] = false
this[EMITTING_END] = false
this[CLOSED] = false
this[EMITTED_ERROR] = null
this.writable = true
this.readable = true
this[BUFFERLENGTH] = 0
this[DESTROYED] = false
}
get bufferLength () { return this[BUFFERLENGTH] } const assert = __nccwpck_require__(9491)
const Buffer = (__nccwpck_require__(4300).Buffer)
const realZlib = __nccwpck_require__(9796)
get encoding () { return this[ENCODING] } const constants = exports.constants = __nccwpck_require__(6769)
set encoding (enc) { const Minipass = __nccwpck_require__(7557)
if (this[OBJECTMODE])
throw new Error('cannot set encoding in objectMode')
if (this[ENCODING] && enc !== this[ENCODING] && const OriginalBufferConcat = Buffer.concat
(this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
throw new Error('cannot change encoding')
if (this[ENCODING] !== enc) { const _superWrite = Symbol('_superWrite')
this[DECODER] = enc ? new SD(enc) : null class ZlibError extends Error {
if (this.buffer.length) constructor (err) {
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk)) super('zlib: ' + err.message)
} this.code = err.code
this.errno = err.errno
/* istanbul ignore if */
if (!this.code)
this.code = 'ZLIB_ERROR'
this[ENCODING] = enc this.message = 'zlib: ' + err.message
Error.captureStackTrace(this, this.constructor)
} }
setEncoding (enc) { get name () {
this.encoding = enc return 'ZlibError'
} }
}
get objectMode () { return this[OBJECTMODE] } // the Zlib class they all inherit from
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om } // This thing manages the queue of requests, and returns
// true or false if there is anything in the queue when
write (chunk, encoding, cb) { // you call the .write() method.
if (this[EOF]) const _opts = Symbol('opts')
throw new Error('write after end') const _flushFlag = Symbol('flushFlag')
const _finishFlushFlag = Symbol('finishFlushFlag')
if (this[DESTROYED]) { const _fullFlushFlag = Symbol('fullFlushFlag')
this.emit('error', Object.assign( const _handle = Symbol('handle')
new Error('Cannot call write after a stream was destroyed'), const _onError = Symbol('onError')
{ code: 'ERR_STREAM_DESTROYED' } const _sawError = Symbol('sawError')
)) const _level = Symbol('level')
return true const _strategy = Symbol('strategy')
} const _ended = Symbol('ended')
const _defaultFullFlush = Symbol('_defaultFullFlush')
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (!encoding) class ZlibBase extends Minipass {
encoding = 'utf8' constructor (opts, mode) {
if (!opts || typeof opts !== 'object')
throw new TypeError('invalid options for ZlibBase constructor')
// convert array buffers and typed array views into buffers super(opts)
// at some point in the future, we may want to do the opposite! this[_sawError] = false
// leave strings and buffers as-is this[_ended] = false
// anything else switches us into object mode this[_opts] = opts
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk))
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk))
chunk = Buffer.from(chunk)
else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set
this.objectMode = true
}
// this ensures at this point that the chunk is a buffer or string this[_flushFlag] = opts.flush
// don't buffer it up or send it to the decoder this[_finishFlushFlag] = opts.finishFlush
if (!this.objectMode && !chunk.length) { // this will throw if any options are invalid for the class selected
if (this[BUFFERLENGTH] !== 0) try {
this.emit('readable') this[_handle] = new realZlib[mode](opts)
if (cb) } catch (er) {
cb() // make sure that all errors get decorated properly
return this.flowing throw new ZlibError(er)
} }
// fast-path writing strings of same encoding to a stream with this[_onError] = (err) => {
// an empty buffer, skipping the buffer/decoder dance // no sense raising multiple errors, since we abort on the first one.
if (typeof chunk === 'string' && !this[OBJECTMODE] && if (this[_sawError])
// unless it is a string already ready for us to use return
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
chunk = Buffer.from(chunk, encoding) this[_sawError] = true
// there is no way to cleanly recover.
// continuing only obscures problems.
this.close()
this.emit('error', err)
} }
if (Buffer.isBuffer(chunk) && this[ENCODING]) this[_handle].on('error', er => this[_onError](new ZlibError(er)))
chunk = this[DECODER].write(chunk) this.once('end', () => this.close)
}
if (this.flowing) { close () {
// if we somehow have something in the buffer, but we think we're if (this[_handle]) {
// flowing, then we need to flush all that out first, or we get this[_handle].close()
// chunks coming in out of order. Can't emit 'drain' here though, this[_handle] = null
// because we're mid-write, so that'd be bad. this.emit('close')
if (this[BUFFERLENGTH] !== 0) }
this[FLUSH](true) }
this.emit('data', chunk)
} else
this[BUFFERPUSH](chunk)
if (this[BUFFERLENGTH] !== 0) reset () {
this.emit('readable') if (!this[_sawError]) {
assert(this[_handle], 'zlib binding closed')
return this[_handle].reset()
}
}
if (cb) flush (flushFlag) {
cb() if (this.ended)
return
return this.flowing if (typeof flushFlag !== 'number')
flushFlag = this[_fullFlushFlag]
this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
} }
read (n) { end (chunk, encoding, cb) {
if (this[DESTROYED]) if (chunk)
return null this.write(chunk, encoding)
this.flush(this[_finishFlushFlag])
this[_ended] = true
return super.end(null, null, cb)
}
try { get ended () {
if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) return this[_ended]
return null }
if (this[OBJECTMODE]) write (chunk, encoding, cb) {
n = null // process the chunk using the sync process
// then super.write() all the outputted chunks
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (this.buffer.length > 1 && !this[OBJECTMODE]) { if (typeof chunk === 'string')
if (this.encoding) chunk = Buffer.from(chunk, encoding)
this.buffer = new Yallist([
Array.from(this.buffer).join('') if (this[_sawError])
]) return
else assert(this[_handle], 'zlib binding closed')
this.buffer = new Yallist([
Buffer.concat(Array.from(this.buffer), this[BUFFERLENGTH])
])
}
return this[READ](n || null, this.buffer.head.value) // _processChunk tries to .close() the native handle after it's done, so we
// intercept that by temporarily making it a no-op.
const nativeHandle = this[_handle]._handle
const originalNativeClose = nativeHandle.close
nativeHandle.close = () => {}
const originalClose = this[_handle].close
this[_handle].close = () => {}
// It also calls `Buffer.concat()` at the end, which may be convenient
// for some, but which we are not interested in as it slows us down.
Buffer.concat = (args) => args
let result
try {
const flushFlag = typeof chunk[_flushFlag] === 'number'
? chunk[_flushFlag] : this[_flushFlag]
result = this[_handle]._processChunk(chunk, flushFlag)
// if we don't throw, reset it back how it was
Buffer.concat = OriginalBufferConcat
} catch (err) {
// or if we do, put Buffer.concat() back before we emit error
// Error events call into user code, which may call Buffer.concat()
Buffer.concat = OriginalBufferConcat
this[_onError](new ZlibError(err))
} finally { } finally {
this[MAYBE_EMIT_END]() if (this[_handle]) {
// Core zlib resets `_handle` to null after attempting to close the
// native handle. Our no-op handler prevented actual closure, but we
// need to restore the `._handle` property.
this[_handle]._handle = nativeHandle
nativeHandle.close = originalNativeClose
this[_handle].close = originalClose
// `_processChunk()` adds an 'error' listener. If we don't remove it
// after each call, these handlers start piling up.
this[_handle].removeAllListeners('error')
// make sure OUR error listener is still attached tho
}
} }
}
[READ] (n, chunk) { if (this[_handle])
if (n === chunk.length || n === null) this[_handle].on('error', er => this[_onError](new ZlibError(er)))
this[BUFFERSHIFT]()
else {
this.buffer.head.value = chunk.slice(n)
chunk = chunk.slice(0, n)
this[BUFFERLENGTH] -= n
}
this.emit('data', chunk) let writeReturn
if (result) {
if (Array.isArray(result) && result.length > 0) {
// The first buffer is always `handle._outBuffer`, which would be
// re-used for later invocations; so, we always have to copy that one.
writeReturn = this[_superWrite](Buffer.from(result[0]))
for (let i = 1; i < result.length; i++) {
writeReturn = this[_superWrite](result[i])
}
} else {
writeReturn = this[_superWrite](Buffer.from(result))
}
}
if (!this.buffer.length && !this[EOF]) if (cb)
this.emit('drain') cb()
return writeReturn
}
return chunk [_superWrite] (data) {
return super.write(data)
} }
}
end (chunk, encoding, cb) { class Zlib extends ZlibBase {
if (typeof chunk === 'function') constructor (opts, mode) {
cb = chunk, chunk = null opts = opts || {}
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (chunk)
this.write(chunk, encoding)
if (cb)
this.once('end', cb)
this[EOF] = true
this.writable = false
// if we haven't written anything, then go ahead and emit, opts.flush = opts.flush || constants.Z_NO_FLUSH
// even if we're not reading. opts.finishFlush = opts.finishFlush || constants.Z_FINISH
// we'll re-emit if a new 'end' listener is added anyway. super(opts, mode)
// This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED]) this[_fullFlushFlag] = constants.Z_FULL_FLUSH
this[MAYBE_EMIT_END]() this[_level] = opts.level
return this this[_strategy] = opts.strategy
} }
// don't let the internal resume be overwritten params (level, strategy) {
[RESUME] () { if (this[_sawError])
if (this[DESTROYED])
return return
this[PAUSED] = false if (!this[_handle])
this[FLOWING] = true throw new Error('cannot switch params when binding is closed')
this.emit('resume')
if (this.buffer.length)
this[FLUSH]()
else if (this[EOF])
this[MAYBE_EMIT_END]()
else
this.emit('drain')
}
resume () { // no way to test this without also not supporting params at all
return this[RESUME]() /* istanbul ignore if */
if (!this[_handle].params)
throw new Error('not supported in this implementation')
if (this[_level] !== level || this[_strategy] !== strategy) {
this.flush(constants.Z_SYNC_FLUSH)
assert(this[_handle], 'zlib binding closed')
// .params() calls .flush(), but the latter is always async in the
// core zlib. We override .flush() temporarily to intercept that and
// flush synchronously.
const origFlush = this[_handle].flush
this[_handle].flush = (flushFlag, cb) => {
this.flush(flushFlag)
cb()
}
try {
this[_handle].params(level, strategy)
} finally {
this[_handle].flush = origFlush
}
/* istanbul ignore else */
if (this[_handle]) {
this[_level] = level
this[_strategy] = strategy
}
}
} }
}
pause () { // minimal 2-byte header
this[FLOWING] = false class Deflate extends Zlib {
this[PAUSED] = true constructor (opts) {
super(opts, 'Deflate')
} }
}
get destroyed () { class Inflate extends Zlib {
return this[DESTROYED] constructor (opts) {
super(opts, 'Inflate')
} }
}
get flowing () { // gzip - bigger header, same deflate compression
return this[FLOWING] const _portable = Symbol('_portable')
class Gzip extends Zlib {
constructor (opts) {
super(opts, 'Gzip')
this[_portable] = opts && !!opts.portable
} }
get paused () { [_superWrite] (data) {
return this[PAUSED] if (!this[_portable])
return super[_superWrite](data)
// we'll always get the header emitted in one first chunk
// overwrite the OS indicator byte with 0xFF
this[_portable] = false
data[9] = 255
return super[_superWrite](data)
} }
}
[BUFFERPUSH] (chunk) { class Gunzip extends Zlib {
if (this[OBJECTMODE]) constructor (opts) {
this[BUFFERLENGTH] += 1 super(opts, 'Gunzip')
else
this[BUFFERLENGTH] += chunk.length
return this.buffer.push(chunk)
} }
}
[BUFFERSHIFT] () { // raw - no header
if (this.buffer.length) { class DeflateRaw extends Zlib {
if (this[OBJECTMODE]) constructor (opts) {
this[BUFFERLENGTH] -= 1 super(opts, 'DeflateRaw')
else
this[BUFFERLENGTH] -= this.buffer.head.value.length
}
return this.buffer.shift()
} }
}
[FLUSH] (noDrain) { class InflateRaw extends Zlib {
do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]())) constructor (opts) {
super(opts, 'InflateRaw')
if (!noDrain && !this.buffer.length && !this[EOF])
this.emit('drain')
} }
}
[FLUSHCHUNK] (chunk) { // auto-detect header.
return chunk ? (this.emit('data', chunk), this.flowing) : false class Unzip extends Zlib {
constructor (opts) {
super(opts, 'Unzip')
} }
}
pipe (dest, opts) { class Brotli extends ZlibBase {
if (this[DESTROYED]) constructor (opts, mode) {
return
const ended = this[EMITTED_END]
opts = opts || {} opts = opts || {}
if (dest === proc.stdout || dest === proc.stderr)
opts.end = false
else
opts.end = opts.end !== false
const p = { dest: dest, opts: opts, ondrain: _ => this[RESUME]() } opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS
this.pipes.push(p) opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH
dest.on('drain', p.ondrain) super(opts, mode)
this[RESUME]()
// piping an ended stream ends immediately
if (ended && p.opts.end)
p.dest.end()
return dest
}
addListener (ev, fn) { this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH
return this.on(ev, fn)
} }
}
on (ev, fn) { class BrotliCompress extends Brotli {
try { constructor (opts) {
return super.on(ev, fn) super(opts, 'BrotliCompress')
} finally {
if (ev === 'data' && !this.pipes.length && !this.flowing)
this[RESUME]()
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev)
this.removeAllListeners(ev)
} else if (ev === 'error' && this[EMITTED_ERROR]) {
fn.call(this, this[EMITTED_ERROR])
}
}
} }
}
get emittedEnd () { class BrotliDecompress extends Brotli {
return this[EMITTED_END] constructor (opts) {
super(opts, 'BrotliDecompress')
} }
}
[MAYBE_EMIT_END] () { exports.Deflate = Deflate
if (!this[EMITTING_END] && exports.Inflate = Inflate
!this[EMITTED_END] && exports.Gzip = Gzip
!this[DESTROYED] && exports.Gunzip = Gunzip
this.buffer.length === 0 && exports.DeflateRaw = DeflateRaw
this[EOF]) { exports.InflateRaw = InflateRaw
this[EMITTING_END] = true exports.Unzip = Unzip
this.emit('end') /* istanbul ignore else */
this.emit('prefinish') if (typeof realZlib.BrotliCompress === 'function') {
this.emit('finish') exports.BrotliCompress = BrotliCompress
if (this[CLOSED]) exports.BrotliDecompress = BrotliDecompress
this.emit('close') } else {
this[EMITTING_END] = false exports.BrotliCompress = exports.BrotliDecompress = class {
constructor () {
throw new Error('Brotli is not supported in this version of Node.js')
} }
} }
}
emit (ev, data) {
// error and close are only events allowed after calling destroy()
if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
return
else if (ev === 'data') {
if (!data)
return
if (this.pipes.length)
this.pipes.forEach(p =>
p.dest.write(data) === false && this.pause())
} else if (ev === 'end') {
// only actual end gets this treatment
if (this[EMITTED_END] === true)
return
this[EMITTED_END] = true
this.readable = false
if (this[DECODER]) { /***/ }),
data = this[DECODER].end()
if (data) {
this.pipes.forEach(p => p.dest.write(data))
super.emit('data', data)
}
}
this.pipes.forEach(p => { /***/ 7557:
p.dest.removeListener('drain', p.ondrain) /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
if (p.opts.end)
p.dest.end()
})
} else if (ev === 'close') {
this[CLOSED] = true
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return
} else if (ev === 'error') {
this[EMITTED_ERROR] = data
}
// TODO: replace with a spread operator when Node v4 support drops "use strict";
const args = new Array(arguments.length)
args[0] = ev
args[1] = data
if (arguments.length > 2) {
for (let i = 2; i < arguments.length; i++) {
args[i] = arguments[i]
}
}
try { const proc = typeof process === 'object' && process ? process : {
return super.emit.apply(this, args) stdout: null,
} finally { stderr: null,
if (!isEndish(ev)) }
this[MAYBE_EMIT_END]() const EE = __nccwpck_require__(2361)
else const Stream = __nccwpck_require__(2781)
this.removeAllListeners(ev) const SD = (__nccwpck_require__(1576).StringDecoder)
}
}
// const all = await stream.collect() const EOF = Symbol('EOF')
collect () { const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const buf = [] const EMITTED_END = Symbol('emittedEnd')
if (!this[OBJECTMODE]) const EMITTING_END = Symbol('emittingEnd')
buf.dataLength = 0 const EMITTED_ERROR = Symbol('emittedError')
// set the promise first, in case an error is raised const CLOSED = Symbol('closed')
// by triggering the flow here. const READ = Symbol('read')
const p = this.promise() const FLUSH = Symbol('flush')
this.on('data', c => { const FLUSHCHUNK = Symbol('flushChunk')
buf.push(c) const ENCODING = Symbol('encoding')
if (!this[OBJECTMODE]) const DECODER = Symbol('decoder')
buf.dataLength += c.length const FLOWING = Symbol('flowing')
}) const PAUSED = Symbol('paused')
return p.then(() => buf) const RESUME = Symbol('resume')
} const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async')
// const data = await stream.concat() const defer = fn => Promise.resolve().then(fn)
concat () {
return this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
}
// stream.promise().then(() => done, er => emitted error) // TODO remove when Node v8 support drops
promise () { const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
return new Promise((resolve, reject) => { const ASYNCITERATOR = doIter && Symbol.asyncIterator
this.on(DESTROYED, () => reject(new Error('stream destroyed'))) || Symbol('asyncIterator not implemented')
this.on('error', er => reject(er)) const ITERATOR = doIter && Symbol.iterator
this.on('end', () => resolve()) || Symbol('iterator not implemented')
})
}
// for await (let chunk of stream) // events that mean 'the stream is over'
[ASYNCITERATOR] () { // these are treated specially, and re-emitted
const next = () => { // if they are listened for after emitting.
const res = this.read() const isEndish = ev =>
if (res !== null) ev === 'end' ||
return Promise.resolve({ done: false, value: res }) ev === 'finish' ||
ev === 'prefinish'
if (this[EOF]) const isArrayBuffer = b => b instanceof ArrayBuffer ||
return Promise.resolve({ done: true }) typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0
let resolve = null const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
let reject = null
const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
resolve({ done: true })
}
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)
this.once('end', onend)
this.once('data', ondata)
})
}
return { next } class Pipe {
constructor (src, dest, opts) {
this.src = src
this.dest = dest
this.opts = opts
this.ondrain = () => src[RESUME]()
dest.on('drain', this.ondrain)
} }
unpipe () {
this.dest.removeListener('drain', this.ondrain)
}
// istanbul ignore next - only here for the prototype
proxyErrors () {}
end () {
this.unpipe()
if (this.opts.end)
this.dest.end()
}
}
// for (let chunk of stream) class PipeProxyErrors extends Pipe {
[ITERATOR] () { unpipe () {
const next = () => { this.src.removeListener('error', this.proxyErrors)
const value = this.read() super.unpipe()
const done = value === null
return { value, done }
}
return { next }
} }
constructor (src, dest, opts) {
super(src, dest, opts)
this.proxyErrors = er => dest.emit('error', er)
src.on('error', this.proxyErrors)
}
}
destroy (er) { module.exports = class Minipass extends Stream {
if (this[DESTROYED]) { constructor (options) {
if (er) super()
this.emit('error', er) this[FLOWING] = false
else // whether we're explicitly paused
this.emit(DESTROYED) this[PAUSED] = false
return this this.pipes = []
} this.buffer = []
this[OBJECTMODE] = options && options.objectMode || false
if (this[OBJECTMODE])
this[ENCODING] = null
else
this[ENCODING] = options && options.encoding || null
if (this[ENCODING] === 'buffer')
this[ENCODING] = null
this[ASYNC] = options && !!options.async || false
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
this[EOF] = false
this[EMITTED_END] = false
this[EMITTING_END] = false
this[CLOSED] = false
this[EMITTED_ERROR] = null
this.writable = true
this.readable = true
this[BUFFERLENGTH] = 0
this[DESTROYED] = false
}
this[DESTROYED] = true get bufferLength () { return this[BUFFERLENGTH] }
// throw away all buffered data, it's never coming out get encoding () { return this[ENCODING] }
this.buffer = new Yallist() set encoding (enc) {
this[BUFFERLENGTH] = 0 if (this[OBJECTMODE])
throw new Error('cannot set encoding in objectMode')
if (typeof this.close === 'function' && !this[CLOSED]) if (this[ENCODING] && enc !== this[ENCODING] &&
this.close() (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
throw new Error('cannot change encoding')
if (er) if (this[ENCODING] !== enc) {
this.emit('error', er) this[DECODER] = enc ? new SD(enc) : null
else // if no error to emit, still reject pending promises if (this.buffer.length)
this.emit(DESTROYED) this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
}
return this this[ENCODING] = enc
} }
static isStream (s) { setEncoding (enc) {
return !!s && (s instanceof Minipass || s instanceof Stream || this.encoding = enc
s instanceof EE && (
typeof s.pipe === 'function' || // readable
(typeof s.write === 'function' && typeof s.end === 'function') // writable
))
} }
}
get objectMode () { return this[OBJECTMODE] }
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
/***/ }), get ['async'] () { return this[ASYNC] }
set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
/***/ 6769: write (chunk, encoding, cb) {
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { if (this[EOF])
throw new Error('write after end')
// Update with any zlib constants that are added or changed in the future. if (this[DESTROYED]) {
// Node v6 didn't export this, so we just hard code the version and rely this.emit('error', Object.assign(
// on all the other hard-coded values from zlib v4736. When node v6 new Error('Cannot call write after a stream was destroyed'),
// support drops, we can just export the realZlibConstants object. { code: 'ERR_STREAM_DESTROYED' }
const realZlibConstants = (__nccwpck_require__(9796).constants) || ))
/* istanbul ignore next */ { ZLIB_VERNUM: 4736 } return true
}
module.exports = Object.freeze(Object.assign(Object.create(null), { if (typeof encoding === 'function')
Z_NO_FLUSH: 0, cb = encoding, encoding = 'utf8'
Z_PARTIAL_FLUSH: 1,
Z_SYNC_FLUSH: 2,
Z_FULL_FLUSH: 3,
Z_FINISH: 4,
Z_BLOCK: 5,
Z_OK: 0,
Z_STREAM_END: 1,
Z_NEED_DICT: 2,
Z_ERRNO: -1,
Z_STREAM_ERROR: -2,
Z_DATA_ERROR: -3,
Z_MEM_ERROR: -4,
Z_BUF_ERROR: -5,
Z_VERSION_ERROR: -6,
Z_NO_COMPRESSION: 0,
Z_BEST_SPEED: 1,
Z_BEST_COMPRESSION: 9,
Z_DEFAULT_COMPRESSION: -1,
Z_FILTERED: 1,
Z_HUFFMAN_ONLY: 2,
Z_RLE: 3,
Z_FIXED: 4,
Z_DEFAULT_STRATEGY: 0,
DEFLATE: 1,
INFLATE: 2,
GZIP: 3,
GUNZIP: 4,
DEFLATERAW: 5,
INFLATERAW: 6,
UNZIP: 7,
BROTLI_DECODE: 8,
BROTLI_ENCODE: 9,
Z_MIN_WINDOWBITS: 8,
Z_MAX_WINDOWBITS: 15,
Z_DEFAULT_WINDOWBITS: 15,
Z_MIN_CHUNK: 64,
Z_MAX_CHUNK: Infinity,
Z_DEFAULT_CHUNK: 16384,
Z_MIN_MEMLEVEL: 1,
Z_MAX_MEMLEVEL: 9,
Z_DEFAULT_MEMLEVEL: 8,
Z_MIN_LEVEL: -1,
Z_MAX_LEVEL: 9,
Z_DEFAULT_LEVEL: -1,
BROTLI_OPERATION_PROCESS: 0,
BROTLI_OPERATION_FLUSH: 1,
BROTLI_OPERATION_FINISH: 2,
BROTLI_OPERATION_EMIT_METADATA: 3,
BROTLI_MODE_GENERIC: 0,
BROTLI_MODE_TEXT: 1,
BROTLI_MODE_FONT: 2,
BROTLI_DEFAULT_MODE: 0,
BROTLI_MIN_QUALITY: 0,
BROTLI_MAX_QUALITY: 11,
BROTLI_DEFAULT_QUALITY: 11,
BROTLI_MIN_WINDOW_BITS: 10,
BROTLI_MAX_WINDOW_BITS: 24,
BROTLI_LARGE_MAX_WINDOW_BITS: 30,
BROTLI_DEFAULT_WINDOW: 22,
BROTLI_MIN_INPUT_BLOCK_BITS: 16,
BROTLI_MAX_INPUT_BLOCK_BITS: 24,
BROTLI_PARAM_MODE: 0,
BROTLI_PARAM_QUALITY: 1,
BROTLI_PARAM_LGWIN: 2,
BROTLI_PARAM_LGBLOCK: 3,
BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4,
BROTLI_PARAM_SIZE_HINT: 5,
BROTLI_PARAM_LARGE_WINDOW: 6,
BROTLI_PARAM_NPOSTFIX: 7,
BROTLI_PARAM_NDIRECT: 8,
BROTLI_DECODER_RESULT_ERROR: 0,
BROTLI_DECODER_RESULT_SUCCESS: 1,
BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0,
BROTLI_DECODER_PARAM_LARGE_WINDOW: 1,
BROTLI_DECODER_NO_ERROR: 0,
BROTLI_DECODER_SUCCESS: 1,
BROTLI_DECODER_NEEDS_MORE_INPUT: 2,
BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1,
BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2,
BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4,
BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5,
BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6,
BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7,
BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9,
BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10,
BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11,
BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12,
BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13,
BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14,
BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15,
BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16,
BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19,
BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21,
BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22,
BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26,
BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27,
BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30,
BROTLI_DECODER_ERROR_UNREACHABLE: -31,
}, realZlibConstants))
if (!encoding)
encoding = 'utf8'
/***/ }), const fn = this[ASYNC] ? defer : f => f()
/***/ 3486: // convert array buffers and typed array views into buffers
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { // at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
// anything else switches us into object mode
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk))
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk))
chunk = Buffer.from(chunk)
else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set
this.objectMode = true
}
"use strict"; // handle object mode up front, since it's simpler
// this yields better performance, fewer checks later.
if (this[OBJECTMODE]) {
/* istanbul ignore if - maybe impossible? */
if (this.flowing && this[BUFFERLENGTH] !== 0)
this[FLUSH](true)
if (this.flowing)
this.emit('data', chunk)
else
this[BUFFERPUSH](chunk)
const assert = __nccwpck_require__(9491) if (this[BUFFERLENGTH] !== 0)
const Buffer = (__nccwpck_require__(4300).Buffer) this.emit('readable')
const realZlib = __nccwpck_require__(9796)
const constants = exports.constants = __nccwpck_require__(6769) if (cb)
const Minipass = __nccwpck_require__(1077) fn(cb)
const OriginalBufferConcat = Buffer.concat return this.flowing
}
const _superWrite = Symbol('_superWrite') // at this point the chunk is a buffer or string
class ZlibError extends Error { // don't buffer it up or send it to the decoder
constructor (err) { if (!chunk.length) {
super('zlib: ' + err.message) if (this[BUFFERLENGTH] !== 0)
this.code = err.code this.emit('readable')
this.errno = err.errno if (cb)
/* istanbul ignore if */ fn(cb)
if (!this.code) return this.flowing
this.code = 'ZLIB_ERROR' }
this.message = 'zlib: ' + err.message // fast-path writing strings of same encoding to a stream with
Error.captureStackTrace(this, this.constructor) // an empty buffer, skipping the buffer/decoder dance
} if (typeof chunk === 'string' &&
// unless it is a string already ready for us to use
!(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
chunk = Buffer.from(chunk, encoding)
}
get name () { if (Buffer.isBuffer(chunk) && this[ENCODING])
return 'ZlibError' chunk = this[DECODER].write(chunk)
}
}
// the Zlib class they all inherit from // Note: flushing CAN potentially switch us into not-flowing mode
// This thing manages the queue of requests, and returns if (this.flowing && this[BUFFERLENGTH] !== 0)
// true or false if there is anything in the queue when this[FLUSH](true)
// you call the .write() method.
const _opts = Symbol('opts')
const _flushFlag = Symbol('flushFlag')
const _finishFlushFlag = Symbol('finishFlushFlag')
const _fullFlushFlag = Symbol('fullFlushFlag')
const _handle = Symbol('handle')
const _onError = Symbol('onError')
const _sawError = Symbol('sawError')
const _level = Symbol('level')
const _strategy = Symbol('strategy')
const _ended = Symbol('ended')
const _defaultFullFlush = Symbol('_defaultFullFlush')
class ZlibBase extends Minipass { if (this.flowing)
constructor (opts, mode) { this.emit('data', chunk)
if (!opts || typeof opts !== 'object') else
throw new TypeError('invalid options for ZlibBase constructor') this[BUFFERPUSH](chunk)
super(opts) if (this[BUFFERLENGTH] !== 0)
this[_sawError] = false this.emit('readable')
this[_ended] = false
this[_opts] = opts
this[_flushFlag] = opts.flush if (cb)
this[_finishFlushFlag] = opts.finishFlush fn(cb)
// this will throw if any options are invalid for the class selected
try {
this[_handle] = new realZlib[mode](opts)
} catch (er) {
// make sure that all errors get decorated properly
throw new ZlibError(er)
}
this[_onError] = (err) => { return this.flowing
// no sense raising multiple errors, since we abort on the first one. }
if (this[_sawError])
return
this[_sawError] = true read (n) {
if (this[DESTROYED])
return null
// there is no way to cleanly recover. if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
// continuing only obscures problems. this[MAYBE_EMIT_END]()
this.close() return null
this.emit('error', err)
} }
this[_handle].on('error', er => this[_onError](new ZlibError(er))) if (this[OBJECTMODE])
this.once('end', () => this.close) n = null
}
close () { if (this.buffer.length > 1 && !this[OBJECTMODE]) {
if (this[_handle]) { if (this.encoding)
this[_handle].close() this.buffer = [this.buffer.join('')]
this[_handle] = null else
this.emit('close') this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
} }
const ret = this[READ](n || null, this.buffer[0])
this[MAYBE_EMIT_END]()
return ret
} }
reset () { [READ] (n, chunk) {
if (!this[_sawError]) { if (n === chunk.length || n === null)
assert(this[_handle], 'zlib binding closed') this[BUFFERSHIFT]()
return this[_handle].reset() else {
this.buffer[0] = chunk.slice(n)
chunk = chunk.slice(0, n)
this[BUFFERLENGTH] -= n
} }
}
flush (flushFlag) { this.emit('data', chunk)
if (this.ended)
return
if (typeof flushFlag !== 'number') if (!this.buffer.length && !this[EOF])
flushFlag = this[_fullFlushFlag] this.emit('drain')
this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }))
return chunk
} }
end (chunk, encoding, cb) { end (chunk, encoding, cb) {
if (typeof chunk === 'function')
cb = chunk, chunk = null
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (chunk) if (chunk)
this.write(chunk, encoding) this.write(chunk, encoding)
this.flush(this[_finishFlushFlag]) if (cb)
this[_ended] = true this.once('end', cb)
return super.end(null, null, cb) this[EOF] = true
this.writable = false
// if we haven't written anything, then go ahead and emit,
// even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED])
this[MAYBE_EMIT_END]()
return this
} }
get ended () { // don't let the internal resume be overwritten
return this[_ended] [RESUME] () {
if (this[DESTROYED])
return
this[PAUSED] = false
this[FLOWING] = true
this.emit('resume')
if (this.buffer.length)
this[FLUSH]()
else if (this[EOF])
this[MAYBE_EMIT_END]()
else
this.emit('drain')
} }
write (chunk, encoding, cb) { resume () {
// process the chunk using the sync process return this[RESUME]()
// then super.write() all the outputted chunks }
if (typeof encoding === 'function')
cb = encoding, encoding = 'utf8'
if (typeof chunk === 'string') pause () {
chunk = Buffer.from(chunk, encoding) this[FLOWING] = false
this[PAUSED] = true
}
if (this[_sawError]) get destroyed () {
return return this[DESTROYED]
assert(this[_handle], 'zlib binding closed') }
// _processChunk tries to .close() the native handle after it's done, so we get flowing () {
// intercept that by temporarily making it a no-op. return this[FLOWING]
const nativeHandle = this[_handle]._handle }
const originalNativeClose = nativeHandle.close
nativeHandle.close = () => {}
const originalClose = this[_handle].close
this[_handle].close = () => {}
// It also calls `Buffer.concat()` at the end, which may be convenient
// for some, but which we are not interested in as it slows us down.
Buffer.concat = (args) => args
let result
try {
const flushFlag = typeof chunk[_flushFlag] === 'number'
? chunk[_flushFlag] : this[_flushFlag]
result = this[_handle]._processChunk(chunk, flushFlag)
// if we don't throw, reset it back how it was
Buffer.concat = OriginalBufferConcat
} catch (err) {
// or if we do, put Buffer.concat() back before we emit error
// Error events call into user code, which may call Buffer.concat()
Buffer.concat = OriginalBufferConcat
this[_onError](new ZlibError(err))
} finally {
if (this[_handle]) {
// Core zlib resets `_handle` to null after attempting to close the
// native handle. Our no-op handler prevented actual closure, but we
// need to restore the `._handle` property.
this[_handle]._handle = nativeHandle
nativeHandle.close = originalNativeClose
this[_handle].close = originalClose
// `_processChunk()` adds an 'error' listener. If we don't remove it
// after each call, these handlers start piling up.
this[_handle].removeAllListeners('error')
// make sure OUR error listener is still attached tho
}
}
if (this[_handle]) get paused () {
this[_handle].on('error', er => this[_onError](new ZlibError(er))) return this[PAUSED]
}
let writeReturn [BUFFERPUSH] (chunk) {
if (result) { if (this[OBJECTMODE])
if (Array.isArray(result) && result.length > 0) { this[BUFFERLENGTH] += 1
// The first buffer is always `handle._outBuffer`, which would be else
// re-used for later invocations; so, we always have to copy that one. this[BUFFERLENGTH] += chunk.length
writeReturn = this[_superWrite](Buffer.from(result[0])) this.buffer.push(chunk)
for (let i = 1; i < result.length; i++) { }
writeReturn = this[_superWrite](result[i])
} [BUFFERSHIFT] () {
} else { if (this.buffer.length) {
writeReturn = this[_superWrite](Buffer.from(result)) if (this[OBJECTMODE])
} this[BUFFERLENGTH] -= 1
else
this[BUFFERLENGTH] -= this.buffer[0].length
} }
return this.buffer.shift()
}
if (cb) [FLUSH] (noDrain) {
cb() do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
return writeReturn
if (!noDrain && !this.buffer.length && !this[EOF])
this.emit('drain')
} }
[_superWrite] (data) { [FLUSHCHUNK] (chunk) {
return super.write(data) return chunk ? (this.emit('data', chunk), this.flowing) : false
} }
}
class Zlib extends ZlibBase { pipe (dest, opts) {
constructor (opts, mode) { if (this[DESTROYED])
return
const ended = this[EMITTED_END]
opts = opts || {} opts = opts || {}
if (dest === proc.stdout || dest === proc.stderr)
opts.end = false
else
opts.end = opts.end !== false
opts.proxyErrors = !!opts.proxyErrors
opts.flush = opts.flush || constants.Z_NO_FLUSH // piping an ended stream ends immediately
opts.finishFlush = opts.finishFlush || constants.Z_FINISH if (ended) {
super(opts, mode) if (opts.end)
dest.end()
} else {
this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
: new PipeProxyErrors(this, dest, opts))
if (this[ASYNC])
defer(() => this[RESUME]())
else
this[RESUME]()
}
this[_fullFlushFlag] = constants.Z_FULL_FLUSH return dest
this[_level] = opts.level
this[_strategy] = opts.strategy
} }
params (level, strategy) { unpipe (dest) {
if (this[_sawError]) const p = this.pipes.find(p => p.dest === dest)
return if (p) {
this.pipes.splice(this.pipes.indexOf(p), 1)
if (!this[_handle]) p.unpipe()
throw new Error('cannot switch params when binding is closed') }
}
// no way to test this without also not supporting params at all addListener (ev, fn) {
/* istanbul ignore if */ return this.on(ev, fn)
if (!this[_handle].params) }
throw new Error('not supported in this implementation')
if (this[_level] !== level || this[_strategy] !== strategy) { on (ev, fn) {
this.flush(constants.Z_SYNC_FLUSH) const ret = super.on(ev, fn)
assert(this[_handle], 'zlib binding closed') if (ev === 'data' && !this.pipes.length && !this.flowing)
// .params() calls .flush(), but the latter is always async in the this[RESUME]()
// core zlib. We override .flush() temporarily to intercept that and else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
// flush synchronously. super.emit('readable')
const origFlush = this[_handle].flush else if (isEndish(ev) && this[EMITTED_END]) {
this[_handle].flush = (flushFlag, cb) => { super.emit(ev)
this.flush(flushFlag) this.removeAllListeners(ev)
cb() } else if (ev === 'error' && this[EMITTED_ERROR]) {
} if (this[ASYNC])
try { defer(() => fn.call(this, this[EMITTED_ERROR]))
this[_handle].params(level, strategy) else
} finally { fn.call(this, this[EMITTED_ERROR])
this[_handle].flush = origFlush
}
/* istanbul ignore else */
if (this[_handle]) {
this[_level] = level
this[_strategy] = strategy
}
} }
return ret
} }
}
// minimal 2-byte header get emittedEnd () {
class Deflate extends Zlib { return this[EMITTED_END]
constructor (opts) {
super(opts, 'Deflate')
} }
}
class Inflate extends Zlib { [MAYBE_EMIT_END] () {
constructor (opts) { if (!this[EMITTING_END] &&
super(opts, 'Inflate') !this[EMITTED_END] &&
!this[DESTROYED] &&
this.buffer.length === 0 &&
this[EOF]) {
this[EMITTING_END] = true
this.emit('end')
this.emit('prefinish')
this.emit('finish')
if (this[CLOSED])
this.emit('close')
this[EMITTING_END] = false
}
} }
}
// gzip - bigger header, same deflate compression emit (ev, data, ...extra) {
const _portable = Symbol('_portable') // error and close are only events allowed after calling destroy()
class Gzip extends Zlib { if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
constructor (opts) { return
super(opts, 'Gzip') else if (ev === 'data') {
this[_portable] = opts && !!opts.portable return !data ? false
} : this[ASYNC] ? defer(() => this[EMITDATA](data))
: this[EMITDATA](data)
} else if (ev === 'end') {
return this[EMITEND]()
} else if (ev === 'close') {
this[CLOSED] = true
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return
const ret = super.emit('close')
this.removeAllListeners('close')
return ret
} else if (ev === 'error') {
this[EMITTED_ERROR] = data
const ret = super.emit('error', data)
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'resume') {
const ret = super.emit('resume')
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev)
this.removeAllListeners(ev)
return ret
}
[_superWrite] (data) { // Some other unknown event
if (!this[_portable]) const ret = super.emit(ev, data, ...extra)
return super[_superWrite](data) this[MAYBE_EMIT_END]()
return ret
}
// we'll always get the header emitted in one first chunk [EMITDATA] (data) {
// overwrite the OS indicator byte with 0xFF for (const p of this.pipes) {
this[_portable] = false if (p.dest.write(data) === false)
data[9] = 255 this.pause()
return super[_superWrite](data) }
const ret = super.emit('data', data)
this[MAYBE_EMIT_END]()
return ret
} }
}
class Gunzip extends Zlib { [EMITEND] () {
constructor (opts) { if (this[EMITTED_END])
super(opts, 'Gunzip') return
this[EMITTED_END] = true
this.readable = false
if (this[ASYNC])
defer(() => this[EMITEND2]())
else
this[EMITEND2]()
} }
}
// raw - no header [EMITEND2] () {
class DeflateRaw extends Zlib { if (this[DECODER]) {
constructor (opts) { const data = this[DECODER].end()
super(opts, 'DeflateRaw') if (data) {
for (const p of this.pipes) {
p.dest.write(data)
}
super.emit('data', data)
}
}
for (const p of this.pipes) {
p.end()
}
const ret = super.emit('end')
this.removeAllListeners('end')
return ret
} }
}
class InflateRaw extends Zlib { // const all = await stream.collect()
constructor (opts) { collect () {
super(opts, 'InflateRaw') const buf = []
if (!this[OBJECTMODE])
buf.dataLength = 0
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise()
this.on('data', c => {
buf.push(c)
if (!this[OBJECTMODE])
buf.dataLength += c.length
})
return p.then(() => buf)
} }
}
// auto-detect header. // const data = await stream.concat()
class Unzip extends Zlib { concat () {
constructor (opts) { return this[OBJECTMODE]
super(opts, 'Unzip') ? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
} }
}
class Brotli extends ZlibBase { // stream.promise().then(() => done, er => emitted error)
constructor (opts, mode) { promise () {
opts = opts || {} return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
this.on('error', er => reject(er))
this.on('end', () => resolve())
})
}
opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS // for await (let chunk of stream)
opts.finishFlush = opts.finishFlush || constants.BROTLI_OPERATION_FINISH [ASYNCITERATOR] () {
const next = () => {
const res = this.read()
if (res !== null)
return Promise.resolve({ done: false, value: res })
super(opts, mode) if (this[EOF])
return Promise.resolve({ done: true })
this[_fullFlushFlag] = constants.BROTLI_OPERATION_FLUSH let resolve = null
} let reject = null
} const onerr = er => {
this.removeListener('data', ondata)
this.removeListener('end', onend)
reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
resolve({ done: true })
}
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)
this.once('end', onend)
this.once('data', ondata)
})
}
class BrotliCompress extends Brotli { return { next }
constructor (opts) {
super(opts, 'BrotliCompress')
} }
}
class BrotliDecompress extends Brotli { // for (let chunk of stream)
constructor (opts) { [ITERATOR] () {
super(opts, 'BrotliDecompress') const next = () => {
const value = this.read()
const done = value === null
return { value, done }
}
return { next }
} }
}
exports.Deflate = Deflate destroy (er) {
exports.Inflate = Inflate if (this[DESTROYED]) {
exports.Gzip = Gzip if (er)
exports.Gunzip = Gunzip this.emit('error', er)
exports.DeflateRaw = DeflateRaw else
exports.InflateRaw = InflateRaw this.emit(DESTROYED)
exports.Unzip = Unzip return this
/* istanbul ignore else */
if (typeof realZlib.BrotliCompress === 'function') {
exports.BrotliCompress = BrotliCompress
exports.BrotliDecompress = BrotliDecompress
} else {
exports.BrotliCompress = exports.BrotliDecompress = class {
constructor () {
throw new Error('Brotli is not supported in this version of Node.js')
} }
this[DESTROYED] = true
// throw away all buffered data, it's never coming out
this.buffer.length = 0
this[BUFFERLENGTH] = 0
if (typeof this.close === 'function' && !this[CLOSED])
this.close()
if (er)
this.emit('error', er)
else // if no error to emit, still reject pending promises
this.emit(DESTROYED)
return this
}
static isStream (s) {
return !!s && (s instanceof Minipass || s instanceof Stream ||
s instanceof EE && (
typeof s.pipe === 'function' || // readable
(typeof s.write === 'function' && typeof s.end === 'function') // writable
))
} }
} }
...@@ -11856,3601 +12498,4258 @@ const pos = (buf) => { ...@@ -11856,3601 +12498,4258 @@ const pos = (buf) => {
const onesComp = byte => (0xff ^ byte) & 0xff const onesComp = byte => (0xff ^ byte) & 0xff
const twosComp = byte => ((0xff ^ byte) + 1) & 0xff const twosComp = byte => ((0xff ^ byte) + 1) & 0xff
module.exports = {
encode,
parse,
}
/***/ }),
/***/ 1525:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// XXX: This shares a lot in common with extract.js
// maybe some DRY opportunity here?
// tar -t
const hlo = __nccwpck_require__(5274)
const Parser = __nccwpck_require__(8917)
const fs = __nccwpck_require__(7147)
const fsm = __nccwpck_require__(7714)
const path = __nccwpck_require__(1017)
const stripSlash = __nccwpck_require__(8886)
module.exports = (opt_, files, cb) => {
if (typeof opt_ === 'function')
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function')
cb = files, files = null
if (!files)
files = []
else
files = Array.from(files)
const opt = hlo(opt_)
if (opt.sync && typeof cb === 'function')
throw new TypeError('callback not supported for sync tar functions')
if (!opt.file && typeof cb === 'function')
throw new TypeError('callback only supported with file option')
if (files.length)
filesFilter(opt, files)
if (!opt.noResume)
onentryFunction(opt)
return opt.file && opt.sync ? listFileSync(opt)
: opt.file ? listFile(opt, cb)
: list(opt)
}
const onentryFunction = opt => {
const onentry = opt.onentry
opt.onentry = onentry ? e => {
onentry(e)
e.resume()
} : e => e.resume()
}
// construct a filter that limits the file entries listed
// include child entries if a dir is included
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [stripSlash(f), true]))
const filter = opt.filter
const mapHas = (file, r) => {
const root = r || path.parse(file).root || '.'
const ret = file === root ? false
: map.has(file) ? map.get(file)
: mapHas(path.dirname(file), root)
map.set(file, ret)
return ret
}
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
: file => mapHas(stripSlash(file))
}
const listFileSync = opt => {
const p = list(opt)
const file = opt.file
let threw = true
let fd
try {
const stat = fs.statSync(file)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
if (stat.size < readSize)
p.end(fs.readFileSync(file))
else {
let pos = 0
const buf = Buffer.allocUnsafe(readSize)
fd = fs.openSync(file, 'r')
while (pos < stat.size) {
const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
pos += bytesRead
p.write(buf.slice(0, bytesRead))
}
p.end()
}
threw = false
} finally {
if (threw && fd) {
try {
fs.closeSync(fd)
} catch (er) {}
}
}
}
const listFile = (opt, cb) => {
const parse = new Parser(opt)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
const file = opt.file
const p = new Promise((resolve, reject) => {
parse.on('error', reject)
parse.on('end', resolve)
fs.stat(file, (er, stat) => {
if (er)
reject(er)
else {
const stream = new fsm.ReadStream(file, {
readSize: readSize,
size: stat.size,
})
stream.on('error', reject)
stream.pipe(parse)
}
})
})
return cb ? p.then(cb, cb) : p
}
const list = opt => new Parser(opt)
/***/ }),
/***/ 9624:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict";
// wrapper around mkdirp for tar's needs.
// TODO: This should probably be a class, not functionally
// passing around state in a gazillion args.
const mkdirp = __nccwpck_require__(6186)
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
const chownr = __nccwpck_require__(9051)
const normPath = __nccwpck_require__(6843)
class SymlinkError extends Error {
constructor (symlink, path) {
super('Cannot extract through symbolic link')
this.path = path
this.symlink = symlink
}
get name () {
return 'SylinkError'
}
}
class CwdError extends Error {
constructor (path, code) {
super(code + ': Cannot cd into \'' + path + '\'')
this.path = path
this.code = code
}
get name () {
return 'CwdError'
}
}
const cGet = (cache, key) => cache.get(normPath(key))
const cSet = (cache, key, val) => cache.set(normPath(key), val)
const checkCwd = (dir, cb) => {
fs.stat(dir, (er, st) => {
if (er || !st.isDirectory())
er = new CwdError(dir, er && er.code || 'ENOTDIR')
cb(er)
})
}
module.exports = (dir, opt, cb) => {
dir = normPath(dir)
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
(uid !== opt.processUid || gid !== opt.processGid)
const preserve = opt.preserve
const unlink = opt.unlink
const cache = opt.cache
const cwd = normPath(opt.cwd)
const done = (er, created) => {
if (er)
cb(er)
else {
cSet(cache, dir, true)
if (created && doChown)
chownr(created, uid, gid, er => done(er))
else if (needChmod)
fs.chmod(dir, mode, cb)
else
cb()
}
}
if (cache && cGet(cache, dir) === true)
return done()
if (dir === cwd)
return checkCwd(dir, done)
if (preserve)
return mkdirp(dir, {mode}).then(made => done(null, made), done)
module.exports = { const sub = normPath(path.relative(cwd, dir))
encode, const parts = sub.split('/')
parse, mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
} }
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => {
if (!parts.length)
return cb(null, created)
const p = parts.shift()
const part = normPath(path.resolve(base + '/' + p))
if (cGet(cache, part))
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
}
/***/ }), const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => {
if (er) {
fs.lstat(part, (statEr, st) => {
if (statEr) {
statEr.path = statEr.path && normPath(statEr.path)
cb(statEr)
} else if (st.isDirectory())
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
else if (unlink) {
fs.unlink(part, er => {
if (er)
return cb(er)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
})
} else if (st.isSymbolicLink())
return cb(new SymlinkError(part, part + '/' + parts.join('/')))
else
cb(er)
})
} else {
created = created || part
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
}
}
/***/ 1525: const checkCwdSync = dir => {
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { let ok = false
let code = 'ENOTDIR'
try {
ok = fs.statSync(dir).isDirectory()
} catch (er) {
code = er.code
} finally {
if (!ok)
throw new CwdError(dir, code)
}
}
"use strict"; module.exports.sync = (dir, opt) => {
dir = normPath(dir)
// if there's any overlap between mask and mode,
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid
const gid = opt.gid
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
(uid !== opt.processUid || gid !== opt.processGid)
// XXX: This shares a lot in common with extract.js const preserve = opt.preserve
// maybe some DRY opportunity here? const unlink = opt.unlink
const cache = opt.cache
const cwd = normPath(opt.cwd)
// tar -t const done = (created) => {
const hlo = __nccwpck_require__(5274) cSet(cache, dir, true)
const Parser = __nccwpck_require__(8917) if (created && doChown)
const fs = __nccwpck_require__(7147) chownr.sync(created, uid, gid)
const fsm = __nccwpck_require__(7714) if (needChmod)
const path = __nccwpck_require__(1017) fs.chmodSync(dir, mode)
const stripSlash = __nccwpck_require__(8886) }
module.exports = (opt_, files, cb) => { if (cache && cGet(cache, dir) === true)
if (typeof opt_ === 'function') return done()
cb = opt_, files = null, opt_ = {}
else if (Array.isArray(opt_))
files = opt_, opt_ = {}
if (typeof files === 'function') if (dir === cwd) {
cb = files, files = null checkCwdSync(cwd)
return done()
}
if (!files) if (preserve)
files = [] return done(mkdirp.sync(dir, mode))
else
files = Array.from(files)
const opt = hlo(opt_) const sub = normPath(path.relative(cwd, dir))
const parts = sub.split('/')
let created = null
for (let p = parts.shift(), part = cwd;
p && (part += '/' + p);
p = parts.shift()) {
part = normPath(path.resolve(part))
if (cGet(cache, part))
continue
if (opt.sync && typeof cb === 'function') try {
throw new TypeError('callback not supported for sync tar functions') fs.mkdirSync(part, mode)
created = created || part
cSet(cache, part, true)
} catch (er) {
const st = fs.lstatSync(part)
if (st.isDirectory()) {
cSet(cache, part, true)
continue
} else if (unlink) {
fs.unlinkSync(part)
fs.mkdirSync(part, mode)
created = created || part
cSet(cache, part, true)
continue
} else if (st.isSymbolicLink())
return new SymlinkError(part, part + '/' + parts.join('/'))
}
}
if (!opt.file && typeof cb === 'function') return done(created)
throw new TypeError('callback only supported with file option') }
if (files.length)
filesFilter(opt, files)
if (!opt.noResume) /***/ }),
onentryFunction(opt)
return opt.file && opt.sync ? listFileSync(opt) /***/ 8371:
: opt.file ? listFile(opt, cb) /***/ ((module) => {
: list(opt)
}
const onentryFunction = opt => { "use strict";
const onentry = opt.onentry
opt.onentry = onentry ? e => {
onentry(e)
e.resume()
} : e => e.resume()
}
// construct a filter that limits the file entries listed module.exports = (mode, isDir, portable) => {
// include child entries if a dir is included mode &= 0o7777
const filesFilter = (opt, files) => {
const map = new Map(files.map(f => [stripSlash(f), true]))
const filter = opt.filter
const mapHas = (file, r) => { // in portable mode, use the minimum reasonable umask
const root = r || path.parse(file).root || '.' // if this system creates files with 0o664 by default
const ret = file === root ? false // (as some linux distros do), then we'll write the
: map.has(file) ? map.get(file) // archive with 0o644 instead. Also, don't ever create
: mapHas(path.dirname(file), root) // a file that is not readable/writable by the owner.
if (portable)
mode = (mode | 0o600) & ~0o22
map.set(file, ret) // if dirs are readable, then they should be listable
return ret if (isDir) {
if (mode & 0o400)
mode |= 0o100
if (mode & 0o40)
mode |= 0o10
if (mode & 0o4)
mode |= 0o1
} }
return mode
opt.filter = filter
? (file, entry) => filter(file, entry) && mapHas(stripSlash(file))
: file => mapHas(stripSlash(file))
} }
const listFileSync = opt => {
const p = list(opt)
const file = opt.file
let threw = true
let fd
try {
const stat = fs.statSync(file)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
if (stat.size < readSize)
p.end(fs.readFileSync(file))
else {
let pos = 0
const buf = Buffer.allocUnsafe(readSize)
fd = fs.openSync(file, 'r')
while (pos < stat.size) {
const bytesRead = fs.readSync(fd, buf, 0, readSize, pos)
pos += bytesRead
p.write(buf.slice(0, bytesRead))
}
p.end()
}
threw = false
} finally {
if (threw && fd) {
try {
fs.closeSync(fd)
} catch (er) {}
}
}
}
const listFile = (opt, cb) => { /***/ }),
const parse = new Parser(opt)
const readSize = opt.maxReadSize || 16 * 1024 * 1024
const file = opt.file /***/ 7118:
const p = new Promise((resolve, reject) => { /***/ ((module) => {
parse.on('error', reject)
parse.on('end', resolve)
fs.stat(file, (er, stat) => { // warning: extremely hot code path.
if (er) // This has been meticulously optimized for use
reject(er) // within npm install on large package trees.
else { // Do not edit without careful benchmarking.
const stream = new fsm.ReadStream(file, { const normalizeCache = Object.create(null)
readSize: readSize, const {hasOwnProperty} = Object.prototype
size: stat.size, module.exports = s => {
}) if (!hasOwnProperty.call(normalizeCache, s))
stream.on('error', reject) normalizeCache[s] = s.normalize('NFKD')
stream.pipe(parse) return normalizeCache[s]
}
})
})
return cb ? p.then(cb, cb) : p
} }
const list = opt => new Parser(opt)
/***/ }),
/***/ 6843:
/***/ ((module) => {
// on windows, either \ or / are valid directory separators.
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
module.exports = platform !== 'win32' ? p => p
: p => p && p.replace(/\\/g, '/')
/***/ }), /***/ }),
/***/ 9624: /***/ 7900:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; "use strict";
// wrapper around mkdirp for tar's needs.
// TODO: This should probably be a class, not functionally // A readable tar stream creator
// passing around state in a gazillion args. // Technically, this is a transform stream that you write paths into,
// and tar format comes out of.
// The `add()` method is like `write()` but returns this,
// and end() return `this` as well, so you can
// do `new Pack(opt).add('files').add('dir').end().pipe(output)
// You could also do something like:
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar'))
class PackJob {
constructor (path, absolute) {
this.path = path || './'
this.absolute = absolute
this.entry = null
this.stat = null
this.readdir = null
this.pending = false
this.ignore = false
this.piped = false
}
}
const MiniPass = __nccwpck_require__(6684)
const zlib = __nccwpck_require__(3486)
const ReadEntry = __nccwpck_require__(8116)
const WriteEntry = __nccwpck_require__(5450)
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = __nccwpck_require__(665)
const EOF = Buffer.alloc(1024)
const ONSTAT = Symbol('onStat')
const ENDED = Symbol('ended')
const QUEUE = Symbol('queue')
const CURRENT = Symbol('current')
const PROCESS = Symbol('process')
const PROCESSING = Symbol('processing')
const PROCESSJOB = Symbol('processJob')
const JOBS = Symbol('jobs')
const JOBDONE = Symbol('jobDone')
const ADDFSENTRY = Symbol('addFSEntry')
const ADDTARENTRY = Symbol('addTarEntry')
const STAT = Symbol('stat')
const READDIR = Symbol('readdir')
const ONREADDIR = Symbol('onreaddir')
const PIPE = Symbol('pipe')
const ENTRY = Symbol('entry')
const ENTRYOPT = Symbol('entryOpt')
const WRITEENTRYCLASS = Symbol('writeEntryClass')
const WRITE = Symbol('write')
const ONDRAIN = Symbol('ondrain')
const mkdirp = __nccwpck_require__(6186)
const fs = __nccwpck_require__(7147) const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017) const path = __nccwpck_require__(1017)
const chownr = __nccwpck_require__(9051) const warner = __nccwpck_require__(5899)
const normPath = __nccwpck_require__(6843) const normPath = __nccwpck_require__(6843)
class SymlinkError extends Error { const Pack = warner(class Pack extends MiniPass {
constructor (symlink, path) { constructor (opt) {
super('Cannot extract through symbolic link') super(opt)
this.path = path opt = opt || Object.create(null)
this.symlink = symlink this.opt = opt
} this.file = opt.file || ''
this.cwd = opt.cwd || process.cwd()
this.maxReadSize = opt.maxReadSize
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = normPath(opt.prefix || '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
get name () { this[WRITEENTRYCLASS] = WriteEntry
return 'SylinkError' if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
this.portable = !!opt.portable
this.zip = null
if (opt.gzip) {
if (typeof opt.gzip !== 'object')
opt.gzip = {}
if (this.portable)
opt.gzip.portable = true
this.zip = new zlib.Gzip(opt.gzip)
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
this.noDirRecurse = !!opt.noDirRecurse
this.follow = !!opt.follow
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true
this[QUEUE] = new Yallist()
this[JOBS] = 0
this.jobs = +opt.jobs || 4
this[PROCESSING] = false
this[ENDED] = false
} }
}
class CwdError extends Error { [WRITE] (chunk) {
constructor (path, code) { return super.write(chunk)
super(code + ': Cannot cd into \'' + path + '\'')
this.path = path
this.code = code
} }
get name () { add (path) {
return 'CwdError' this.write(path)
return this
} }
}
const cGet = (cache, key) => cache.get(normPath(key)) end (path) {
const cSet = (cache, key, val) => cache.set(normPath(key), val) if (path)
this.write(path)
this[ENDED] = true
this[PROCESS]()
return this
}
const checkCwd = (dir, cb) => { write (path) {
fs.stat(dir, (er, st) => { if (this[ENDED])
if (er || !st.isDirectory()) throw new Error('write after end')
er = new CwdError(dir, er && er.code || 'ENOTDIR')
cb(er)
})
}
module.exports = (dir, opt, cb) => { if (path instanceof ReadEntry)
dir = normPath(dir) this[ADDTARENTRY](path)
else
this[ADDFSENTRY](path)
return this.flowing
}
// if there's any overlap between mask and mode, [ADDTARENTRY] (p) {
// then we'll need an explicit chmod const absolute = normPath(path.resolve(this.cwd, p.path))
const umask = opt.umask // in this case, we don't have to wait for the stat
const mode = opt.mode | 0o0700 if (!this.filter(p.path, p))
const needChmod = (mode & umask) !== 0 p.resume()
else {
const job = new PackJob(p.path, absolute, false)
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job))
job.entry.on('end', _ => this[JOBDONE](job))
this[JOBS] += 1
this[QUEUE].push(job)
}
const uid = opt.uid this[PROCESS]()
const gid = opt.gid }
const doChown = typeof uid === 'number' &&
typeof gid === 'number' &&
(uid !== opt.processUid || gid !== opt.processGid)
const preserve = opt.preserve [ADDFSENTRY] (p) {
const unlink = opt.unlink const absolute = normPath(path.resolve(this.cwd, p))
const cache = opt.cache this[QUEUE].push(new PackJob(p, absolute))
const cwd = normPath(opt.cwd) this[PROCESS]()
}
const done = (er, created) => { [STAT] (job) {
if (er) job.pending = true
cb(er) this[JOBS] += 1
else { const stat = this.follow ? 'stat' : 'lstat'
cSet(cache, dir, true) fs[stat](job.absolute, (er, stat) => {
if (created && doChown) job.pending = false
chownr(created, uid, gid, er => done(er)) this[JOBS] -= 1
else if (needChmod) if (er)
fs.chmod(dir, mode, cb) this.emit('error', er)
else else
cb() this[ONSTAT](job, stat)
} })
} }
if (cache && cGet(cache, dir) === true) [ONSTAT] (job, stat) {
return done() this.statCache.set(job.absolute, stat)
job.stat = stat
// now we have the stat, we can filter it.
if (!this.filter(job.path, stat))
job.ignore = true
this[PROCESS]()
}
[READDIR] (job) {
job.pending = true
this[JOBS] += 1
fs.readdir(job.absolute, (er, entries) => {
job.pending = false
this[JOBS] -= 1
if (er)
return this.emit('error', er)
this[ONREADDIR](job, entries)
})
}
[ONREADDIR] (job, entries) {
this.readdirCache.set(job.absolute, entries)
job.readdir = entries
this[PROCESS]()
}
if (dir === cwd) [PROCESS] () {
return checkCwd(dir, done) if (this[PROCESSING])
return
if (preserve) this[PROCESSING] = true
return mkdirp(dir, {mode}).then(made => done(null, made), done) for (let w = this[QUEUE].head;
w !== null && this[JOBS] < this.jobs;
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}
const sub = normPath(path.relative(cwd, dir)) this[PROCESSING] = false
const parts = sub.split('/')
mkdir_(cwd, parts, mode, cache, unlink, cwd, null, done)
}
const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) {
if (!parts.length) if (this.zip)
return cb(null, created) this.zip.end(EOF)
const p = parts.shift() else {
const part = normPath(path.resolve(base + '/' + p)) super.write(EOF)
if (cGet(cache, part)) super.end()
return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb) }
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)) }
} }
const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => er => { get [CURRENT] () {
if (er) { return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value
fs.lstat(part, (statEr, st) => {
if (statEr) {
statEr.path = statEr.path && normPath(statEr.path)
cb(statEr)
} else if (st.isDirectory())
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
else if (unlink) {
fs.unlink(part, er => {
if (er)
return cb(er)
fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb))
})
} else if (st.isSymbolicLink())
return cb(new SymlinkError(part, part + '/' + parts.join('/')))
else
cb(er)
})
} else {
created = created || part
mkdir_(part, parts, mode, cache, unlink, cwd, created, cb)
} }
}
const checkCwdSync = dir => { [JOBDONE] (job) {
let ok = false this[QUEUE].shift()
let code = 'ENOTDIR' this[JOBS] -= 1
try { this[PROCESS]()
ok = fs.statSync(dir).isDirectory()
} catch (er) {
code = er.code
} finally {
if (!ok)
throw new CwdError(dir, code)
} }
}
module.exports.sync = (dir, opt) => { [PROCESSJOB] (job) {
dir = normPath(dir) if (job.pending)
// if there's any overlap between mask and mode, return
// then we'll need an explicit chmod
const umask = opt.umask
const mode = opt.mode | 0o0700
const needChmod = (mode & umask) !== 0
const uid = opt.uid if (job.entry) {
const gid = opt.gid if (job === this[CURRENT] && !job.piped)
const doChown = typeof uid === 'number' && this[PIPE](job)
typeof gid === 'number' && return
(uid !== opt.processUid || gid !== opt.processGid) }
const preserve = opt.preserve if (!job.stat) {
const unlink = opt.unlink if (this.statCache.has(job.absolute))
const cache = opt.cache this[ONSTAT](job, this.statCache.get(job.absolute))
const cwd = normPath(opt.cwd) else
this[STAT](job)
}
if (!job.stat)
return
const done = (created) => { // filtered out!
cSet(cache, dir, true) if (job.ignore)
if (created && doChown) return
chownr.sync(created, uid, gid)
if (needChmod)
fs.chmodSync(dir, mode)
}
if (cache && cGet(cache, dir) === true) if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) {
return done() if (this.readdirCache.has(job.absolute))
this[ONREADDIR](job, this.readdirCache.get(job.absolute))
else
this[READDIR](job)
if (!job.readdir)
return
}
if (dir === cwd) { // we know it doesn't have an entry, because that got checked above
checkCwdSync(cwd) job.entry = this[ENTRY](job)
return done() if (!job.entry) {
} job.ignore = true
return
}
if (preserve) if (job === this[CURRENT] && !job.piped)
return done(mkdirp.sync(dir, mode)) this[PIPE](job)
}
const sub = normPath(path.relative(cwd, dir)) [ENTRYOPT] (job) {
const parts = sub.split('/') return {
let created = null onwarn: (code, msg, data) => this.warn(code, msg, data),
for (let p = parts.shift(), part = cwd; noPax: this.noPax,
p && (part += '/' + p); cwd: this.cwd,
p = parts.shift()) { absolute: job.absolute,
part = normPath(path.resolve(part)) preservePaths: this.preservePaths,
if (cGet(cache, part)) maxReadSize: this.maxReadSize,
continue strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
mtime: this.mtime,
prefix: this.prefix,
}
}
[ENTRY] (job) {
this[JOBS] += 1
try { try {
fs.mkdirSync(part, mode) return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job))
created = created || part .on('end', () => this[JOBDONE](job))
cSet(cache, part, true) .on('error', er => this.emit('error', er))
} catch (er) { } catch (er) {
const st = fs.lstatSync(part) this.emit('error', er)
if (st.isDirectory()) {
cSet(cache, part, true)
continue
} else if (unlink) {
fs.unlinkSync(part)
fs.mkdirSync(part, mode)
created = created || part
cSet(cache, part, true)
continue
} else if (st.isSymbolicLink())
return new SymlinkError(part, part + '/' + parts.join('/'))
} }
} }
return done(created) [ONDRAIN] () {
} if (this[CURRENT] && this[CURRENT].entry)
this[CURRENT].entry.resume()
}
/***/ }),
/***/ 8371: // like .pipe() but using super, because our write() is special
/***/ ((module) => { [PIPE] (job) {
job.piped = true
"use strict"; if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
module.exports = (mode, isDir, portable) => { const source = job.entry
mode &= 0o7777 const zip = this.zip
// in portable mode, use the minimum reasonable umask if (zip) {
// if this system creates files with 0o664 by default source.on('data', chunk => {
// (as some linux distros do), then we'll write the if (!zip.write(chunk))
// archive with 0o644 instead. Also, don't ever create source.pause()
// a file that is not readable/writable by the owner. })
if (portable) } else {
mode = (mode | 0o600) & ~0o22 source.on('data', chunk => {
if (!super.write(chunk))
source.pause()
})
}
}
// if dirs are readable, then they should be listable pause () {
if (isDir) { if (this.zip)
if (mode & 0o400) this.zip.pause()
mode |= 0o100 return super.pause()
if (mode & 0o40)
mode |= 0o10
if (mode & 0o4)
mode |= 0o1
} }
return mode })
}
class PackSync extends Pack {
constructor (opt) {
super(opt)
this[WRITEENTRYCLASS] = WriteEntrySync
}
/***/ }), // pause/resume are no-ops in sync streams.
pause () {}
resume () {}
/***/ 7118: [STAT] (job) {
/***/ ((module) => { const stat = this.follow ? 'statSync' : 'lstatSync'
this[ONSTAT](job, fs[stat](job.absolute))
}
// warning: extremely hot code path. [READDIR] (job, stat) {
// This has been meticulously optimized for use this[ONREADDIR](job, fs.readdirSync(job.absolute))
// within npm install on large package trees. }
// Do not edit without careful benchmarking.
const normalizeCache = Object.create(null)
const {hasOwnProperty} = Object.prototype
module.exports = s => {
if (!hasOwnProperty.call(normalizeCache, s))
normalizeCache[s] = s.normalize('NFKD')
return normalizeCache[s]
}
// gotta get it all in this tick
[PIPE] (job) {
const source = job.entry
const zip = this.zip
/***/ }), if (job.readdir) {
job.readdir.forEach(entry => {
const p = job.path
const base = p === './' ? '' : p.replace(/\/*$/, '/')
this[ADDFSENTRY](base + entry)
})
}
/***/ 6843: if (zip) {
/***/ ((module) => { source.on('data', chunk => {
zip.write(chunk)
})
} else {
source.on('data', chunk => {
super[WRITE](chunk)
})
}
}
}
// on windows, either \ or / are valid directory separators. Pack.Sync = PackSync
// on unix, \ is a valid character in filenames.
// so, on windows, and only on windows, we replace all \ chars with /,
// so that we can use / as our one and only directory separator char.
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform module.exports = Pack
module.exports = platform !== 'win32' ? p => p
: p => p && p.replace(/\\/g, '/')
/***/ }), /***/ }),
/***/ 7900: /***/ 8917:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; "use strict";
// A readable tar stream creator // this[BUFFER] is the remainder of a chunk if we're waiting for
// Technically, this is a transform stream that you write paths into, // the full 512 bytes of a header to come in. We will Buffer.concat()
// and tar format comes out of. // it to the next write(), which is a mem copy, but a small one.
// The `add()` method is like `write()` but returns this, //
// and end() return `this` as well, so you can // this[QUEUE] is a Yallist of entries that haven't been emitted
// do `new Pack(opt).add('files').add('dir').end().pipe(output) // yet this can only get filled up if the user keeps write()ing after
// You could also do something like: // a write() returns false, or does a write() with more than one entry
// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) //
// We don't buffer chunks, we always parse them and either create an
class PackJob { // entry, or push it into the active entry. The ReadEntry class knows
constructor (path, absolute) { // to throw data away if .ignore=true
this.path = path || './' //
this.absolute = absolute // Shift entry off the buffer when it emits 'end', and emit 'entry' for
this.entry = null // the next one in the list.
this.stat = null //
this.readdir = null // At any time, we're pushing body chunks into the entry at WRITEENTRY,
this.pending = false // and waiting for 'end' on the entry at READENTRY
this.ignore = false //
this.piped = false // ignored entries get .resume() called on them straight away
}
}
const MiniPass = __nccwpck_require__(1077) const warner = __nccwpck_require__(5899)
const zlib = __nccwpck_require__(3486) const Header = __nccwpck_require__(6043)
const ReadEntry = __nccwpck_require__(8116) const EE = __nccwpck_require__(2361)
const WriteEntry = __nccwpck_require__(5450)
const WriteEntrySync = WriteEntry.Sync
const WriteEntryTar = WriteEntry.Tar
const Yallist = __nccwpck_require__(665) const Yallist = __nccwpck_require__(665)
const EOF = Buffer.alloc(1024) const maxMetaEntrySize = 1024 * 1024
const ONSTAT = Symbol('onStat') const Entry = __nccwpck_require__(8116)
const ENDED = Symbol('ended') const Pax = __nccwpck_require__(7996)
const zlib = __nccwpck_require__(3486)
const gzipHeader = Buffer.from([0x1f, 0x8b])
const STATE = Symbol('state')
const WRITEENTRY = Symbol('writeEntry')
const READENTRY = Symbol('readEntry')
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue') const QUEUE = Symbol('queue')
const CURRENT = Symbol('current') const ENDED = Symbol('ended')
const PROCESS = Symbol('process') const EMITTEDEND = Symbol('emittedEnd')
const PROCESSING = Symbol('processing') const EMIT = Symbol('emit')
const PROCESSJOB = Symbol('processJob') const UNZIP = Symbol('unzip')
const JOBS = Symbol('jobs') const CONSUMECHUNK = Symbol('consumeChunk')
const JOBDONE = Symbol('jobDone') const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const ADDFSENTRY = Symbol('addFSEntry') const CONSUMEBODY = Symbol('consumeBody')
const ADDTARENTRY = Symbol('addTarEntry') const CONSUMEMETA = Symbol('consumeMeta')
const STAT = Symbol('stat') const CONSUMEHEADER = Symbol('consumeHeader')
const READDIR = Symbol('readdir') const CONSUMING = Symbol('consuming')
const ONREADDIR = Symbol('onreaddir') const BUFFERCONCAT = Symbol('bufferConcat')
const PIPE = Symbol('pipe') const MAYBEEND = Symbol('maybeEnd')
const ENTRY = Symbol('entry') const WRITING = Symbol('writing')
const ENTRYOPT = Symbol('entryOpt') const ABORTED = Symbol('aborted')
const WRITEENTRYCLASS = Symbol('writeEntryClass') const DONE = Symbol('onDone')
const WRITE = Symbol('write') const SAW_VALID_ENTRY = Symbol('sawValidEntry')
const ONDRAIN = Symbol('ondrain') const SAW_NULL_BLOCK = Symbol('sawNullBlock')
const SAW_EOF = Symbol('sawEOF')
const fs = __nccwpck_require__(7147) const noop = _ => true
const path = __nccwpck_require__(1017)
const warner = __nccwpck_require__(5899)
const normPath = __nccwpck_require__(6843)
const Pack = warner(class Pack extends MiniPass { module.exports = warner(class Parser extends EE {
constructor (opt) { constructor (opt) {
opt = opt || {}
super(opt) super(opt)
opt = opt || Object.create(null)
this.opt = opt
this.file = opt.file || '' this.file = opt.file || ''
this.cwd = opt.cwd || process.cwd()
this.maxReadSize = opt.maxReadSize
this.preservePaths = !!opt.preservePaths
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.prefix = normPath(opt.prefix || '')
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.readdirCache = opt.readdirCache || new Map()
this[WRITEENTRYCLASS] = WriteEntry // set to boolean false when an entry starts. 1024 bytes of \0
if (typeof opt.onwarn === 'function') // is technically a valid tarball, albeit a boring one.
this.on('warn', opt.onwarn) this[SAW_VALID_ENTRY] = null
this.portable = !!opt.portable // these BADARCHIVE errors can't be detected early. listen on DONE.
this.zip = null this.on(DONE, _ => {
if (opt.gzip) { if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
if (typeof opt.gzip !== 'object') // either less than 1 block of data, or all entries were invalid.
opt.gzip = {} // Either way, probably not even a tarball.
if (this.portable) this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
opt.gzip.portable = true }
this.zip = new zlib.Gzip(opt.gzip) })
this.zip.on('data', chunk => super.write(chunk))
this.zip.on('end', _ => super.end())
this.zip.on('drain', _ => this[ONDRAIN]())
this.on('resume', _ => this.zip.resume())
} else
this.on('drain', this[ONDRAIN])
this.noDirRecurse = !!opt.noDirRecurse if (opt.ondone)
this.follow = !!opt.follow this.on(DONE, opt.ondone)
this.noMtime = !!opt.noMtime else {
this.mtime = opt.mtime || null this.on(DONE, _ => {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
})
}
this.filter = typeof opt.filter === 'function' ? opt.filter : _ => true this.strict = !!opt.strict
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize
this.filter = typeof opt.filter === 'function' ? opt.filter : noop
// have to set this so that streams are ok piping into it
this.writable = true
this.readable = false
this[QUEUE] = new Yallist() this[QUEUE] = new Yallist()
this[JOBS] = 0 this[BUFFER] = null
this.jobs = +opt.jobs || 4 this[READENTRY] = null
this[PROCESSING] = false this[WRITEENTRY] = null
this[STATE] = 'begin'
this[META] = ''
this[EX] = null
this[GEX] = null
this[ENDED] = false this[ENDED] = false
this[UNZIP] = null
this[ABORTED] = false
this[SAW_NULL_BLOCK] = false
this[SAW_EOF] = false
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (typeof opt.onentry === 'function')
this.on('entry', opt.onentry)
} }
[WRITE] (chunk) { [CONSUMEHEADER] (chunk, position) {
return super.write(chunk) if (this[SAW_VALID_ENTRY] === null)
} this[SAW_VALID_ENTRY] = false
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('TAR_ENTRY_INVALID', er)
}
add (path) { if (header.nullBlock) {
this.write(path) if (this[SAW_NULL_BLOCK]) {
return this this[SAW_EOF] = true
} // ending an archive with no entries. pointless, but legal.
if (this[STATE] === 'begin')
this[STATE] = 'header'
this[EMIT]('eof')
} else {
this[SAW_NULL_BLOCK] = true
this[EMIT]('nullBlock')
}
} else {
this[SAW_NULL_BLOCK] = false
if (!header.cksumValid)
this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header})
else if (!header.path)
this.warn('TAR_ENTRY_INVALID', 'path is required', {header})
else {
const type = header.type
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header})
else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header})
else {
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
end (path) { // we do this for meta & ignored entries as well, because they
if (path) // are still valid tar, or else we wouldn't know to ignore them
this.write(path) if (!this[SAW_VALID_ENTRY]) {
this[ENDED] = true if (entry.remain) {
this[PROCESS]() // this might be the one!
return this const onend = () => {
} if (!entry.invalid)
this[SAW_VALID_ENTRY] = true
}
entry.on('end', onend)
} else
this[SAW_VALID_ENTRY] = true
}
write (path) { if (entry.meta) {
if (this[ENDED]) if (entry.size > this.maxMetaEntrySize) {
throw new Error('write after end') entry.ignore = true
this[EMIT]('ignoredEntry', entry)
this[STATE] = 'ignore'
entry.resume()
} else if (entry.size > 0) {
this[META] = ''
entry.on('data', c => this[META] += c)
this[STATE] = 'meta'
}
} else {
this[EX] = null
entry.ignore = entry.ignore || !this.filter(entry.path, entry)
if (path instanceof ReadEntry) if (entry.ignore) {
this[ADDTARENTRY](path) // probably valid, just not something we care about
else this[EMIT]('ignoredEntry', entry)
this[ADDFSENTRY](path) this[STATE] = entry.remain ? 'ignore' : 'header'
return this.flowing entry.resume()
} else {
if (entry.remain)
this[STATE] = 'body'
else {
this[STATE] = 'header'
entry.end()
}
if (!this[READENTRY]) {
this[QUEUE].push(entry)
this[NEXTENTRY]()
} else
this[QUEUE].push(entry)
}
}
}
}
}
} }
[ADDTARENTRY] (p) { [PROCESSENTRY] (entry) {
const absolute = normPath(path.resolve(this.cwd, p.path)) let go = true
// in this case, we don't have to wait for the stat
if (!this.filter(p.path, p)) if (!entry) {
p.resume() this[READENTRY] = null
go = false
} else if (Array.isArray(entry))
this.emit.apply(this, entry)
else { else {
const job = new PackJob(p.path, absolute, false) this[READENTRY] = entry
job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)) this.emit('entry', entry)
job.entry.on('end', _ => this[JOBDONE](job)) if (!entry.emittedEnd) {
this[JOBS] += 1 entry.on('end', _ => this[NEXTENTRY]())
this[QUEUE].push(job) go = false
}
} }
this[PROCESS]() return go
} }
[ADDFSENTRY] (p) { [NEXTENTRY] () {
const absolute = normPath(path.resolve(this.cwd, p)) do {} while (this[PROCESSENTRY](this[QUEUE].shift()))
this[QUEUE].push(new PackJob(p, absolute))
this[PROCESS]()
}
[STAT] (job) { if (!this[QUEUE].length) {
job.pending = true // At this point, there's nothing in the queue, but we may have an
this[JOBS] += 1 // entry which is being consumed (readEntry).
const stat = this.follow ? 'stat' : 'lstat' // If we don't, then we definitely can handle more data.
fs[stat](job.absolute, (er, stat) => { // If we do, and either it's flowing, or it has never had any data
job.pending = false // written to it, then it needs more.
this[JOBS] -= 1 // The only other possibility is that it has returned false from a
if (er) // write() call, so we wait for the next drain to continue.
this.emit('error', er) const re = this[READENTRY]
else const drainNow = !re || re.flowing || re.size === re.remain
this[ONSTAT](job, stat) if (drainNow) {
}) if (!this[WRITING])
this.emit('drain')
} else
re.once('drain', _ => this.emit('drain'))
}
} }
[ONSTAT] (job, stat) { [CONSUMEBODY] (chunk, position) {
this.statCache.set(job.absolute, stat) // write up to but no more than writeEntry.blockRemain
job.stat = stat const entry = this[WRITEENTRY]
const br = entry.blockRemain
const c = (br >= chunk.length && position === 0) ? chunk
: chunk.slice(position, position + br)
// now we have the stat, we can filter it. entry.write(c)
if (!this.filter(job.path, stat))
job.ignore = true
this[PROCESS]() if (!entry.blockRemain) {
this[STATE] = 'header'
this[WRITEENTRY] = null
entry.end()
}
return c.length
} }
[READDIR] (job) { [CONSUMEMETA] (chunk, position) {
job.pending = true const entry = this[WRITEENTRY]
this[JOBS] += 1 const ret = this[CONSUMEBODY](chunk, position)
fs.readdir(job.absolute, (er, entries) => {
job.pending = false // if we finished, then the entry is reset
this[JOBS] -= 1 if (!this[WRITEENTRY])
if (er) this[EMITMETA](entry)
return this.emit('error', er)
this[ONREADDIR](job, entries) return ret
})
} }
[ONREADDIR] (job, entries) { [EMIT] (ev, data, extra) {
this.readdirCache.set(job.absolute, entries) if (!this[QUEUE].length && !this[READENTRY])
job.readdir = entries this.emit(ev, data, extra)
this[PROCESS]() else
this[QUEUE].push([ev, data, extra])
} }
[PROCESS] () { [EMITMETA] (entry) {
if (this[PROCESSING]) this[EMIT]('meta', this[META])
return switch (entry.type) {
case 'ExtendedHeader':
case 'OldExtendedHeader':
this[EX] = Pax.parse(this[META], this[EX], false)
break
this[PROCESSING] = true case 'GlobalExtendedHeader':
for (let w = this[QUEUE].head; this[GEX] = Pax.parse(this[META], this[GEX], true)
w !== null && this[JOBS] < this.jobs; break
w = w.next) {
this[PROCESSJOB](w.value)
if (w.value.ignore) {
const p = w.next
this[QUEUE].removeNode(w)
w.next = p
}
}
this[PROCESSING] = false case 'NextFileHasLongPath':
case 'OldGnuLongPath':
this[EX] = this[EX] || Object.create(null)
this[EX].path = this[META].replace(/\0.*/, '')
break
if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { case 'NextFileHasLongLinkpath':
if (this.zip) this[EX] = this[EX] || Object.create(null)
this.zip.end(EOF) this[EX].linkpath = this[META].replace(/\0.*/, '')
else { break
super.write(EOF)
super.end()
}
}
}
get [CURRENT] () { /* istanbul ignore next */
return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value default: throw new Error('unknown meta: ' + entry.type)
}
} }
[JOBDONE] (job) { abort (error) {
this[QUEUE].shift() this[ABORTED] = true
this[JOBS] -= 1 this.emit('abort', error)
this[PROCESS]() // always throws, even in non-strict mode
this.warn('TAR_ABORT', error, { recoverable: false })
} }
[PROCESSJOB] (job) { write (chunk) {
if (job.pending) if (this[ABORTED])
return
if (job.entry) {
if (job === this[CURRENT] && !job.piped)
this[PIPE](job)
return return
}
if (!job.stat) { // first write, might be gzipped
if (this.statCache.has(job.absolute)) if (this[UNZIP] === null && chunk) {
this[ONSTAT](job, this.statCache.get(job.absolute)) if (this[BUFFER]) {
else chunk = Buffer.concat([this[BUFFER], chunk])
this[STAT](job) this[BUFFER] = null
}
if (chunk.length < gzipHeader.length) {
this[BUFFER] = chunk
return true
}
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i])
this[UNZIP] = false
}
if (this[UNZIP] === null) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
this[ENDED] = true
this[CONSUMECHUNK]()
})
this[WRITING] = true
const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
this[WRITING] = false
return ret
}
} }
if (!job.stat)
return
// filtered out! this[WRITING] = true
if (job.ignore) if (this[UNZIP])
return this[UNZIP].write(chunk)
else
this[CONSUMECHUNK](chunk)
this[WRITING] = false
if (!this.noDirRecurse && job.stat.isDirectory() && !job.readdir) { // return false if there's a queue, or if the current entry isn't flowing
if (this.readdirCache.has(job.absolute)) const ret =
this[ONREADDIR](job, this.readdirCache.get(job.absolute)) this[QUEUE].length ? false :
else this[READENTRY] ? this[READENTRY].flowing :
this[READDIR](job) true
if (!job.readdir)
return
}
// we know it doesn't have an entry, because that got checked above // if we have no queue, then that means a clogged READENTRY
job.entry = this[ENTRY](job) if (!ret && !this[QUEUE].length)
if (!job.entry) { this[READENTRY].once('drain', _ => this.emit('drain'))
job.ignore = true
return
}
if (job === this[CURRENT] && !job.piped) return ret
this[PIPE](job)
} }
[ENTRYOPT] (job) { [BUFFERCONCAT] (c) {
return { if (c && !this[ABORTED])
onwarn: (code, msg, data) => this.warn(code, msg, data), this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
noPax: this.noPax,
cwd: this.cwd,
absolute: job.absolute,
preservePaths: this.preservePaths,
maxReadSize: this.maxReadSize,
strict: this.strict,
portable: this.portable,
linkCache: this.linkCache,
statCache: this.statCache,
noMtime: this.noMtime,
mtime: this.mtime,
prefix: this.prefix,
}
} }
[ENTRY] (job) { [MAYBEEND] () {
this[JOBS] += 1 if (this[ENDED] &&
try { !this[EMITTEDEND] &&
return new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)) !this[ABORTED] &&
.on('end', () => this[JOBDONE](job)) !this[CONSUMING]) {
.on('error', er => this.emit('error', er)) this[EMITTEDEND] = true
} catch (er) { const entry = this[WRITEENTRY]
this.emit('error', er) if (entry && entry.blockRemain) {
// truncated, likely a damaged file
const have = this[BUFFER] ? this[BUFFER].length : 0
this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
entry.blockRemain} more bytes, only ${have} available)`, {entry})
if (this[BUFFER])
entry.write(this[BUFFER])
entry.end()
}
this[EMIT](DONE)
} }
} }
[ONDRAIN] () { [CONSUMECHUNK] (chunk) {
if (this[CURRENT] && this[CURRENT].entry) if (this[CONSUMING])
this[CURRENT].entry.resume() this[BUFFERCONCAT](chunk)
} else if (!chunk && !this[BUFFER])
this[MAYBEEND]()
// like .pipe() but using super, because our write() is special else {
[PIPE] (job) { this[CONSUMING] = true
job.piped = true if (this[BUFFER]) {
this[BUFFERCONCAT](chunk)
if (job.readdir) { const c = this[BUFFER]
job.readdir.forEach(entry => { this[BUFFER] = null
const p = job.path this[CONSUMECHUNKSUB](c)
const base = p === './' ? '' : p.replace(/\/*$/, '/') } else
this[ADDFSENTRY](base + entry) this[CONSUMECHUNKSUB](chunk)
})
}
const source = job.entry
const zip = this.zip
if (zip) { while (this[BUFFER] &&
source.on('data', chunk => { this[BUFFER].length >= 512 &&
if (!zip.write(chunk)) !this[ABORTED] &&
source.pause() !this[SAW_EOF]) {
}) const c = this[BUFFER]
} else { this[BUFFER] = null
source.on('data', chunk => { this[CONSUMECHUNKSUB](c)
if (!super.write(chunk)) }
source.pause() this[CONSUMING] = false
})
} }
}
pause () {
if (this.zip)
this.zip.pause()
return super.pause()
}
})
class PackSync extends Pack { if (!this[BUFFER] || this[ENDED])
constructor (opt) { this[MAYBEEND]()
super(opt)
this[WRITEENTRYCLASS] = WriteEntrySync
} }
// pause/resume are no-ops in sync streams. [CONSUMECHUNKSUB] (chunk) {
pause () {} // we know that we are in CONSUMING mode, so anything written goes into
resume () {} // the buffer. Advance the position and put any remainder in the buffer.
let position = 0
const length = chunk.length
while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
switch (this[STATE]) {
case 'begin':
case 'header':
this[CONSUMEHEADER](chunk, position)
position += 512
break
[STAT] (job) { case 'ignore':
const stat = this.follow ? 'statSync' : 'lstatSync' case 'body':
this[ONSTAT](job, fs[stat](job.absolute)) position += this[CONSUMEBODY](chunk, position)
} break
[READDIR] (job, stat) { case 'meta':
this[ONREADDIR](job, fs.readdirSync(job.absolute)) position += this[CONSUMEMETA](chunk, position)
} break
// gotta get it all in this tick /* istanbul ignore next */
[PIPE] (job) { default:
const source = job.entry throw new Error('invalid state: ' + this[STATE])
const zip = this.zip }
}
if (job.readdir) { if (position < length) {
job.readdir.forEach(entry => { if (this[BUFFER])
const p = job.path this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
const base = p === './' ? '' : p.replace(/\/*$/, '/') else
this[ADDFSENTRY](base + entry) this[BUFFER] = chunk.slice(position)
})
} }
}
if (zip) { end (chunk) {
source.on('data', chunk => { if (!this[ABORTED]) {
zip.write(chunk) if (this[UNZIP])
}) this[UNZIP].end(chunk)
} else { else {
source.on('data', chunk => { this[ENDED] = true
super[WRITE](chunk) this.write(chunk)
}) }
} }
} }
} })
Pack.Sync = PackSync
module.exports = Pack
/***/ }), /***/ }),
/***/ 8917: /***/ 9587:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; // A path exclusive reservation system
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// this[BUFFER] is the remainder of a chunk if we're waiting for // is called with a cb that clears the reservation.
// the full 512 bytes of a header to come in. We will Buffer.concat()
// it to the next write(), which is a mem copy, but a small one.
//
// this[QUEUE] is a Yallist of entries that haven't been emitted
// yet this can only get filled up if the user keeps write()ing after
// a write() returns false, or does a write() with more than one entry
//
// We don't buffer chunks, we always parse them and either create an
// entry, or push it into the active entry. The ReadEntry class knows
// to throw data away if .ignore=true
//
// Shift entry off the buffer when it emits 'end', and emit 'entry' for
// the next one in the list.
//
// At any time, we're pushing body chunks into the entry at WRITEENTRY,
// and waiting for 'end' on the entry at READENTRY
// //
// ignored entries get .resume() called on them straight away // Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
const warner = __nccwpck_require__(5899)
const Header = __nccwpck_require__(6043)
const EE = __nccwpck_require__(2361)
const Yallist = __nccwpck_require__(665)
const maxMetaEntrySize = 1024 * 1024
const Entry = __nccwpck_require__(8116)
const Pax = __nccwpck_require__(7996)
const zlib = __nccwpck_require__(3486)
const gzipHeader = Buffer.from([0x1f, 0x8b]) const assert = __nccwpck_require__(9491)
const STATE = Symbol('state') const normalize = __nccwpck_require__(7118)
const WRITEENTRY = Symbol('writeEntry') const stripSlashes = __nccwpck_require__(8886)
const READENTRY = Symbol('readEntry') const { join } = __nccwpck_require__(1017)
const NEXTENTRY = Symbol('nextEntry')
const PROCESSENTRY = Symbol('processEntry')
const EX = Symbol('extendedHeader')
const GEX = Symbol('globalExtendedHeader')
const META = Symbol('meta')
const EMITMETA = Symbol('emitMeta')
const BUFFER = Symbol('buffer')
const QUEUE = Symbol('queue')
const ENDED = Symbol('ended')
const EMITTEDEND = Symbol('emittedEnd')
const EMIT = Symbol('emit')
const UNZIP = Symbol('unzip')
const CONSUMECHUNK = Symbol('consumeChunk')
const CONSUMECHUNKSUB = Symbol('consumeChunkSub')
const CONSUMEBODY = Symbol('consumeBody')
const CONSUMEMETA = Symbol('consumeMeta')
const CONSUMEHEADER = Symbol('consumeHeader')
const CONSUMING = Symbol('consuming')
const BUFFERCONCAT = Symbol('bufferConcat')
const MAYBEEND = Symbol('maybeEnd')
const WRITING = Symbol('writing')
const ABORTED = Symbol('aborted')
const DONE = Symbol('onDone')
const SAW_VALID_ENTRY = Symbol('sawValidEntry')
const SAW_NULL_BLOCK = Symbol('sawNullBlock')
const SAW_EOF = Symbol('sawEOF')
const noop = _ => true const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
module.exports = warner(class Parser extends EE { module.exports = () => {
constructor (opt) { // path => [function or Set]
opt = opt || {} // A Set object means a directory reservation
super(opt) // A fn is a direct reservation on that path
const queues = new Map()
this.file = opt.file || '' // fn => {paths:[path,...], dirs:[path, ...]}
const reservations = new Map()
// set to boolean false when an entry starts. 1024 bytes of \0 // return a set of parent dirs for a given path
// is technically a valid tarball, albeit a boring one. // '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
this[SAW_VALID_ENTRY] = null const getDirs = path => {
const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
if (set.length)
path = join(set[set.length - 1], path)
set.push(path || '/')
return set
}, [])
return dirs
}
// these BADARCHIVE errors can't be detected early. listen on DONE. // functions currently running
this.on(DONE, _ => { const running = new Set()
if (this[STATE] === 'begin' || this[SAW_VALID_ENTRY] === false) {
// either less than 1 block of data, or all entries were invalid.
// Either way, probably not even a tarball.
this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format')
}
})
if (opt.ondone) // return the queues for each path the function cares about
this.on(DONE, opt.ondone) // fn => {paths, dirs}
else { const getQueues = fn => {
this.on(DONE, _ => { const res = reservations.get(fn)
this.emit('prefinish') /* istanbul ignore if - unpossible */
this.emit('finish') if (!res)
this.emit('end') throw new Error('function does not have any path reservations')
this.emit('close') return {
}) paths: res.paths.map(path => queues.get(path)),
dirs: [...res.dirs].map(path => queues.get(path)),
} }
}
this.strict = !!opt.strict // check if fn is first in line for all its paths, and is
this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize // included in the first set for all its dir queues
this.filter = typeof opt.filter === 'function' ? opt.filter : noop const check = fn => {
const {paths, dirs} = getQueues(fn)
// have to set this so that streams are ok piping into it return paths.every(q => q[0] === fn) &&
this.writable = true dirs.every(q => q[0] instanceof Set && q[0].has(fn))
this.readable = false }
this[QUEUE] = new Yallist() // run the function if it's first in line and not already running
this[BUFFER] = null const run = fn => {
this[READENTRY] = null if (running.has(fn) || !check(fn))
this[WRITEENTRY] = null return false
this[STATE] = 'begin' running.add(fn)
this[META] = '' fn(() => clear(fn))
this[EX] = null return true
this[GEX] = null
this[ENDED] = false
this[UNZIP] = null
this[ABORTED] = false
this[SAW_NULL_BLOCK] = false
this[SAW_EOF] = false
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
if (typeof opt.onentry === 'function')
this.on('entry', opt.onentry)
} }
[CONSUMEHEADER] (chunk, position) { const clear = fn => {
if (this[SAW_VALID_ENTRY] === null) if (!running.has(fn))
this[SAW_VALID_ENTRY] = false return false
let header
try {
header = new Header(chunk, position, this[EX], this[GEX])
} catch (er) {
return this.warn('TAR_ENTRY_INVALID', er)
}
if (header.nullBlock) { const { paths, dirs } = reservations.get(fn)
if (this[SAW_NULL_BLOCK]) { const next = new Set()
this[SAW_EOF] = true
// ending an archive with no entries. pointless, but legal.
if (this[STATE] === 'begin')
this[STATE] = 'header'
this[EMIT]('eof')
} else {
this[SAW_NULL_BLOCK] = true
this[EMIT]('nullBlock')
}
} else {
this[SAW_NULL_BLOCK] = false
if (!header.cksumValid)
this.warn('TAR_ENTRY_INVALID', 'checksum failure', {header})
else if (!header.path)
this.warn('TAR_ENTRY_INVALID', 'path is required', {header})
else {
const type = header.type
if (/^(Symbolic)?Link$/.test(type) && !header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath required', {header})
else if (!/^(Symbolic)?Link$/.test(type) && header.linkpath)
this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', {header})
else {
const entry = this[WRITEENTRY] = new Entry(header, this[EX], this[GEX])
// we do this for meta & ignored entries as well, because they paths.forEach(path => {
// are still valid tar, or else we wouldn't know to ignore them const q = queues.get(path)
if (!this[SAW_VALID_ENTRY]) { assert.equal(q[0], fn)
if (entry.remain) { if (q.length === 1)
// this might be the one! queues.delete(path)
const onend = () => { else {
if (!entry.invalid) q.shift()
this[SAW_VALID_ENTRY] = true if (typeof q[0] === 'function')
} next.add(q[0])
entry.on('end', onend) else
} else q[0].forEach(fn => next.add(fn))
this[SAW_VALID_ENTRY] = true }
} })
if (entry.meta) { dirs.forEach(dir => {
if (entry.size > this.maxMetaEntrySize) { const q = queues.get(dir)
entry.ignore = true assert(q[0] instanceof Set)
this[EMIT]('ignoredEntry', entry) if (q[0].size === 1 && q.length === 1)
this[STATE] = 'ignore' queues.delete(dir)
entry.resume() else if (q[0].size === 1) {
} else if (entry.size > 0) { q.shift()
this[META] = ''
entry.on('data', c => this[META] += c)
this[STATE] = 'meta'
}
} else {
this[EX] = null
entry.ignore = entry.ignore || !this.filter(entry.path, entry)
if (entry.ignore) { // must be a function or else the Set would've been reused
// probably valid, just not something we care about next.add(q[0])
this[EMIT]('ignoredEntry', entry) } else
this[STATE] = entry.remain ? 'ignore' : 'header' q[0].delete(fn)
entry.resume() })
} else { running.delete(fn)
if (entry.remain)
this[STATE] = 'body'
else {
this[STATE] = 'header'
entry.end()
}
if (!this[READENTRY]) { next.forEach(fn => run(fn))
this[QUEUE].push(entry) return true
this[NEXTENTRY]()
} else
this[QUEUE].push(entry)
}
}
}
}
}
} }
[PROCESSENTRY] (entry) { const reserve = (paths, fn) => {
let go = true // collide on matches across case and unicode normalization
// On windows, thanks to the magic of 8.3 shortnames, it is fundamentally
// impossible to determine whether two paths refer to the same thing on
// disk, without asking the kernel for a shortname.
// So, we just pretend that every path matches every other path here,
// effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
// don't need normPath, because we skip this entirely for windows
return normalize(stripSlashes(join(p))).toLowerCase()
})
if (!entry) { const dirs = new Set(
this[READENTRY] = null paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))
go = false )
} else if (Array.isArray(entry)) reservations.set(fn, {dirs, paths})
this.emit.apply(this, entry) paths.forEach(path => {
else { const q = queues.get(path)
this[READENTRY] = entry if (!q)
this.emit('entry', entry) queues.set(path, [fn])
if (!entry.emittedEnd) { else
entry.on('end', _ => this[NEXTENTRY]()) q.push(fn)
go = false })
} dirs.forEach(dir => {
} const q = queues.get(dir)
if (!q)
queues.set(dir, [new Set([fn])])
else if (q[q.length - 1] instanceof Set)
q[q.length - 1].add(fn)
else
q.push(new Set([fn]))
})
return go return run(fn)
} }
[NEXTENTRY] () { return { check, reserve }
do {} while (this[PROCESSENTRY](this[QUEUE].shift())) }
if (!this[QUEUE].length) {
// At this point, there's nothing in the queue, but we may have an
// entry which is being consumed (readEntry).
// If we don't, then we definitely can handle more data.
// If we do, and either it's flowing, or it has never had any data
// written to it, then it needs more.
// The only other possibility is that it has returned false from a
// write() call, so we wait for the next drain to continue.
const re = this[READENTRY]
const drainNow = !re || re.flowing || re.size === re.remain
if (drainNow) {
if (!this[WRITING])
this.emit('drain')
} else
re.once('drain', _ => this.emit('drain'))
}
}
[CONSUMEBODY] (chunk, position) { /***/ }),
// write up to but no more than writeEntry.blockRemain
const entry = this[WRITEENTRY]
const br = entry.blockRemain
const c = (br >= chunk.length && position === 0) ? chunk
: chunk.slice(position, position + br)
entry.write(c) /***/ 7996:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
if (!entry.blockRemain) { "use strict";
this[STATE] = 'header'
this[WRITEENTRY] = null
entry.end()
}
return c.length const Header = __nccwpck_require__(6043)
const path = __nccwpck_require__(1017)
class Pax {
constructor (obj, global) {
this.atime = obj.atime || null
this.charset = obj.charset || null
this.comment = obj.comment || null
this.ctime = obj.ctime || null
this.gid = obj.gid || null
this.gname = obj.gname || null
this.linkpath = obj.linkpath || null
this.mtime = obj.mtime || null
this.path = obj.path || null
this.size = obj.size || null
this.uid = obj.uid || null
this.uname = obj.uname || null
this.dev = obj.dev || null
this.ino = obj.ino || null
this.nlink = obj.nlink || null
this.global = global || false
} }
[CONSUMEMETA] (chunk, position) { encode () {
const entry = this[WRITEENTRY] const body = this.encodeBody()
const ret = this[CONSUMEBODY](chunk, position) if (body === '')
return null
// if we finished, then the entry is reset const bodyLen = Buffer.byteLength(body)
if (!this[WRITEENTRY]) // round up to 512 bytes
this[EMITMETA](entry) // add 512 for header
const bufLen = 512 * Math.ceil(1 + bodyLen / 512)
const buf = Buffer.allocUnsafe(bufLen)
return ret // 0-fill the header section, it might not hit every field
for (let i = 0; i < 512; i++)
buf[i] = 0
new Header({
// XXX split the path
// then the path should be PaxHeader + basename, but less than 99,
// prepend with the dirname
path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99),
mode: this.mode || 0o644,
uid: this.uid || null,
gid: this.gid || null,
size: bodyLen,
mtime: this.mtime || null,
type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader',
linkpath: '',
uname: this.uname || '',
gname: this.gname || '',
devmaj: 0,
devmin: 0,
atime: this.atime || null,
ctime: this.ctime || null,
}).encode(buf)
buf.write(body, 512, bodyLen, 'utf8')
// null pad after the body
for (let i = bodyLen + 512; i < buf.length; i++)
buf[i] = 0
return buf
} }
[EMIT] (ev, data, extra) { encodeBody () {
if (!this[QUEUE].length && !this[READENTRY]) return (
this.emit(ev, data, extra) this.encodeField('path') +
else this.encodeField('ctime') +
this[QUEUE].push([ev, data, extra]) this.encodeField('atime') +
this.encodeField('dev') +
this.encodeField('ino') +
this.encodeField('nlink') +
this.encodeField('charset') +
this.encodeField('comment') +
this.encodeField('gid') +
this.encodeField('gname') +
this.encodeField('linkpath') +
this.encodeField('mtime') +
this.encodeField('size') +
this.encodeField('uid') +
this.encodeField('uname')
)
} }
[EMITMETA] (entry) { encodeField (field) {
this[EMIT]('meta', this[META]) if (this[field] === null || this[field] === undefined)
switch (entry.type) { return ''
case 'ExtendedHeader': const v = this[field] instanceof Date ? this[field].getTime() / 1000
case 'OldExtendedHeader': : this[field]
this[EX] = Pax.parse(this[META], this[EX], false) const s = ' ' +
break (field === 'dev' || field === 'ino' || field === 'nlink'
? 'SCHILY.' : '') +
field + '=' + v + '\n'
const byteLen = Buffer.byteLength(s)
// the digits includes the length of the digits in ascii base-10
// so if it's 9 characters, then adding 1 for the 9 makes it 10
// which makes it 11 chars.
let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
if (byteLen + digits >= Math.pow(10, digits))
digits += 1
const len = digits + byteLen
return len + s
}
}
case 'GlobalExtendedHeader': Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g)
this[GEX] = Pax.parse(this[META], this[GEX], true)
break
case 'NextFileHasLongPath': const merge = (a, b) =>
case 'OldGnuLongPath': b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a
this[EX] = this[EX] || Object.create(null)
this[EX].path = this[META].replace(/\0.*/, '')
break
case 'NextFileHasLongLinkpath': const parseKV = string =>
this[EX] = this[EX] || Object.create(null) string
this[EX].linkpath = this[META].replace(/\0.*/, '') .replace(/\n$/, '')
break .split('\n')
.reduce(parseKVLine, Object.create(null))
/* istanbul ignore next */ const parseKVLine = (set, line) => {
default: throw new Error('unknown meta: ' + entry.type) const n = parseInt(line, 10)
}
}
abort (error) { // XXX Values with \n in them will fail this.
this[ABORTED] = true // Refactor to not be a naive line-by-line parse.
this.emit('abort', error) if (n !== Buffer.byteLength(line) + 1)
// always throws, even in non-strict mode return set
this.warn('TAR_ABORT', error, { recoverable: false })
}
write (chunk) { line = line.substr((n + ' ').length)
if (this[ABORTED]) const kv = line.split('=')
return const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
if (!k)
return set
// first write, might be gzipped const v = kv.join('=')
if (this[UNZIP] === null && chunk) { set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k)
if (this[BUFFER]) { ? new Date(v * 1000)
chunk = Buffer.concat([this[BUFFER], chunk]) : /^[0-9]+$/.test(v) ? +v
this[BUFFER] = null : v
} return set
if (chunk.length < gzipHeader.length) { }
this[BUFFER] = chunk
return true
}
for (let i = 0; this[UNZIP] === null && i < gzipHeader.length; i++) {
if (chunk[i] !== gzipHeader[i])
this[UNZIP] = false
}
if (this[UNZIP] === null) {
const ended = this[ENDED]
this[ENDED] = false
this[UNZIP] = new zlib.Unzip()
this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk))
this[UNZIP].on('error', er => this.abort(er))
this[UNZIP].on('end', _ => {
this[ENDED] = true
this[CONSUMECHUNK]()
})
this[WRITING] = true
const ret = this[UNZIP][ended ? 'end' : 'write'](chunk)
this[WRITING] = false
return ret
}
}
this[WRITING] = true module.exports = Pax
if (this[UNZIP])
this[UNZIP].write(chunk)
else
this[CONSUMECHUNK](chunk)
this[WRITING] = false
// return false if there's a queue, or if the current entry isn't flowing
const ret =
this[QUEUE].length ? false :
this[READENTRY] ? this[READENTRY].flowing :
true
// if we have no queue, then that means a clogged READENTRY /***/ }),
if (!ret && !this[QUEUE].length)
this[READENTRY].once('drain', _ => this.emit('drain'))
return ret /***/ 8116:
} /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
[BUFFERCONCAT] (c) { "use strict";
if (c && !this[ABORTED])
this[BUFFER] = this[BUFFER] ? Buffer.concat([this[BUFFER], c]) : c
}
[MAYBEEND] () { const MiniPass = __nccwpck_require__(6684)
if (this[ENDED] && const normPath = __nccwpck_require__(6843)
!this[EMITTEDEND] &&
!this[ABORTED] &&
!this[CONSUMING]) {
this[EMITTEDEND] = true
const entry = this[WRITEENTRY]
if (entry && entry.blockRemain) {
// truncated, likely a damaged file
const have = this[BUFFER] ? this[BUFFER].length : 0
this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${
entry.blockRemain} more bytes, only ${have} available)`, {entry})
if (this[BUFFER])
entry.write(this[BUFFER])
entry.end()
}
this[EMIT](DONE)
}
}
[CONSUMECHUNK] (chunk) { const SLURP = Symbol('slurp')
if (this[CONSUMING]) module.exports = class ReadEntry extends MiniPass {
this[BUFFERCONCAT](chunk) constructor (header, ex, gex) {
else if (!chunk && !this[BUFFER]) super()
this[MAYBEEND]() // read entries always start life paused. this is to avoid the
else { // situation where Minipass's auto-ending empty streams results
this[CONSUMING] = true // in an entry ending before we're ready for it.
if (this[BUFFER]) { this.pause()
this[BUFFERCONCAT](chunk) this.extended = ex
const c = this[BUFFER] this.globalExtended = gex
this[BUFFER] = null this.header = header
this[CONSUMECHUNKSUB](c) this.startBlockSize = 512 * Math.ceil(header.size / 512)
} else this.blockRemain = this.startBlockSize
this[CONSUMECHUNKSUB](chunk) this.remain = header.size
this.type = header.type
this.meta = false
this.ignore = false
switch (this.type) {
case 'File':
case 'OldFile':
case 'Link':
case 'SymbolicLink':
case 'CharacterDevice':
case 'BlockDevice':
case 'Directory':
case 'FIFO':
case 'ContiguousFile':
case 'GNUDumpDir':
break
while (this[BUFFER] && case 'NextFileHasLongLinkpath':
this[BUFFER].length >= 512 && case 'NextFileHasLongPath':
!this[ABORTED] && case 'OldGnuLongPath':
!this[SAW_EOF]) { case 'GlobalExtendedHeader':
const c = this[BUFFER] case 'ExtendedHeader':
this[BUFFER] = null case 'OldExtendedHeader':
this[CONSUMECHUNKSUB](c) this.meta = true
} break
this[CONSUMING] = false
// NOTE: gnutar and bsdtar treat unrecognized types as 'File'
// it may be worth doing the same, but with a warning.
default:
this.ignore = true
} }
if (!this[BUFFER] || this[ENDED]) this.path = normPath(header.path)
this[MAYBEEND]() this.mode = header.mode
} if (this.mode)
this.mode = this.mode & 0o7777
this.uid = header.uid
this.gid = header.gid
this.uname = header.uname
this.gname = header.gname
this.size = header.size
this.mtime = header.mtime
this.atime = header.atime
this.ctime = header.ctime
this.linkpath = normPath(header.linkpath)
this.uname = header.uname
this.gname = header.gname
[CONSUMECHUNKSUB] (chunk) { if (ex)
// we know that we are in CONSUMING mode, so anything written goes into this[SLURP](ex)
// the buffer. Advance the position and put any remainder in the buffer. if (gex)
let position = 0 this[SLURP](gex, true)
const length = chunk.length }
while (position + 512 <= length && !this[ABORTED] && !this[SAW_EOF]) {
switch (this[STATE]) {
case 'begin':
case 'header':
this[CONSUMEHEADER](chunk, position)
position += 512
break
case 'ignore': write (data) {
case 'body': const writeLen = data.length
position += this[CONSUMEBODY](chunk, position) if (writeLen > this.blockRemain)
break throw new Error('writing more to entry than is appropriate')
case 'meta': const r = this.remain
position += this[CONSUMEMETA](chunk, position) const br = this.blockRemain
break this.remain = Math.max(0, r - writeLen)
this.blockRemain = Math.max(0, br - writeLen)
if (this.ignore)
return true
/* istanbul ignore next */ if (r >= writeLen)
default: return super.write(data)
throw new Error('invalid state: ' + this[STATE])
}
}
if (position < length) { // r < writeLen
if (this[BUFFER]) return super.write(data.slice(0, r))
this[BUFFER] = Buffer.concat([chunk.slice(position), this[BUFFER]])
else
this[BUFFER] = chunk.slice(position)
}
} }
end (chunk) { [SLURP] (ex, global) {
if (!this[ABORTED]) { for (const k in ex) {
if (this[UNZIP]) // we slurp in everything except for the path attribute in
this[UNZIP].end(chunk) // a global extended header, because that's weird.
else { if (ex[k] !== null && ex[k] !== undefined &&
this[ENDED] = true !(global && k === 'path'))
this.write(chunk) this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k]
}
} }
} }
}) }
/***/ }), /***/ }),
/***/ 9587: /***/ 5923:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
// A path exclusive reservation system "use strict";
// reserve([list, of, paths], fn)
// When the fn is first in line for all its paths, it
// is called with a cb that clears the reservation.
//
// Used by async unpack to avoid clobbering paths in use,
// while still allowing maximal safe parallelization.
const assert = __nccwpck_require__(9491)
const normalize = __nccwpck_require__(7118)
const stripSlashes = __nccwpck_require__(8886)
const { join } = __nccwpck_require__(1017)
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
module.exports = () => {
// path => [function or Set]
// A Set object means a directory reservation
// A fn is a direct reservation on that path
const queues = new Map()
// fn => {paths:[path,...], dirs:[path, ...]}
const reservations = new Map()
// return a set of parent dirs for a given path
// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d']
const getDirs = path => {
const dirs = path.split('/').slice(0, -1).reduce((set, path) => {
if (set.length)
path = join(set[set.length - 1], path)
set.push(path || '/')
return set
}, [])
return dirs
}
// functions currently running // tar -r
const running = new Set() const hlo = __nccwpck_require__(5274)
const Pack = __nccwpck_require__(7900)
const fs = __nccwpck_require__(7147)
const fsm = __nccwpck_require__(7714)
const t = __nccwpck_require__(1525)
const path = __nccwpck_require__(1017)
// return the queues for each path the function cares about // starting at the head of the file, read a Header
// fn => {paths, dirs} // If the checksum is invalid, that's our position to start writing
const getQueues = fn => { // If it is, jump forward by the specified size (round up to 512)
const res = reservations.get(fn) // and try again.
/* istanbul ignore if - unpossible */ // Write the new Pack stream starting there.
if (!res)
throw new Error('function does not have any path reservations')
return {
paths: res.paths.map(path => queues.get(path)),
dirs: [...res.dirs].map(path => queues.get(path)),
}
}
// check if fn is first in line for all its paths, and is const Header = __nccwpck_require__(6043)
// included in the first set for all its dir queues
const check = fn => {
const {paths, dirs} = getQueues(fn)
return paths.every(q => q[0] === fn) &&
dirs.every(q => q[0] instanceof Set && q[0].has(fn))
}
// run the function if it's first in line and not already running module.exports = (opt_, files, cb) => {
const run = fn => { const opt = hlo(opt_)
if (running.has(fn) || !check(fn))
return false
running.add(fn)
fn(() => clear(fn))
return true
}
const clear = fn => { if (!opt.file)
if (!running.has(fn)) throw new TypeError('file is required')
return false
const { paths, dirs } = reservations.get(fn) if (opt.gzip)
const next = new Set() throw new TypeError('cannot append to compressed archives')
paths.forEach(path => { if (!files || !Array.isArray(files) || !files.length)
const q = queues.get(path) throw new TypeError('no files or directories specified')
assert.equal(q[0], fn)
if (q.length === 1)
queues.delete(path)
else {
q.shift()
if (typeof q[0] === 'function')
next.add(q[0])
else
q[0].forEach(fn => next.add(fn))
}
})
dirs.forEach(dir => { files = Array.from(files)
const q = queues.get(dir)
assert(q[0] instanceof Set)
if (q[0].size === 1 && q.length === 1)
queues.delete(dir)
else if (q[0].size === 1) {
q.shift()
// must be a function or else the Set would've been reused return opt.sync ? replaceSync(opt, files)
next.add(q[0]) : replace(opt, files, cb)
} else }
q[0].delete(fn)
})
running.delete(fn)
next.forEach(fn => run(fn)) const replaceSync = (opt, files) => {
return true const p = new Pack.Sync(opt)
}
const reserve = (paths, fn) => { let threw = true
// collide on matches across case and unicode normalization let fd
// On windows, thanks to the magic of 8.3 shortnames, it is fundamentally let position
// impossible to determine whether two paths refer to the same thing on
// disk, without asking the kernel for a shortname.
// So, we just pretend that every path matches every other path here,
// effectively removing all parallelization on windows.
paths = isWindows ? ['win32 parallelization disabled'] : paths.map(p => {
// don't need normPath, because we skip this entirely for windows
return normalize(stripSlashes(join(p))).toLowerCase()
})
const dirs = new Set( try {
paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b)) try {
) fd = fs.openSync(opt.file, 'r+')
reservations.set(fn, {dirs, paths}) } catch (er) {
paths.forEach(path => { if (er.code === 'ENOENT')
const q = queues.get(path) fd = fs.openSync(opt.file, 'w+')
if (!q)
queues.set(path, [fn])
else
q.push(fn)
})
dirs.forEach(dir => {
const q = queues.get(dir)
if (!q)
queues.set(dir, [new Set([fn])])
else if (q[q.length - 1] instanceof Set)
q[q.length - 1].add(fn)
else else
q.push(new Set([fn])) throw er
}) }
return run(fn)
}
return { check, reserve }
}
const st = fs.fstatSync(fd)
const headBuf = Buffer.alloc(512)
/***/ }), POSITION: for (position = 0; position < st.size; position += 512) {
for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) {
bytes = fs.readSync(
fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
)
/***/ 7996: if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { throw new Error('cannot append to compressed archives')
"use strict"; if (!bytes)
break POSITION
}
const Header = __nccwpck_require__(6043) const h = new Header(headBuf)
const path = __nccwpck_require__(1017) if (!h.cksumValid)
break
const entryBlockSize = 512 * Math.ceil(h.size / 512)
if (position + entryBlockSize + 512 > st.size)
break
// the 512 for the header we just parsed will be added as well
// also jump ahead all the blocks for the body
position += entryBlockSize
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
}
threw = false
class Pax { streamSync(opt, p, position, fd, files)
constructor (obj, global) { } finally {
this.atime = obj.atime || null if (threw) {
this.charset = obj.charset || null try {
this.comment = obj.comment || null fs.closeSync(fd)
this.ctime = obj.ctime || null } catch (er) {}
this.gid = obj.gid || null }
this.gname = obj.gname || null
this.linkpath = obj.linkpath || null
this.mtime = obj.mtime || null
this.path = obj.path || null
this.size = obj.size || null
this.uid = obj.uid || null
this.uname = obj.uname || null
this.dev = obj.dev || null
this.ino = obj.ino || null
this.nlink = obj.nlink || null
this.global = global || false
} }
}
const streamSync = (opt, p, position, fd, files) => {
const stream = new fsm.WriteStreamSync(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
addFilesSync(p, files)
}
encode () { const replace = (opt, files, cb) => {
const body = this.encodeBody() files = Array.from(files)
if (body === '') const p = new Pack(opt)
return null
const bodyLen = Buffer.byteLength(body) const getPos = (fd, size, cb_) => {
// round up to 512 bytes const cb = (er, pos) => {
// add 512 for header if (er)
const bufLen = 512 * Math.ceil(1 + bodyLen / 512) fs.close(fd, _ => cb_(er))
const buf = Buffer.allocUnsafe(bufLen) else
cb_(null, pos)
}
// 0-fill the header section, it might not hit every field let position = 0
for (let i = 0; i < 512; i++) if (size === 0)
buf[i] = 0 return cb(null, 0)
new Header({ let bufPos = 0
// XXX split the path const headBuf = Buffer.alloc(512)
// then the path should be PaxHeader + basename, but less than 99, const onread = (er, bytes) => {
// prepend with the dirname if (er)
path: ('PaxHeader/' + path.basename(this.path)).slice(0, 99), return cb(er)
mode: this.mode || 0o644, bufPos += bytes
uid: this.uid || null, if (bufPos < 512 && bytes) {
gid: this.gid || null, return fs.read(
size: bodyLen, fd, headBuf, bufPos, headBuf.length - bufPos,
mtime: this.mtime || null, position + bufPos, onread
type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader', )
linkpath: '', }
uname: this.uname || '',
gname: this.gname || '',
devmaj: 0,
devmin: 0,
atime: this.atime || null,
ctime: this.ctime || null,
}).encode(buf)
buf.write(body, 512, bodyLen, 'utf8') if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b)
return cb(new Error('cannot append to compressed archives'))
// null pad after the body // truncated header
for (let i = bodyLen + 512; i < buf.length; i++) if (bufPos < 512)
buf[i] = 0 return cb(null, position)
return buf const h = new Header(headBuf)
} if (!h.cksumValid)
return cb(null, position)
encodeBody () { const entryBlockSize = 512 * Math.ceil(h.size / 512)
return ( if (position + entryBlockSize + 512 > size)
this.encodeField('path') + return cb(null, position)
this.encodeField('ctime') +
this.encodeField('atime') +
this.encodeField('dev') +
this.encodeField('ino') +
this.encodeField('nlink') +
this.encodeField('charset') +
this.encodeField('comment') +
this.encodeField('gid') +
this.encodeField('gname') +
this.encodeField('linkpath') +
this.encodeField('mtime') +
this.encodeField('size') +
this.encodeField('uid') +
this.encodeField('uname')
)
}
encodeField (field) { position += entryBlockSize + 512
if (this[field] === null || this[field] === undefined) if (position >= size)
return '' return cb(null, position)
const v = this[field] instanceof Date ? this[field].getTime() / 1000
: this[field]
const s = ' ' +
(field === 'dev' || field === 'ino' || field === 'nlink'
? 'SCHILY.' : '') +
field + '=' + v + '\n'
const byteLen = Buffer.byteLength(s)
// the digits includes the length of the digits in ascii base-10
// so if it's 9 characters, then adding 1 for the 9 makes it 10
// which makes it 11 chars.
let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1
if (byteLen + digits >= Math.pow(10, digits))
digits += 1
const len = digits + byteLen
return len + s
}
}
Pax.parse = (string, ex, g) => new Pax(merge(parseKV(string), ex), g) if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
bufPos = 0
fs.read(fd, headBuf, 0, 512, position, onread)
}
fs.read(fd, headBuf, 0, 512, position, onread)
}
const merge = (a, b) => const promise = new Promise((resolve, reject) => {
b ? Object.keys(a).reduce((s, k) => (s[k] = a[k], s), b) : a p.on('error', reject)
let flag = 'r+'
const onopen = (er, fd) => {
if (er && er.code === 'ENOENT' && flag === 'r+') {
flag = 'w+'
return fs.open(opt.file, flag, onopen)
}
const parseKV = string => if (er)
string return reject(er)
.replace(/\n$/, '')
.split('\n')
.reduce(parseKVLine, Object.create(null))
const parseKVLine = (set, line) => { fs.fstat(fd, (er, st) => {
const n = parseInt(line, 10) if (er)
return fs.close(fd, () => reject(er))
// XXX Values with \n in them will fail this. getPos(fd, st.size, (er, position) => {
// Refactor to not be a naive line-by-line parse. if (er)
if (n !== Buffer.byteLength(line) + 1) return reject(er)
return set const stream = new fsm.WriteStream(opt.file, {
fd: fd,
start: position,
})
p.pipe(stream)
stream.on('error', reject)
stream.on('close', resolve)
addFilesAsync(p, files)
})
})
}
fs.open(opt.file, flag, onopen)
})
line = line.substr((n + ' ').length) return cb ? promise.then(cb, cb) : promise
const kv = line.split('=') }
const k = kv.shift().replace(/^SCHILY\.(dev|ino|nlink)/, '$1')
if (!k)
return set
const v = kv.join('=') const addFilesSync = (p, files) => {
set[k] = /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) files.forEach(file => {
? new Date(v * 1000) if (file.charAt(0) === '@') {
: /^[0-9]+$/.test(v) ? +v t({
: v file: path.resolve(p.cwd, file.substr(1)),
return set sync: true,
noResume: true,
onentry: entry => p.add(entry),
})
} else
p.add(file)
})
p.end()
} }
module.exports = Pax const addFilesAsync = (p, files) => {
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
} else
p.add(file)
}
p.end()
}
/***/ }), /***/ }),
/***/ 8116: /***/ 7111:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; // unix absolute paths are also absolute on win32, so we use this for both
const { isAbsolute, parse } = (__nccwpck_require__(1017).win32)
const MiniPass = __nccwpck_require__(1077) // returns [root, stripped]
const normPath = __nccwpck_require__(6843) // Note that windows will think that //x/y/z/a has a "root" of //x/y, and in
// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip /
// explicitly if it's the first character.
// drive-specific relative paths on Windows get their root stripped off even
// though they are not absolute, so `c:../foo` becomes ['c:', '../foo']
module.exports = path => {
let r = ''
const SLURP = Symbol('slurp') let parsed = parse(path)
module.exports = class ReadEntry extends MiniPass { while (isAbsolute(path) || parsed.root) {
constructor (header, ex, gex) { // windows will think that //x/y/z has a "root" of //x/y/
super() // but strip the //?/C:/ off of //?/C:/path
// read entries always start life paused. this is to avoid the const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/'
// situation where Minipass's auto-ending empty streams results : parsed.root
// in an entry ending before we're ready for it. path = path.substr(root.length)
this.pause() r += root
this.extended = ex parsed = parse(path)
this.globalExtended = gex }
this.header = header return [r, path]
this.startBlockSize = 512 * Math.ceil(header.size / 512) }
this.blockRemain = this.startBlockSize
this.remain = header.size
this.type = header.type
this.meta = false
this.ignore = false
switch (this.type) {
case 'File':
case 'OldFile':
case 'Link':
case 'SymbolicLink':
case 'CharacterDevice':
case 'BlockDevice':
case 'Directory':
case 'FIFO':
case 'ContiguousFile':
case 'GNUDumpDir':
break
case 'NextFileHasLongLinkpath':
case 'NextFileHasLongPath':
case 'OldGnuLongPath':
case 'GlobalExtendedHeader':
case 'ExtendedHeader':
case 'OldExtendedHeader':
this.meta = true
break
// NOTE: gnutar and bsdtar treat unrecognized types as 'File' /***/ }),
// it may be worth doing the same, but with a warning.
default:
this.ignore = true
}
this.path = normPath(header.path) /***/ 8886:
this.mode = header.mode /***/ ((module) => {
if (this.mode)
this.mode = this.mode & 0o7777
this.uid = header.uid
this.gid = header.gid
this.uname = header.uname
this.gname = header.gname
this.size = header.size
this.mtime = header.mtime
this.atime = header.atime
this.ctime = header.ctime
this.linkpath = normPath(header.linkpath)
this.uname = header.uname
this.gname = header.gname
if (ex) // warning: extremely hot code path.
this[SLURP](ex) // This has been meticulously optimized for use
if (gex) // within npm install on large package trees.
this[SLURP](gex, true) // Do not edit without careful benchmarking.
module.exports = str => {
let i = str.length - 1
let slashesStart = -1
while (i > -1 && str.charAt(i) === '/') {
slashesStart = i
i--
} }
return slashesStart === -1 ? str : str.slice(0, slashesStart)
}
write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
const r = this.remain /***/ }),
const br = this.blockRemain
this.remain = Math.max(0, r - writeLen)
this.blockRemain = Math.max(0, br - writeLen)
if (this.ignore)
return true
if (r >= writeLen) /***/ 4173:
return super.write(data) /***/ ((__unused_webpack_module, exports) => {
// r < writeLen "use strict";
return super.write(data.slice(0, r))
}
[SLURP] (ex, global) { // map types from key to human-friendly name
for (const k in ex) { exports.name = new Map([
// we slurp in everything except for the path attribute in ['0', 'File'],
// a global extended header, because that's weird. // same as File
if (ex[k] !== null && ex[k] !== undefined && ['', 'OldFile'],
!(global && k === 'path')) ['1', 'Link'],
this[k] = k === 'path' || k === 'linkpath' ? normPath(ex[k]) : ex[k] ['2', 'SymbolicLink'],
} // Devices and FIFOs aren't fully supported
} // they are parsed, but skipped when unpacking
} ['3', 'CharacterDevice'],
['4', 'BlockDevice'],
['5', 'Directory'],
['6', 'FIFO'],
// same as File
['7', 'ContiguousFile'],
// pax headers
['g', 'GlobalExtendedHeader'],
['x', 'ExtendedHeader'],
// vendor-specific stuff
// skip
['A', 'SolarisACL'],
// like 5, but with data, which should be skipped
['D', 'GNUDumpDir'],
// metadata only, skip
['I', 'Inode'],
// data = link path of next file
['K', 'NextFileHasLongLinkpath'],
// data = path of next file
['L', 'NextFileHasLongPath'],
// skip
['M', 'ContinuationFile'],
// like L
['N', 'OldGnuLongPath'],
// skip
['S', 'SparseFile'],
// skip
['V', 'TapeVolumeHeader'],
// like x
['X', 'OldExtendedHeader'],
])
// map the other direction
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]]))
/***/ }), /***/ }),
/***/ 5923: /***/ 7628:
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
"use strict"; "use strict";
// tar -r // the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet.
const hlo = __nccwpck_require__(5274) // but the path reservations are required to avoid race conditions where
const Pack = __nccwpck_require__(7900) // parallelized unpack ops may mess with one another, due to dependencies
// (like a Link depending on its target) or destructive operations (like
// clobbering an fs object to create one of a different type.)
const assert = __nccwpck_require__(9491)
const Parser = __nccwpck_require__(8917)
const fs = __nccwpck_require__(7147) const fs = __nccwpck_require__(7147)
const fsm = __nccwpck_require__(7714) const fsm = __nccwpck_require__(7714)
const t = __nccwpck_require__(1525)
const path = __nccwpck_require__(1017) const path = __nccwpck_require__(1017)
const mkdir = __nccwpck_require__(9624)
const wc = __nccwpck_require__(4808)
const pathReservations = __nccwpck_require__(9587)
const stripAbsolutePath = __nccwpck_require__(7111)
const normPath = __nccwpck_require__(6843)
const stripSlash = __nccwpck_require__(8886)
const normalize = __nccwpck_require__(7118)
// starting at the head of the file, read a Header const ONENTRY = Symbol('onEntry')
// If the checksum is invalid, that's our position to start writing const CHECKFS = Symbol('checkFs')
// If it is, jump forward by the specified size (round up to 512) const CHECKFS2 = Symbol('checkFs2')
// and try again. const PRUNECACHE = Symbol('pruneCache')
// Write the new Pack stream starting there. const ISREUSABLE = Symbol('isReusable')
const MAKEFS = Symbol('makeFs')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
const PENDING = Symbol('pending')
const PEND = Symbol('pend')
const UNPEND = Symbol('unpend')
const ENDED = Symbol('ended')
const MAYBECLOSE = Symbol('maybeClose')
const SKIP = Symbol('skip')
const DOCHOWN = Symbol('doChown')
const UID = Symbol('uid')
const GID = Symbol('gid')
const CHECKED_CWD = Symbol('checkedCwd')
const crypto = __nccwpck_require__(6113)
const getFlag = __nccwpck_require__(1172)
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
const Header = __nccwpck_require__(6043) // Unlinks on Windows are not atomic.
//
// This means that if you have a file entry, followed by another
// file entry with an identical name, and you cannot re-use the file
// (because it's a hardlink, or because unlink:true is set, or it's
// Windows, which does not have useful nlink values), then the unlink
// will be committed to the disk AFTER the new file has been written
// over the old one, deleting the new file.
//
// To work around this, on Windows systems, we rename the file and then
// delete the renamed file. It's a sloppy kludge, but frankly, I do not
// know of a better way to do this, given windows' non-atomic unlink
// semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* istanbul ignore next */
const unlinkFile = (path, cb) => {
if (!isWindows)
return fs.unlink(path, cb)
module.exports = (opt_, files, cb) => { const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
const opt = hlo(opt_) fs.rename(path, name, er => {
if (er)
return cb(er)
fs.unlink(name, cb)
})
}
if (!opt.file) /* istanbul ignore next */
throw new TypeError('file is required') const unlinkFileSync = path => {
if (!isWindows)
return fs.unlinkSync(path)
if (opt.gzip) const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex')
throw new TypeError('cannot append to compressed archives') fs.renameSync(path, name)
fs.unlinkSync(name)
}
// this.gid, entry.gid, this.processUid
const uint32 = (a, b, c) =>
a === a >>> 0 ? a
: b === b >>> 0 ? b
: c
// clear the cache if it's a case-insensitive unicode-squashing match.
// we can't know if the current file system is case-sensitive or supports
// unicode fully, so we check for similarity on the maximally compatible
// representation. Err on the side of pruning, since all it's doing is
// preventing lstats, and it's not the end of the world if we get a false
// positive.
// Note that on windows, we always drop the entire cache whenever a
// symbolic link is encountered, because 8.3 filenames are impossible
// to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => normalize(stripSlash(normPath(path)))
.toLowerCase()
const pruneCache = (cache, abs) => {
abs = cacheKeyNormalize(abs)
for (const path of cache.keys()) {
const pnorm = cacheKeyNormalize(path)
if (pnorm === abs || pnorm.indexOf(abs + '/') === 0)
cache.delete(path)
}
}
const dropCache = cache => {
for (const key of cache.keys())
cache.delete(key)
}
if (!files || !Array.isArray(files) || !files.length) class Unpack extends Parser {
throw new TypeError('no files or directories specified') constructor (opt) {
if (!opt)
opt = {}
files = Array.from(files) opt.ondone = _ => {
this[ENDED] = true
this[MAYBECLOSE]()
}
return opt.sync ? replaceSync(opt, files) super(opt)
: replace(opt, files, cb)
}
const replaceSync = (opt, files) => { this[CHECKED_CWD] = false
const p = new Pack.Sync(opt)
let threw = true this.reservations = pathReservations()
let fd
let position
try { this.transform = typeof opt.transform === 'function' ? opt.transform : null
try {
fd = fs.openSync(opt.file, 'r+')
} catch (er) {
if (er.code === 'ENOENT')
fd = fs.openSync(opt.file, 'w+')
else
throw er
}
const st = fs.fstatSync(fd) this.writable = true
const headBuf = Buffer.alloc(512) this.readable = false
POSITION: for (position = 0; position < st.size; position += 512) { this[PENDING] = 0
for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { this[ENDED] = false
bytes = fs.readSync(
fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos
)
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) this.dirCache = opt.dirCache || new Map()
throw new Error('cannot append to compressed archives')
if (!bytes) if (typeof opt.uid === 'number' || typeof opt.gid === 'number') {
break POSITION // need both or neither
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number')
throw new TypeError('cannot set owner without number uid and gid')
if (opt.preserveOwner) {
throw new TypeError(
'cannot preserve owner in archive and also set owner explicitly')
} }
this.uid = opt.uid
const h = new Header(headBuf) this.gid = opt.gid
if (!h.cksumValid) this.setOwner = true
break } else {
const entryBlockSize = 512 * Math.ceil(h.size / 512) this.uid = null
if (position + entryBlockSize + 512 > st.size) this.gid = null
break this.setOwner = false
// the 512 for the header we just parsed will be added as well
// also jump ahead all the blocks for the body
position += entryBlockSize
if (opt.mtimeCache)
opt.mtimeCache.set(h.path, h.mtime)
} }
threw = false
streamSync(opt, p, position, fd, files) // default true for root
} finally { if (opt.preserveOwner === undefined && typeof opt.uid !== 'number')
if (threw) { this.preserveOwner = process.getuid && process.getuid() === 0
try { else
fs.closeSync(fd) this.preserveOwner = !!opt.preserveOwner
} catch (er) {}
}
}
}
const streamSync = (opt, p, position, fd, files) => { this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ?
const stream = new fsm.WriteStreamSync(opt.file, { process.getuid() : null
fd: fd, this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
start: position, process.getgid() : null
})
p.pipe(stream)
addFilesSync(p, files)
}
const replace = (opt, files, cb) => { // mostly just for testing, but useful in some cases.
files = Array.from(files) // Forcibly trigger a chown on every entry, no matter what
const p = new Pack(opt) this.forceChown = opt.forceChown === true
const getPos = (fd, size, cb_) => { // turn ><?| in filenames into 0xf000-higher encoded forms
const cb = (er, pos) => { this.win32 = !!opt.win32 || isWindows
if (er)
fs.close(fd, _ => cb_(er))
else
cb_(null, pos)
}
let position = 0 // do not unpack over files that are newer than what's in the archive
if (size === 0) this.newer = !!opt.newer
return cb(null, 0)
let bufPos = 0 // do not unpack over ANY files
const headBuf = Buffer.alloc(512) this.keep = !!opt.keep
const onread = (er, bytes) => {
if (er)
return cb(er)
bufPos += bytes
if (bufPos < 512 && bytes) {
return fs.read(
fd, headBuf, bufPos, headBuf.length - bufPos,
position + bufPos, onread
)
}
if (position === 0 && headBuf[0] === 0x1f && headBuf[1] === 0x8b) // do not set mtime/atime of extracted entries
return cb(new Error('cannot append to compressed archives')) this.noMtime = !!opt.noMtime
// truncated header // allow .., absolute path entries, and unpacking through symlinks
if (bufPos < 512) // without this, warn and skip .., relativize absolutes, and error
return cb(null, position) // on symlinks in extraction path
this.preservePaths = !!opt.preservePaths
const h = new Header(headBuf) // unlink files and links before writing. This breaks existing hard
if (!h.cksumValid) // links, and removes symlink directories rather than erroring
return cb(null, position) this.unlink = !!opt.unlink
const entryBlockSize = 512 * Math.ceil(h.size / 512) this.cwd = normPath(path.resolve(opt.cwd || process.cwd()))
if (position + entryBlockSize + 512 > size) this.strip = +opt.strip || 0
return cb(null, position) // if we're not chmodding, then we don't need the process umask
this.processUmask = opt.noChmod ? 0 : process.umask()
this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
position += entryBlockSize + 512 // default mode for dirs created as parents
if (position >= size) this.dmode = opt.dmode || (0o0777 & (~this.umask))
return cb(null, position) this.fmode = opt.fmode || (0o0666 & (~this.umask))
if (opt.mtimeCache) this.on('entry', entry => this[ONENTRY](entry))
opt.mtimeCache.set(h.path, h.mtime) }
bufPos = 0
fs.read(fd, headBuf, 0, 512, position, onread) // a bad or damaged archive is a warning for Parser, but an error
// when extracting. Mark those errors as unrecoverable, because
// the Unpack contract cannot be met.
warn (code, msg, data = {}) {
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT')
data.recoverable = false
return super.warn(code, msg, data)
}
[MAYBECLOSE] () {
if (this[ENDED] && this[PENDING] === 0) {
this.emit('prefinish')
this.emit('finish')
this.emit('end')
this.emit('close')
} }
fs.read(fd, headBuf, 0, 512, position, onread)
} }
const promise = new Promise((resolve, reject) => { [CHECKPATH] (entry) {
p.on('error', reject) if (this.strip) {
let flag = 'r+' const parts = normPath(entry.path).split('/')
const onopen = (er, fd) => { if (parts.length < this.strip)
if (er && er.code === 'ENOENT' && flag === 'r+') { return false
flag = 'w+' entry.path = parts.slice(this.strip).join('/')
return fs.open(opt.file, flag, onopen)
}
if (er) if (entry.type === 'Link') {
return reject(er) const linkparts = normPath(entry.linkpath).split('/')
if (linkparts.length >= this.strip)
entry.linkpath = linkparts.slice(this.strip).join('/')
else
return false
}
}
fs.fstat(fd, (er, st) => { if (!this.preservePaths) {
if (er) const p = normPath(entry.path)
return fs.close(fd, () => reject(er)) const parts = p.split('/')
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry,
path: p,
})
return false
}
getPos(fd, st.size, (er, position) => { // strip off the root
if (er) const [root, stripped] = stripAbsolutePath(p)
return reject(er) if (root) {
const stream = new fsm.WriteStream(opt.file, { entry.path = stripped
fd: fd, this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, {
start: position, entry,
}) path: p,
p.pipe(stream)
stream.on('error', reject)
stream.on('close', resolve)
addFilesAsync(p, files)
}) })
}
}
if (path.isAbsolute(entry.path))
entry.absolute = normPath(path.resolve(entry.path))
else
entry.absolute = normPath(path.resolve(this.cwd, entry.path))
// if we somehow ended up with a path that escapes the cwd, and we are
// not in preservePaths mode, then something is fishy! This should have
// been prevented above, so ignore this for coverage.
/* istanbul ignore if - defense in depth */
if (!this.preservePaths &&
entry.absolute.indexOf(this.cwd + '/') !== 0 &&
entry.absolute !== this.cwd) {
this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
entry,
path: normPath(entry.path),
resolvedPath: entry.absolute,
cwd: this.cwd,
}) })
return false
} }
fs.open(opt.file, flag, onopen)
})
return cb ? promise.then(cb, cb) : promise // an archive can set properties on the extraction directory, but it
} // may not replace the cwd with a different kind of thing entirely.
if (entry.absolute === this.cwd &&
entry.type !== 'Directory' &&
entry.type !== 'GNUDumpDir')
return false
const addFilesSync = (p, files) => { // only encode : chars that aren't drive letter indicators
files.forEach(file => { if (this.win32) {
if (file.charAt(0) === '@') { const { root: aRoot } = path.win32.parse(entry.absolute)
t({ entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length))
file: path.resolve(p.cwd, file.substr(1)), const { root: pRoot } = path.win32.parse(entry.path)
sync: true, entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length))
noResume: true, }
onentry: entry => p.add(entry),
})
} else
p.add(file)
})
p.end()
}
const addFilesAsync = (p, files) => { return true
while (files.length) {
const file = files.shift()
if (file.charAt(0) === '@') {
return t({
file: path.resolve(p.cwd, file.substr(1)),
noResume: true,
onentry: entry => p.add(entry),
}).then(_ => addFilesAsync(p, files))
} else
p.add(file)
} }
p.end()
}
[ONENTRY] (entry) {
if (!this[CHECKPATH](entry))
return entry.resume()
/***/ }), assert.equal(typeof entry.absolute, 'string')
/***/ 7111: switch (entry.type) {
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { case 'Directory':
case 'GNUDumpDir':
if (entry.mode)
entry.mode = entry.mode | 0o700
// unix absolute paths are also absolute on win32, so we use this for both case 'File':
const { isAbsolute, parse } = (__nccwpck_require__(1017).win32) case 'OldFile':
case 'ContiguousFile':
case 'Link':
case 'SymbolicLink':
return this[CHECKFS](entry)
// returns [root, stripped] case 'CharacterDevice':
// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in case 'BlockDevice':
// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / case 'FIFO':
// explicitly if it's the first character. default:
// drive-specific relative paths on Windows get their root stripped off even return this[UNSUPPORTED](entry)
// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] }
module.exports = path => { }
let r = ''
let parsed = parse(path) [ONERROR] (er, entry) {
while (isAbsolute(path) || parsed.root) { // Cwd has to exist, or else nothing works. That's serious.
// windows will think that //x/y/z has a "root" of //x/y/ // Other errors are warnings, which raise the error in strict
// but strip the //?/C:/ off of //?/C:/path // mode, but otherwise continue on.
const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? '/' if (er.name === 'CwdError')
: parsed.root this.emit('error', er)
path = path.substr(root.length) else {
r += root this.warn('TAR_ENTRY_ERROR', er, {entry})
parsed = parse(path) this[UNPEND]()
entry.resume()
}
} }
return [r, path]
}
[MKDIR] (dir, mode, cb) {
mkdir(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
noChmod: this.noChmod,
}, cb)
}
/***/ }), [DOCHOWN] (entry) {
// in preserve owner mode, chown if the entry doesn't match process
// in set owner mode, chown if setting doesn't match process
return this.forceChown ||
this.preserveOwner &&
(typeof entry.uid === 'number' && entry.uid !== this.processUid ||
typeof entry.gid === 'number' && entry.gid !== this.processGid)
||
(typeof this.uid === 'number' && this.uid !== this.processUid ||
typeof this.gid === 'number' && this.gid !== this.processGid)
}
/***/ 8886: [UID] (entry) {
/***/ ((module) => { return uint32(this.uid, entry.uid, this.processUid)
}
// warning: extremely hot code path. [GID] (entry) {
// This has been meticulously optimized for use return uint32(this.gid, entry.gid, this.processGid)
// within npm install on large package trees.
// Do not edit without careful benchmarking.
module.exports = str => {
let i = str.length - 1
let slashesStart = -1
while (i > -1 && str.charAt(i) === '/') {
slashesStart = i
i--
} }
return slashesStart === -1 ? str : str.slice(0, slashesStart)
}
[FILE] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.fmode
const stream = new fsm.WriteStream(entry.absolute, {
flags: getFlag(entry.size),
mode: mode,
autoClose: false,
})
stream.on('error', er => {
if (stream.fd)
fs.close(stream.fd, () => {})
/***/ }), // flush all the data out so that we aren't left hanging
// if the error wasn't actually fatal. otherwise the parse
// is blocked, and we never proceed.
stream.write = () => true
this[ONERROR](er, entry)
fullyDone()
})
/***/ 4173: let actions = 1
/***/ ((__unused_webpack_module, exports) => { const done = er => {
if (er) {
/* istanbul ignore else - we should always have a fd by now */
if (stream.fd)
fs.close(stream.fd, () => {})
"use strict"; this[ONERROR](er, entry)
fullyDone()
return
}
// map types from key to human-friendly name if (--actions === 0) {
exports.name = new Map([ fs.close(stream.fd, er => {
['0', 'File'], if (er)
// same as File this[ONERROR](er, entry)
['', 'OldFile'], else
['1', 'Link'], this[UNPEND]()
['2', 'SymbolicLink'], fullyDone()
// Devices and FIFOs aren't fully supported })
// they are parsed, but skipped when unpacking }
['3', 'CharacterDevice'], }
['4', 'BlockDevice'],
['5', 'Directory'], stream.on('finish', _ => {
['6', 'FIFO'], // if futimes fails, try utimes
// same as File // if utimes fails, fail with the original error
['7', 'ContiguousFile'], // same for fchown/chown
// pax headers const abs = entry.absolute
['g', 'GlobalExtendedHeader'], const fd = stream.fd
['x', 'ExtendedHeader'],
// vendor-specific stuff if (entry.mtime && !this.noMtime) {
// skip actions++
['A', 'SolarisACL'], const atime = entry.atime || new Date()
// like 5, but with data, which should be skipped const mtime = entry.mtime
['D', 'GNUDumpDir'], fs.futimes(fd, atime, mtime, er =>
// metadata only, skip er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
['I', 'Inode'], : done())
// data = link path of next file }
['K', 'NextFileHasLongLinkpath'],
// data = path of next file if (this[DOCHOWN](entry)) {
['L', 'NextFileHasLongPath'], actions++
// skip const uid = this[UID](entry)
['M', 'ContinuationFile'], const gid = this[GID](entry)
// like L fs.fchown(fd, uid, gid, er =>
['N', 'OldGnuLongPath'], er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
// skip : done())
['S', 'SparseFile'], }
// skip
['V', 'TapeVolumeHeader'],
// like x
['X', 'OldExtendedHeader'],
])
// map the other direction done()
exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]])) })
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => {
this[ONERROR](er, entry)
fullyDone()
})
entry.pipe(tx)
}
tx.pipe(stream)
}
/***/ }), [DIRECTORY] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.dmode
this[MKDIR](entry.absolute, mode, er => {
if (er) {
this[ONERROR](er, entry)
fullyDone()
return
}
/***/ 7628: let actions = 1
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { const done = _ => {
if (--actions === 0) {
fullyDone()
this[UNPEND]()
entry.resume()
}
}
"use strict"; if (entry.mtime && !this.noMtime) {
actions++
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
}
if (this[DOCHOWN](entry)) {
actions++
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
}
// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. done()
// but the path reservations are required to avoid race conditions where })
// parallelized unpack ops may mess with one another, due to dependencies }
// (like a Link depending on its target) or destructive operations (like
// clobbering an fs object to create one of a different type.)
const assert = __nccwpck_require__(9491) [UNSUPPORTED] (entry) {
const Parser = __nccwpck_require__(8917) entry.unsupported = true
const fs = __nccwpck_require__(7147) this.warn('TAR_ENTRY_UNSUPPORTED',
const fsm = __nccwpck_require__(7714) `unsupported entry type: ${entry.type}`, {entry})
const path = __nccwpck_require__(1017) entry.resume()
const mkdir = __nccwpck_require__(9624) }
const wc = __nccwpck_require__(4808)
const pathReservations = __nccwpck_require__(9587)
const stripAbsolutePath = __nccwpck_require__(7111)
const normPath = __nccwpck_require__(6843)
const stripSlash = __nccwpck_require__(8886)
const normalize = __nccwpck_require__(7118)
const ONENTRY = Symbol('onEntry') [SYMLINK] (entry, done) {
const CHECKFS = Symbol('checkFs') this[LINK](entry, entry.linkpath, 'symlink', done)
const CHECKFS2 = Symbol('checkFs2') }
const PRUNECACHE = Symbol('pruneCache')
const ISREUSABLE = Symbol('isReusable')
const MAKEFS = Symbol('makeFs')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const LINK = Symbol('link')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const UNSUPPORTED = Symbol('unsupported')
const CHECKPATH = Symbol('checkPath')
const MKDIR = Symbol('mkdir')
const ONERROR = Symbol('onError')
const PENDING = Symbol('pending')
const PEND = Symbol('pend')
const UNPEND = Symbol('unpend')
const ENDED = Symbol('ended')
const MAYBECLOSE = Symbol('maybeClose')
const SKIP = Symbol('skip')
const DOCHOWN = Symbol('doChown')
const UID = Symbol('uid')
const GID = Symbol('gid')
const CHECKED_CWD = Symbol('checkedCwd')
const crypto = __nccwpck_require__(6113)
const getFlag = __nccwpck_require__(1172)
const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform
const isWindows = platform === 'win32'
// Unlinks on Windows are not atomic. [HARDLINK] (entry, done) {
// const linkpath = normPath(path.resolve(this.cwd, entry.linkpath))
// This means that if you have a file entry, followed by another this[LINK](entry, linkpath, 'link', done)
// file entry with an identical name, and you cannot re-use the file }
// (because it's a hardlink, or because unlink:true is set, or it's
// Windows, which does not have useful nlink values), then the unlink
// will be committed to the disk AFTER the new file has been written
// over the old one, deleting the new file.
//
// To work around this, on Windows systems, we rename the file and then
// delete the renamed file. It's a sloppy kludge, but frankly, I do not
// know of a better way to do this, given windows' non-atomic unlink
// semantics.
//
// See: https://github.com/npm/node-tar/issues/183
/* istanbul ignore next */
const unlinkFile = (path, cb) => {
if (!isWindows)
return fs.unlink(path, cb)
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') [PEND] () {
fs.rename(path, name, er => { this[PENDING]++
if (er) }
return cb(er)
fs.unlink(name, cb)
})
}
/* istanbul ignore next */ [UNPEND] () {
const unlinkFileSync = path => { this[PENDING]--
if (!isWindows) this[MAYBECLOSE]()
return fs.unlinkSync(path) }
const name = path + '.DELETE.' + crypto.randomBytes(16).toString('hex') [SKIP] (entry) {
fs.renameSync(path, name) this[UNPEND]()
fs.unlinkSync(name) entry.resume()
} }
// this.gid, entry.gid, this.processUid // Check if we can reuse an existing filesystem entry safely and
const uint32 = (a, b, c) => // overwrite it, rather than unlinking and recreating
a === a >>> 0 ? a // Windows doesn't report a useful nlink, so we just never reuse entries
: b === b >>> 0 ? b [ISREUSABLE] (entry, st) {
: c return entry.type === 'File' &&
!this.unlink &&
st.isFile() &&
st.nlink <= 1 &&
!isWindows
}
// clear the cache if it's a case-insensitive unicode-squashing match. // check if a thing is there, and if so, try to clobber it
// we can't know if the current file system is case-sensitive or supports [CHECKFS] (entry) {
// unicode fully, so we check for similarity on the maximally compatible this[PEND]()
// representation. Err on the side of pruning, since all it's doing is const paths = [entry.path]
// preventing lstats, and it's not the end of the world if we get a false if (entry.linkpath)
// positive. paths.push(entry.linkpath)
// Note that on windows, we always drop the entire cache whenever a this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
// symbolic link is encountered, because 8.3 filenames are impossible }
// to reason about, and collisions are hazards rather than just failures.
const cacheKeyNormalize = path => normalize(stripSlash(normPath(path)))
.toLowerCase()
const pruneCache = (cache, abs) => { [PRUNECACHE] (entry) {
abs = cacheKeyNormalize(abs) // if we are not creating a directory, and the path is in the dirCache,
for (const path of cache.keys()) { // then that means we are about to delete the directory we created
const pnorm = cacheKeyNormalize(path) // previously, and it is no longer going to be a directory, and neither
if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) // is any of its children.
cache.delete(path) // If a symbolic link is encountered, all bets are off. There is no
// reasonable way to sanitize the cache in such a way we will be able to
// avoid having filesystem collisions. If this happens with a non-symlink
// entry, it'll just fail to unpack, but a symlink to a directory, using an
// 8.3 shortname or certain unicode attacks, can evade detection and lead
// to arbitrary writes to anywhere on the system.
if (entry.type === 'SymbolicLink')
dropCache(this.dirCache)
else if (entry.type !== 'Directory')
pruneCache(this.dirCache, entry.absolute)
} }
}
const dropCache = cache => { [CHECKFS2] (entry, fullyDone) {
for (const key of cache.keys()) this[PRUNECACHE](entry)
cache.delete(key)
}
class Unpack extends Parser { const done = er => {
constructor (opt) { this[PRUNECACHE](entry)
if (!opt) fullyDone(er)
opt = {} }
opt.ondone = _ => { const checkCwd = () => {
this[ENDED] = true this[MKDIR](this.cwd, this.dmode, er => {
this[MAYBECLOSE]() if (er) {
this[ONERROR](er, entry)
done()
return
}
this[CHECKED_CWD] = true
start()
})
}
const start = () => {
if (entry.absolute !== this.cwd) {
const parent = normPath(path.dirname(entry.absolute))
if (parent !== this.cwd) {
return this[MKDIR](parent, this.dmode, er => {
if (er) {
this[ONERROR](er, entry)
done()
return
}
afterMakeParent()
})
}
}
afterMakeParent()
} }
super(opt) const afterMakeParent = () => {
fs.lstat(entry.absolute, (lstatEr, st) => {
this[CHECKED_CWD] = false if (st && (this.keep || this.newer && st.mtime > entry.mtime)) {
this[SKIP](entry)
this.reservations = pathReservations() done()
return
}
if (lstatEr || this[ISREUSABLE](entry, st))
return this[MAKEFS](null, entry, done)
this.transform = typeof opt.transform === 'function' ? opt.transform : null if (st.isDirectory()) {
if (entry.type === 'Directory') {
const needChmod = !this.noChmod &&
entry.mode &&
(st.mode & 0o7777) !== entry.mode
const afterChmod = er => this[MAKEFS](er, entry, done)
if (!needChmod)
return afterChmod()
return fs.chmod(entry.absolute, entry.mode, afterChmod)
}
// Not a dir entry, have to remove it.
// NB: the only way to end up with an entry that is the cwd
// itself, in such a way that == does not detect, is a
// tricky windows absolute path with UNC or 8.3 parts (and
// preservePaths:true, or else it will have been stripped).
// In that case, the user has opted out of path protections
// explicitly, so if they blow away the cwd, c'est la vie.
if (entry.absolute !== this.cwd) {
return fs.rmdir(entry.absolute, er =>
this[MAKEFS](er, entry, done))
}
}
this.writable = true // not a dir, and not reusable
this.readable = false // don't remove if the cwd, we want that error
if (entry.absolute === this.cwd)
return this[MAKEFS](null, entry, done)
this[PENDING] = 0 unlinkFile(entry.absolute, er =>
this[ENDED] = false this[MAKEFS](er, entry, done))
})
}
this.dirCache = opt.dirCache || new Map() if (this[CHECKED_CWD])
start()
else
checkCwd()
}
if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { [MAKEFS] (er, entry, done) {
// need both or neither if (er) {
if (typeof opt.uid !== 'number' || typeof opt.gid !== 'number') this[ONERROR](er, entry)
throw new TypeError('cannot set owner without number uid and gid') done()
if (opt.preserveOwner) { return
throw new TypeError(
'cannot preserve owner in archive and also set owner explicitly')
}
this.uid = opt.uid
this.gid = opt.gid
this.setOwner = true
} else {
this.uid = null
this.gid = null
this.setOwner = false
} }
// default true for root switch (entry.type) {
if (opt.preserveOwner === undefined && typeof opt.uid !== 'number') case 'File':
this.preserveOwner = process.getuid && process.getuid() === 0 case 'OldFile':
else case 'ContiguousFile':
this.preserveOwner = !!opt.preserveOwner return this[FILE](entry, done)
this.processUid = (this.preserveOwner || this.setOwner) && process.getuid ? case 'Link':
process.getuid() : null return this[HARDLINK](entry, done)
this.processGid = (this.preserveOwner || this.setOwner) && process.getgid ?
process.getgid() : null
// mostly just for testing, but useful in some cases. case 'SymbolicLink':
// Forcibly trigger a chown on every entry, no matter what return this[SYMLINK](entry, done)
this.forceChown = opt.forceChown === true
// turn ><?| in filenames into 0xf000-higher encoded forms case 'Directory':
this.win32 = !!opt.win32 || isWindows case 'GNUDumpDir':
return this[DIRECTORY](entry, done)
}
}
// do not unpack over files that are newer than what's in the archive [LINK] (entry, linkpath, link, done) {
this.newer = !!opt.newer // XXX: get the type ('symlink' or 'junction') for windows
fs[link](linkpath, entry.absolute, er => {
if (er)
this[ONERROR](er, entry)
else {
this[UNPEND]()
entry.resume()
}
done()
})
}
}
// do not unpack over ANY files const callSync = fn => {
this.keep = !!opt.keep try {
return [null, fn()]
} catch (er) {
return [er, null]
}
}
class UnpackSync extends Unpack {
[MAKEFS] (er, entry) {
return super[MAKEFS](er, entry, () => {})
}
// do not set mtime/atime of extracted entries [CHECKFS] (entry) {
this.noMtime = !!opt.noMtime this[PRUNECACHE](entry)
// allow .., absolute path entries, and unpacking through symlinks if (!this[CHECKED_CWD]) {
// without this, warn and skip .., relativize absolutes, and error const er = this[MKDIR](this.cwd, this.dmode)
// on symlinks in extraction path if (er)
this.preservePaths = !!opt.preservePaths return this[ONERROR](er, entry)
this[CHECKED_CWD] = true
}
// unlink files and links before writing. This breaks existing hard // don't bother to make the parent if the current entry is the cwd,
// links, and removes symlink directories rather than erroring // we've already checked it.
this.unlink = !!opt.unlink if (entry.absolute !== this.cwd) {
const parent = normPath(path.dirname(entry.absolute))
if (parent !== this.cwd) {
const mkParent = this[MKDIR](parent, this.dmode)
if (mkParent)
return this[ONERROR](mkParent, entry)
}
}
this.cwd = normPath(path.resolve(opt.cwd || process.cwd())) const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute))
this.strip = +opt.strip || 0 if (st && (this.keep || this.newer && st.mtime > entry.mtime))
// if we're not chmodding, then we don't need the process umask return this[SKIP](entry)
this.processUmask = opt.noChmod ? 0 : process.umask()
this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask
// default mode for dirs created as parents if (lstatEr || this[ISREUSABLE](entry, st))
this.dmode = opt.dmode || (0o0777 & (~this.umask)) return this[MAKEFS](null, entry)
this.fmode = opt.fmode || (0o0666 & (~this.umask))
this.on('entry', entry => this[ONENTRY](entry)) if (st.isDirectory()) {
} if (entry.type === 'Directory') {
const needChmod = !this.noChmod &&
entry.mode &&
(st.mode & 0o7777) !== entry.mode
const [er] = needChmod ? callSync(() => {
fs.chmodSync(entry.absolute, entry.mode)
}) : []
return this[MAKEFS](er, entry)
}
// not a dir entry, have to remove it
const [er] = callSync(() => fs.rmdirSync(entry.absolute))
this[MAKEFS](er, entry)
}
// a bad or damaged archive is a warning for Parser, but an error // not a dir, and not reusable.
// when extracting. Mark those errors as unrecoverable, because // don't remove if it's the cwd, since we want that error.
// the Unpack contract cannot be met. const [er] = entry.absolute === this.cwd ? []
warn (code, msg, data = {}) { : callSync(() => unlinkFileSync(entry.absolute))
if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') this[MAKEFS](er, entry)
data.recoverable = false
return super.warn(code, msg, data)
} }
[MAYBECLOSE] () { [FILE] (entry, done) {
if (this[ENDED] && this[PENDING] === 0) { const mode = entry.mode & 0o7777 || this.fmode
this.emit('prefinish')
this.emit('finish') const oner = er => {
this.emit('end') let closeError
this.emit('close') try {
fs.closeSync(fd)
} catch (e) {
closeError = e
}
if (er || closeError)
this[ONERROR](er || closeError, entry)
done()
} }
}
[CHECKPATH] (entry) { let fd
if (this.strip) { try {
const parts = normPath(entry.path).split('/') fd = fs.openSync(entry.absolute, getFlag(entry.size), mode)
if (parts.length < this.strip) } catch (er) {
return false return oner(er)
entry.path = parts.slice(this.strip).join('/') }
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) {
tx.on('error', er => this[ONERROR](er, entry))
entry.pipe(tx)
}
tx.on('data', chunk => {
try {
fs.writeSync(fd, chunk, 0, chunk.length)
} catch (er) {
oner(er)
}
})
tx.on('end', _ => {
let er = null
// try both, falling futimes back to utimes
// if either fails, handle the first error
if (entry.mtime && !this.noMtime) {
const atime = entry.atime || new Date()
const mtime = entry.mtime
try {
fs.futimesSync(fd, atime, mtime)
} catch (futimeser) {
try {
fs.utimesSync(entry.absolute, atime, mtime)
} catch (utimeser) {
er = futimeser
}
}
}
if (this[DOCHOWN](entry)) {
const uid = this[UID](entry)
const gid = this[GID](entry)
if (entry.type === 'Link') { try {
const linkparts = normPath(entry.linkpath).split('/') fs.fchownSync(fd, uid, gid)
if (linkparts.length >= this.strip) } catch (fchowner) {
entry.linkpath = linkparts.slice(this.strip).join('/') try {
else fs.chownSync(entry.absolute, uid, gid)
return false } catch (chowner) {
er = er || fchowner
}
}
} }
}
if (!this.preservePaths) { oner(er)
const p = normPath(entry.path) })
const parts = p.split('/') }
if (parts.includes('..') || isWindows && /^[a-z]:\.\.$/i.test(parts[0])) {
this.warn('TAR_ENTRY_ERROR', `path contains '..'`, {
entry,
path: p,
})
return false
}
// strip off the root [DIRECTORY] (entry, done) {
const [root, stripped] = stripAbsolutePath(p) const mode = entry.mode & 0o7777 || this.dmode
if (root) { const er = this[MKDIR](entry.absolute, mode)
entry.path = stripped if (er) {
this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { this[ONERROR](er, entry)
entry, done()
path: p, return
})
}
} }
if (entry.mtime && !this.noMtime) {
try {
fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime)
} catch (er) {}
}
if (this[DOCHOWN](entry)) {
try {
fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
} catch (er) {}
}
done()
entry.resume()
}
if (path.isAbsolute(entry.path)) [MKDIR] (dir, mode) {
entry.absolute = normPath(path.resolve(entry.path)) try {
else return mkdir.sync(normPath(dir), {
entry.absolute = normPath(path.resolve(this.cwd, entry.path)) uid: this.uid,
gid: this.gid,
// if we somehow ended up with a path that escapes the cwd, and we are processUid: this.processUid,
// not in preservePaths mode, then something is fishy! This should have processGid: this.processGid,
// been prevented above, so ignore this for coverage. umask: this.processUmask,
/* istanbul ignore if - defense in depth */ preserve: this.preservePaths,
if (!this.preservePaths && unlink: this.unlink,
entry.absolute.indexOf(this.cwd + '/') !== 0 && cache: this.dirCache,
entry.absolute !== this.cwd) {
this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', {
entry,
path: normPath(entry.path),
resolvedPath: entry.absolute,
cwd: this.cwd, cwd: this.cwd,
mode: mode,
}) })
return false } catch (er) {
return er
} }
}
// an archive can set properties on the extraction directory, but it [LINK] (entry, linkpath, link, done) {
// may not replace the cwd with a different kind of thing entirely. try {
if (entry.absolute === this.cwd && fs[link + 'Sync'](linkpath, entry.absolute)
entry.type !== 'Directory' && done()
entry.type !== 'GNUDumpDir') entry.resume()
return false } catch (er) {
return this[ONERROR](er, entry)
// only encode : chars that aren't drive letter indicators
if (this.win32) {
const { root: aRoot } = path.win32.parse(entry.absolute)
entry.absolute = aRoot + wc.encode(entry.absolute.substr(aRoot.length))
const { root: pRoot } = path.win32.parse(entry.path)
entry.path = pRoot + wc.encode(entry.path.substr(pRoot.length))
} }
return true
} }
}
[ONENTRY] (entry) { Unpack.Sync = UnpackSync
if (!this[CHECKPATH](entry)) module.exports = Unpack
return entry.resume()
assert.equal(typeof entry.absolute, 'string')
switch (entry.type) { /***/ }),
case 'Directory':
case 'GNUDumpDir':
if (entry.mode)
entry.mode = entry.mode | 0o700
case 'File': /***/ 407:
case 'OldFile': /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
case 'ContiguousFile':
case 'Link':
case 'SymbolicLink':
return this[CHECKFS](entry)
case 'CharacterDevice': "use strict";
case 'BlockDevice':
case 'FIFO':
default:
return this[UNSUPPORTED](entry)
}
}
[ONERROR] (er, entry) {
// Cwd has to exist, or else nothing works. That's serious.
// Other errors are warnings, which raise the error in strict
// mode, but otherwise continue on.
if (er.name === 'CwdError')
this.emit('error', er)
else {
this.warn('TAR_ENTRY_ERROR', er, {entry})
this[UNPEND]()
entry.resume()
}
}
[MKDIR] (dir, mode, cb) { // tar -u
mkdir(normPath(dir), {
uid: this.uid,
gid: this.gid,
processUid: this.processUid,
processGid: this.processGid,
umask: this.processUmask,
preserve: this.preservePaths,
unlink: this.unlink,
cache: this.dirCache,
cwd: this.cwd,
mode: mode,
noChmod: this.noChmod,
}, cb)
}
[DOCHOWN] (entry) { const hlo = __nccwpck_require__(5274)
// in preserve owner mode, chown if the entry doesn't match process const r = __nccwpck_require__(5923)
// in set owner mode, chown if setting doesn't match process // just call tar.r with the filter and mtimeCache
return this.forceChown ||
this.preserveOwner &&
(typeof entry.uid === 'number' && entry.uid !== this.processUid ||
typeof entry.gid === 'number' && entry.gid !== this.processGid)
||
(typeof this.uid === 'number' && this.uid !== this.processUid ||
typeof this.gid === 'number' && this.gid !== this.processGid)
}
[UID] (entry) { module.exports = (opt_, files, cb) => {
return uint32(this.uid, entry.uid, this.processUid) const opt = hlo(opt_)
}
[GID] (entry) { if (!opt.file)
return uint32(this.gid, entry.gid, this.processGid) throw new TypeError('file is required')
}
[FILE] (entry, fullyDone) { if (opt.gzip)
const mode = entry.mode & 0o7777 || this.fmode throw new TypeError('cannot append to compressed archives')
const stream = new fsm.WriteStream(entry.absolute, {
flags: getFlag(entry.size),
mode: mode,
autoClose: false,
})
stream.on('error', er => {
if (stream.fd)
fs.close(stream.fd, () => {})
// flush all the data out so that we aren't left hanging if (!files || !Array.isArray(files) || !files.length)
// if the error wasn't actually fatal. otherwise the parse throw new TypeError('no files or directories specified')
// is blocked, and we never proceed.
stream.write = () => true
this[ONERROR](er, entry)
fullyDone()
})
let actions = 1 files = Array.from(files)
const done = er => {
if (er) {
/* istanbul ignore else - we should always have a fd by now */
if (stream.fd)
fs.close(stream.fd, () => {})
this[ONERROR](er, entry) mtimeFilter(opt)
fullyDone() return r(opt, files, cb)
return }
}
if (--actions === 0) { const mtimeFilter = opt => {
fs.close(stream.fd, er => { const filter = opt.filter
if (er)
this[ONERROR](er, entry) if (!opt.mtimeCache)
else opt.mtimeCache = new Map()
this[UNPEND]()
fullyDone() opt.filter = filter ? (path, stat) =>
}) filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
} : (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
} }
stream.on('finish', _ => {
// if futimes fails, try utimes
// if utimes fails, fail with the original error
// same for fchown/chown
const abs = entry.absolute
const fd = stream.fd
if (entry.mtime && !this.noMtime) { /***/ }),
actions++
const atime = entry.atime || new Date()
const mtime = entry.mtime
fs.futimes(fd, atime, mtime, er =>
er ? fs.utimes(abs, atime, mtime, er2 => done(er2 && er))
: done())
}
if (this[DOCHOWN](entry)) { /***/ 5899:
actions++ /***/ ((module) => {
const uid = this[UID](entry)
const gid = this[GID](entry)
fs.fchown(fd, uid, gid, er =>
er ? fs.chown(abs, uid, gid, er2 => done(er2 && er))
: done())
}
done() "use strict";
})
const tx = this.transform ? this.transform(entry) || entry : entry module.exports = Base => class extends Base {
if (tx !== entry) { warn (code, message, data = {}) {
tx.on('error', er => { if (this.file)
this[ONERROR](er, entry) data.file = this.file
fullyDone() if (this.cwd)
}) data.cwd = this.cwd
entry.pipe(tx) data.code = message instanceof Error && message.code || code
} data.tarCode = code
tx.pipe(stream) if (!this.strict && data.recoverable !== false) {
if (message instanceof Error) {
data = Object.assign(message, data)
message = message.message
}
this.emit('warn', data.tarCode, message, data)
} else if (message instanceof Error)
this.emit('error', Object.assign(message, data))
else
this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
} }
}
[DIRECTORY] (entry, fullyDone) {
const mode = entry.mode & 0o7777 || this.dmode
this[MKDIR](entry.absolute, mode, er => {
if (er) {
this[ONERROR](er, entry)
fullyDone()
return
}
let actions = 1 /***/ }),
const done = _ => {
if (--actions === 0) {
fullyDone()
this[UNPEND]()
entry.resume()
}
}
if (entry.mtime && !this.noMtime) { /***/ 4808:
actions++ /***/ ((module) => {
fs.utimes(entry.absolute, entry.atime || new Date(), entry.mtime, done)
}
if (this[DOCHOWN](entry)) { "use strict";
actions++
fs.chown(entry.absolute, this[UID](entry), this[GID](entry), done)
}
done()
})
}
[UNSUPPORTED] (entry) { // When writing files on Windows, translate the characters to their
entry.unsupported = true // 0xf000 higher-encoded versions.
this.warn('TAR_ENTRY_UNSUPPORTED',
`unsupported entry type: ${entry.type}`, {entry})
entry.resume()
}
[SYMLINK] (entry, done) { const raw = [
this[LINK](entry, entry.linkpath, 'symlink', done) '|',
} '<',
'>',
'?',
':',
]
[HARDLINK] (entry, done) { const win = raw.map(char =>
const linkpath = normPath(path.resolve(this.cwd, entry.linkpath)) String.fromCharCode(0xf000 + char.charCodeAt(0)))
this[LINK](entry, linkpath, 'link', done)
}
[PEND] () { const toWin = new Map(raw.map((char, i) => [char, win[i]]))
this[PENDING]++ const toRaw = new Map(win.map((char, i) => [char, raw[i]]))
}
[UNPEND] () { module.exports = {
this[PENDING]-- encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s),
this[MAYBECLOSE]() decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s),
} }
[SKIP] (entry) {
this[UNPEND]()
entry.resume()
}
// Check if we can reuse an existing filesystem entry safely and /***/ }),
// overwrite it, rather than unlinking and recreating
// Windows doesn't report a useful nlink, so we just never reuse entries
[ISREUSABLE] (entry, st) {
return entry.type === 'File' &&
!this.unlink &&
st.isFile() &&
st.nlink <= 1 &&
!isWindows
}
// check if a thing is there, and if so, try to clobber it /***/ 5450:
[CHECKFS] (entry) { /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
this[PEND]()
const paths = [entry.path]
if (entry.linkpath)
paths.push(entry.linkpath)
this.reservations.reserve(paths, done => this[CHECKFS2](entry, done))
}
[PRUNECACHE] (entry) { "use strict";
// if we are not creating a directory, and the path is in the dirCache,
// then that means we are about to delete the directory we created
// previously, and it is no longer going to be a directory, and neither
// is any of its children.
// If a symbolic link is encountered, all bets are off. There is no
// reasonable way to sanitize the cache in such a way we will be able to
// avoid having filesystem collisions. If this happens with a non-symlink
// entry, it'll just fail to unpack, but a symlink to a directory, using an
// 8.3 shortname or certain unicode attacks, can evade detection and lead
// to arbitrary writes to anywhere on the system.
if (entry.type === 'SymbolicLink')
dropCache(this.dirCache)
else if (entry.type !== 'Directory')
pruneCache(this.dirCache, entry.absolute)
}
[CHECKFS2] (entry, fullyDone) { const MiniPass = __nccwpck_require__(6684)
this[PRUNECACHE](entry) const Pax = __nccwpck_require__(7996)
const Header = __nccwpck_require__(6043)
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
const normPath = __nccwpck_require__(6843)
const stripSlash = __nccwpck_require__(8886)
const done = er => { const prefixPath = (path, prefix) => {
this[PRUNECACHE](entry) if (!prefix)
fullyDone(er) return normPath(path)
} path = normPath(path).replace(/^\.(\/|$)/, '')
return stripSlash(prefix) + '/' + path
}
const checkCwd = () => { const maxReadSize = 16 * 1024 * 1024
this[MKDIR](this.cwd, this.dmode, er => { const PROCESS = Symbol('process')
if (er) { const FILE = Symbol('file')
this[ONERROR](er, entry) const DIRECTORY = Symbol('directory')
done() const SYMLINK = Symbol('symlink')
return const HARDLINK = Symbol('hardlink')
} const HEADER = Symbol('header')
this[CHECKED_CWD] = true const READ = Symbol('read')
start() const LSTAT = Symbol('lstat')
}) const ONLSTAT = Symbol('onlstat')
} const ONREAD = Symbol('onread')
const ONREADLINK = Symbol('onreadlink')
const OPENFILE = Symbol('openfile')
const ONOPENFILE = Symbol('onopenfile')
const CLOSE = Symbol('close')
const MODE = Symbol('mode')
const AWAITDRAIN = Symbol('awaitDrain')
const ONDRAIN = Symbol('ondrain')
const PREFIX = Symbol('prefix')
const HAD_ERROR = Symbol('hadError')
const warner = __nccwpck_require__(5899)
const winchars = __nccwpck_require__(4808)
const stripAbsolutePath = __nccwpck_require__(7111)
const modeFix = __nccwpck_require__(8371)
const WriteEntry = warner(class WriteEntry extends MiniPass {
constructor (p, opt) {
opt = opt || {}
super(opt)
if (typeof p !== 'string')
throw new TypeError('path is required')
this.path = normPath(p)
// suppress atime, ctime, uid, gid, uname, gname
this.portable = !!opt.portable
// until node has builtin pwnam functions, this'll have to do
this.myuid = process.getuid && process.getuid() || 0
this.myuser = process.env.USER || ''
this.maxReadSize = opt.maxReadSize || maxReadSize
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.preservePaths = !!opt.preservePaths
this.cwd = normPath(opt.cwd || process.cwd())
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.prefix = opt.prefix ? normPath(opt.prefix) : null
this.fd = null
this.blockLen = null
this.blockRemain = null
this.buf = null
this.offset = null
this.length = null
this.pos = null
this.remain = null
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
const start = () => { let pathWarn = false
if (entry.absolute !== this.cwd) { if (!this.preservePaths) {
const parent = normPath(path.dirname(entry.absolute)) const [root, stripped] = stripAbsolutePath(this.path)
if (parent !== this.cwd) { if (root) {
return this[MKDIR](parent, this.dmode, er => { this.path = stripped
if (er) { pathWarn = root
this[ONERROR](er, entry)
done()
return
}
afterMakeParent()
})
}
} }
afterMakeParent()
} }
const afterMakeParent = () => { this.win32 = !!opt.win32 || process.platform === 'win32'
fs.lstat(entry.absolute, (lstatEr, st) => { if (this.win32) {
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) { // force the \ to / normalization, since we might not *actually*
this[SKIP](entry) // be on windows, but want \ to be considered a path separator.
done() this.path = winchars.decode(this.path.replace(/\\/g, '/'))
return p = p.replace(/\\/g, '/')
} }
if (lstatEr || this[ISREUSABLE](entry, st))
return this[MAKEFS](null, entry, done)
if (st.isDirectory()) { this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p))
if (entry.type === 'Directory') {
const needChmod = !this.noChmod &&
entry.mode &&
(st.mode & 0o7777) !== entry.mode
const afterChmod = er => this[MAKEFS](er, entry, done)
if (!needChmod)
return afterChmod()
return fs.chmod(entry.absolute, entry.mode, afterChmod)
}
// Not a dir entry, have to remove it.
// NB: the only way to end up with an entry that is the cwd
// itself, in such a way that == does not detect, is a
// tricky windows absolute path with UNC or 8.3 parts (and
// preservePaths:true, or else it will have been stripped).
// In that case, the user has opted out of path protections
// explicitly, so if they blow away the cwd, c'est la vie.
if (entry.absolute !== this.cwd) {
return fs.rmdir(entry.absolute, er =>
this[MAKEFS](er, entry, done))
}
}
// not a dir, and not reusable if (this.path === '')
// don't remove if the cwd, we want that error this.path = './'
if (entry.absolute === this.cwd)
return this[MAKEFS](null, entry, done)
unlinkFile(entry.absolute, er => if (pathWarn) {
this[MAKEFS](er, entry, done)) this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
entry: this,
path: pathWarn + this.path,
}) })
} }
if (this[CHECKED_CWD]) if (this.statCache.has(this.absolute))
start() this[ONLSTAT](this.statCache.get(this.absolute))
else else
checkCwd() this[LSTAT]()
} }
[MAKEFS] (er, entry, done) { emit (ev, ...data) {
if (er) { if (ev === 'error')
this[ONERROR](er, entry) this[HAD_ERROR] = true
done() return super.emit(ev, ...data)
return }
[LSTAT] () {
fs.lstat(this.absolute, (er, stat) => {
if (er)
return this.emit('error', er)
this[ONLSTAT](stat)
})
}
[ONLSTAT] (stat) {
this.statCache.set(this.absolute, stat)
this.stat = stat
if (!stat.isFile())
stat.size = 0
this.type = getType(stat)
this.emit('stat', stat)
this[PROCESS]()
}
[PROCESS] () {
switch (this.type) {
case 'File': return this[FILE]()
case 'Directory': return this[DIRECTORY]()
case 'SymbolicLink': return this[SYMLINK]()
// unsupported types are ignored.
default: return this.end()
} }
}
switch (entry.type) { [MODE] (mode) {
case 'File': return modeFix(mode, this.type === 'Directory', this.portable)
case 'OldFile': }
case 'ContiguousFile':
return this[FILE](entry, done)
case 'Link': [PREFIX] (path) {
return this[HARDLINK](entry, done) return prefixPath(path, this.prefix)
}
case 'SymbolicLink': [HEADER] () {
return this[SYMLINK](entry, done) if (this.type === 'Directory' && this.portable)
this.noMtime = true
case 'Directory': this.header = new Header({
case 'GNUDumpDir': path: this[PREFIX](this.path),
return this[DIRECTORY](entry, done) // only apply the prefix to hard links.
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this[MODE](this.stat.mode),
uid: this.portable ? null : this.stat.uid,
gid: this.portable ? null : this.stat.gid,
size: this.stat.size,
mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
type: this.type,
uname: this.portable ? null :
this.stat.uid === this.myuid ? this.myuser : '',
atime: this.portable ? null : this.stat.atime,
ctime: this.portable ? null : this.stat.ctime,
})
if (this.header.encode() && !this.noPax) {
super.write(new Pax({
atime: this.portable ? null : this.header.atime,
ctime: this.portable ? null : this.header.ctime,
gid: this.portable ? null : this.header.gid,
mtime: this.noMtime ? null : this.mtime || this.header.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.header.size,
uid: this.portable ? null : this.header.uid,
uname: this.portable ? null : this.header.uname,
dev: this.portable ? null : this.stat.dev,
ino: this.portable ? null : this.stat.ino,
nlink: this.portable ? null : this.stat.nlink,
}).encode())
} }
super.write(this.header.block)
} }
[LINK] (entry, linkpath, link, done) { [DIRECTORY] () {
// XXX: get the type ('symlink' or 'junction') for windows if (this.path.substr(-1) !== '/')
fs[link](linkpath, entry.absolute, er => { this.path += '/'
this.stat.size = 0
this[HEADER]()
this.end()
}
[SYMLINK] () {
fs.readlink(this.absolute, (er, linkpath) => {
if (er) if (er)
this[ONERROR](er, entry) return this.emit('error', er)
else { this[ONREADLINK](linkpath)
this[UNPEND]() })
entry.resume() }
[ONREADLINK] (linkpath) {
this.linkpath = normPath(linkpath)
this[HEADER]()
this.end()
}
[HARDLINK] (linkpath) {
this.type = 'Link'
this.linkpath = normPath(path.relative(this.cwd, linkpath))
this.stat.size = 0
this[HEADER]()
this.end()
}
[FILE] () {
if (this.stat.nlink > 1) {
const linkKey = this.stat.dev + ':' + this.stat.ino
if (this.linkCache.has(linkKey)) {
const linkpath = this.linkCache.get(linkKey)
if (linkpath.indexOf(this.cwd) === 0)
return this[HARDLINK](linkpath)
} }
done() this.linkCache.set(linkKey, this.absolute)
}
this[HEADER]()
if (this.stat.size === 0)
return this.end()
this[OPENFILE]()
}
[OPENFILE] () {
fs.open(this.absolute, 'r', (er, fd) => {
if (er)
return this.emit('error', er)
this[ONOPENFILE](fd)
}) })
} }
}
const callSync = fn => { [ONOPENFILE] (fd) {
try { this.fd = fd
return [null, fn()] if (this[HAD_ERROR])
} catch (er) { return this[CLOSE]()
return [er, null]
this.blockLen = 512 * Math.ceil(this.stat.size / 512)
this.blockRemain = this.blockLen
const bufLen = Math.min(this.blockLen, this.maxReadSize)
this.buf = Buffer.allocUnsafe(bufLen)
this.offset = 0
this.pos = 0
this.remain = this.stat.size
this.length = this.buf.length
this[READ]()
} }
}
class UnpackSync extends Unpack { [READ] () {
[MAKEFS] (er, entry) { const { fd, buf, offset, length, pos } = this
return super[MAKEFS](er, entry, () => {}) fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
if (er) {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
return this[CLOSE](() => this.emit('error', er))
}
this[ONREAD](bytesRead)
})
} }
[CHECKFS] (entry) { [CLOSE] (cb) {
this[PRUNECACHE](entry) fs.close(this.fd, cb)
}
if (!this[CHECKED_CWD]) { [ONREAD] (bytesRead) {
const er = this[MKDIR](this.cwd, this.dmode) if (bytesRead <= 0 && this.remain > 0) {
if (er) const er = new Error('encountered unexpected EOF')
return this[ONERROR](er, entry) er.path = this.absolute
this[CHECKED_CWD] = true er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
} }
// don't bother to make the parent if the current entry is the cwd, if (bytesRead > this.remain) {
// we've already checked it. const er = new Error('did not encounter expected EOF')
if (entry.absolute !== this.cwd) { er.path = this.absolute
const parent = normPath(path.dirname(entry.absolute)) er.syscall = 'read'
if (parent !== this.cwd) { er.code = 'EOF'
const mkParent = this[MKDIR](parent, this.dmode) return this[CLOSE](() => this.emit('error', er))
if (mkParent)
return this[ONERROR](mkParent, entry)
}
} }
const [lstatEr, st] = callSync(() => fs.lstatSync(entry.absolute)) // null out the rest of the buffer, if we could fit the block padding
if (st && (this.keep || this.newer && st.mtime > entry.mtime)) // at the end of this loop, we've incremented bytesRead and this.remain
return this[SKIP](entry) // to be incremented up to the blockRemain level, as if we had expected
// to get a null-padded file, and read it until the end. then we will
if (lstatEr || this[ISREUSABLE](entry, st)) // decrement both remain and blockRemain by bytesRead, and know that we
return this[MAKEFS](null, entry) // reached the expected EOF, without any null buffer to append.
if (bytesRead === this.remain) {
if (st.isDirectory()) { for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) {
if (entry.type === 'Directory') { this.buf[i + this.offset] = 0
const needChmod = !this.noChmod && bytesRead++
entry.mode && this.remain++
(st.mode & 0o7777) !== entry.mode
const [er] = needChmod ? callSync(() => {
fs.chmodSync(entry.absolute, entry.mode)
}) : []
return this[MAKEFS](er, entry)
} }
// not a dir entry, have to remove it
const [er] = callSync(() => fs.rmdirSync(entry.absolute))
this[MAKEFS](er, entry)
} }
// not a dir, and not reusable. const writeBuf = this.offset === 0 && bytesRead === this.buf.length ?
// don't remove if it's the cwd, since we want that error. this.buf : this.buf.slice(this.offset, this.offset + bytesRead)
const [er] = entry.absolute === this.cwd ? []
: callSync(() => unlinkFileSync(entry.absolute)) const flushed = this.write(writeBuf)
this[MAKEFS](er, entry) if (!flushed)
this[AWAITDRAIN](() => this[ONDRAIN]())
else
this[ONDRAIN]()
} }
[FILE] (entry, done) { [AWAITDRAIN] (cb) {
const mode = entry.mode & 0o7777 || this.fmode this.once('drain', cb)
}
const oner = er => { write (writeBuf) {
let closeError if (this.blockRemain < writeBuf.length) {
try { const er = new Error('writing more data than expected')
fs.closeSync(fd) er.path = this.absolute
} catch (e) { return this.emit('error', er)
closeError = e
}
if (er || closeError)
this[ONERROR](er || closeError, entry)
done()
} }
this.remain -= writeBuf.length
this.blockRemain -= writeBuf.length
this.pos += writeBuf.length
this.offset += writeBuf.length
return super.write(writeBuf)
}
let fd [ONDRAIN] () {
try { if (!this.remain) {
fd = fs.openSync(entry.absolute, getFlag(entry.size), mode) if (this.blockRemain)
} catch (er) { super.write(Buffer.alloc(this.blockRemain))
return oner(er) return this[CLOSE](er => er ? this.emit('error', er) : this.end())
} }
const tx = this.transform ? this.transform(entry) || entry : entry
if (tx !== entry) { if (this.offset >= this.length) {
tx.on('error', er => this[ONERROR](er, entry)) // if we only have a smaller bit left to read, alloc a smaller buffer
entry.pipe(tx) // otherwise, keep it the same length it was before.
this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length))
this.offset = 0
} }
this.length = this.buf.length - this.offset
this[READ]()
}
})
tx.on('data', chunk => { class WriteEntrySync extends WriteEntry {
try { [LSTAT] () {
fs.writeSync(fd, chunk, 0, chunk.length) this[ONLSTAT](fs.lstatSync(this.absolute))
} catch (er) { }
oner(er)
}
})
tx.on('end', _ => { [SYMLINK] () {
let er = null this[ONREADLINK](fs.readlinkSync(this.absolute))
// try both, falling futimes back to utimes }
// if either fails, handle the first error
if (entry.mtime && !this.noMtime) {
const atime = entry.atime || new Date()
const mtime = entry.mtime
try {
fs.futimesSync(fd, atime, mtime)
} catch (futimeser) {
try {
fs.utimesSync(entry.absolute, atime, mtime)
} catch (utimeser) {
er = futimeser
}
}
}
if (this[DOCHOWN](entry)) { [OPENFILE] () {
const uid = this[UID](entry) this[ONOPENFILE](fs.openSync(this.absolute, 'r'))
const gid = this[GID](entry) }
[READ] () {
let threw = true
try {
const { fd, buf, offset, length, pos } = this
const bytesRead = fs.readSync(fd, buf, offset, length, pos)
this[ONREAD](bytesRead)
threw = false
} finally {
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
if (threw) {
try { try {
fs.fchownSync(fd, uid, gid) this[CLOSE](() => {})
} catch (fchowner) { } catch (er) {}
try {
fs.chownSync(entry.absolute, uid, gid)
} catch (chowner) {
er = er || fchowner
}
}
} }
}
}
oner(er) [AWAITDRAIN] (cb) {
}) cb()
} }
[DIRECTORY] (entry, done) { [CLOSE] (cb) {
const mode = entry.mode & 0o7777 || this.dmode fs.closeSync(this.fd)
const er = this[MKDIR](entry.absolute, mode) cb()
if (er) { }
this[ONERROR](er, entry) }
done()
return const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
} constructor (readEntry, opt) {
if (entry.mtime && !this.noMtime) { opt = opt || {}
try { super(opt)
fs.utimesSync(entry.absolute, entry.atime || new Date(), entry.mtime) this.preservePaths = !!opt.preservePaths
} catch (er) {} this.portable = !!opt.portable
} this.strict = !!opt.strict
if (this[DOCHOWN](entry)) { this.noPax = !!opt.noPax
try { this.noMtime = !!opt.noMtime
fs.chownSync(entry.absolute, this[UID](entry), this[GID](entry))
} catch (er) {} this.readEntry = readEntry
this.type = readEntry.type
if (this.type === 'Directory' && this.portable)
this.noMtime = true
this.prefix = opt.prefix || null
this.path = normPath(readEntry.path)
this.mode = this[MODE](readEntry.mode)
this.uid = this.portable ? null : readEntry.uid
this.gid = this.portable ? null : readEntry.gid
this.uname = this.portable ? null : readEntry.uname
this.gname = this.portable ? null : readEntry.gname
this.size = readEntry.size
this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
this.atime = this.portable ? null : readEntry.atime
this.ctime = this.portable ? null : readEntry.ctime
this.linkpath = normPath(readEntry.linkpath)
if (typeof opt.onwarn === 'function')
this.on('warn', opt.onwarn)
let pathWarn = false
if (!this.preservePaths) {
const [root, stripped] = stripAbsolutePath(this.path)
if (root) {
this.path = stripped
pathWarn = root
}
} }
done()
entry.resume()
}
[MKDIR] (dir, mode) { this.remain = readEntry.size
try { this.blockRemain = readEntry.startBlockSize
return mkdir.sync(normPath(dir), {
uid: this.uid, this.header = new Header({
gid: this.gid, path: this[PREFIX](this.path),
processUid: this.processUid, linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
processGid: this.processGid, : this.linkpath,
umask: this.processUmask, // only the permissions and setuid/setgid/sticky bitflags
preserve: this.preservePaths, // not the higher-order bits that specify file type
unlink: this.unlink, mode: this.mode,
cache: this.dirCache, uid: this.portable ? null : this.uid,
cwd: this.cwd, gid: this.portable ? null : this.gid,
mode: mode, size: this.size,
mtime: this.noMtime ? null : this.mtime,
type: this.type,
uname: this.portable ? null : this.uname,
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
})
if (pathWarn) {
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, {
entry: this,
path: pathWarn + this.path,
}) })
} catch (er) {
return er
} }
}
[LINK] (entry, linkpath, link, done) { if (this.header.encode() && !this.noPax) {
try { super.write(new Pax({
fs[link + 'Sync'](linkpath, entry.absolute) atime: this.portable ? null : this.atime,
done() ctime: this.portable ? null : this.ctime,
entry.resume() gid: this.portable ? null : this.gid,
} catch (er) { mtime: this.noMtime ? null : this.mtime,
return this[ONERROR](er, entry) path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.size,
uid: this.portable ? null : this.uid,
uname: this.portable ? null : this.uname,
dev: this.portable ? null : this.readEntry.dev,
ino: this.portable ? null : this.readEntry.ino,
nlink: this.portable ? null : this.readEntry.nlink,
}).encode())
} }
}
}
Unpack.Sync = UnpackSync
module.exports = Unpack
super.write(this.header.block)
readEntry.pipe(this)
}
/***/ }), [PREFIX] (path) {
return prefixPath(path, this.prefix)
}
/***/ 407: [MODE] (mode) {
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { return modeFix(mode, this.type === 'Directory', this.portable)
}
"use strict"; write (data) {
const writeLen = data.length
if (writeLen > this.blockRemain)
throw new Error('writing more to entry than is appropriate')
this.blockRemain -= writeLen
return super.write(data)
}
end () {
if (this.blockRemain)
super.write(Buffer.alloc(this.blockRemain))
return super.end()
}
})
// tar -u WriteEntry.Sync = WriteEntrySync
WriteEntry.Tar = WriteEntryTar
const hlo = __nccwpck_require__(5274) const getType = stat =>
const r = __nccwpck_require__(5923) stat.isFile() ? 'File'
// just call tar.r with the filter and mtimeCache : stat.isDirectory() ? 'Directory'
: stat.isSymbolicLink() ? 'SymbolicLink'
: 'Unsupported'
module.exports = (opt_, files, cb) => { module.exports = WriteEntry
const opt = hlo(opt_)
if (!opt.file)
throw new TypeError('file is required')
if (opt.gzip) /***/ }),
throw new TypeError('cannot append to compressed archives')
if (!files || !Array.isArray(files) || !files.length) /***/ 6684:
throw new TypeError('no files or directories specified') /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
files = Array.from(files) "use strict";
mtimeFilter(opt) const proc = typeof process === 'object' && process ? process : {
return r(opt, files, cb) stdout: null,
stderr: null,
} }
const EE = __nccwpck_require__(2361)
const Stream = __nccwpck_require__(2781)
const SD = (__nccwpck_require__(1576).StringDecoder)
const mtimeFilter = opt => { const EOF = Symbol('EOF')
const filter = opt.filter const MAYBE_EMIT_END = Symbol('maybeEmitEnd')
const EMITTED_END = Symbol('emittedEnd')
if (!opt.mtimeCache) const EMITTING_END = Symbol('emittingEnd')
opt.mtimeCache = new Map() const EMITTED_ERROR = Symbol('emittedError')
const CLOSED = Symbol('closed')
const READ = Symbol('read')
const FLUSH = Symbol('flush')
const FLUSHCHUNK = Symbol('flushChunk')
const ENCODING = Symbol('encoding')
const DECODER = Symbol('decoder')
const FLOWING = Symbol('flowing')
const PAUSED = Symbol('paused')
const RESUME = Symbol('resume')
const BUFFERLENGTH = Symbol('bufferLength')
const BUFFERPUSH = Symbol('bufferPush')
const BUFFERSHIFT = Symbol('bufferShift')
const OBJECTMODE = Symbol('objectMode')
const DESTROYED = Symbol('destroyed')
const EMITDATA = Symbol('emitData')
const EMITEND = Symbol('emitEnd')
const EMITEND2 = Symbol('emitEnd2')
const ASYNC = Symbol('async')
opt.filter = filter ? (path, stat) => const defer = fn => Promise.resolve().then(fn)
filter(path, stat) && !(opt.mtimeCache.get(path) > stat.mtime)
: (path, stat) => !(opt.mtimeCache.get(path) > stat.mtime)
}
// TODO remove when Node v8 support drops
const doIter = global._MP_NO_ITERATOR_SYMBOLS_ !== '1'
const ASYNCITERATOR = doIter && Symbol.asyncIterator
|| Symbol('asyncIterator not implemented')
const ITERATOR = doIter && Symbol.iterator
|| Symbol('iterator not implemented')
/***/ }), // events that mean 'the stream is over'
// these are treated specially, and re-emitted
// if they are listened for after emitting.
const isEndish = ev =>
ev === 'end' ||
ev === 'finish' ||
ev === 'prefinish'
/***/ 5899: const isArrayBuffer = b => b instanceof ArrayBuffer ||
/***/ ((module) => { typeof b === 'object' &&
b.constructor &&
b.constructor.name === 'ArrayBuffer' &&
b.byteLength >= 0
"use strict"; const isArrayBufferView = b => !Buffer.isBuffer(b) && ArrayBuffer.isView(b)
module.exports = Base => class extends Base { class Pipe {
warn (code, message, data = {}) { constructor (src, dest, opts) {
if (this.file) this.src = src
data.file = this.file this.dest = dest
if (this.cwd) this.opts = opts
data.cwd = this.cwd this.ondrain = () => src[RESUME]()
data.code = message instanceof Error && message.code || code dest.on('drain', this.ondrain)
data.tarCode = code }
if (!this.strict && data.recoverable !== false) { unpipe () {
if (message instanceof Error) { this.dest.removeListener('drain', this.ondrain)
data = Object.assign(message, data) }
message = message.message // istanbul ignore next - only here for the prototype
} proxyErrors () {}
this.emit('warn', data.tarCode, message, data) end () {
} else if (message instanceof Error) this.unpipe()
this.emit('error', Object.assign(message, data)) if (this.opts.end)
else this.dest.end()
this.emit('error', Object.assign(new Error(`${code}: ${message}`), data))
} }
} }
class PipeProxyErrors extends Pipe {
unpipe () {
this.src.removeListener('error', this.proxyErrors)
super.unpipe()
}
constructor (src, dest, opts) {
super(src, dest, opts)
this.proxyErrors = er => dest.emit('error', er)
src.on('error', this.proxyErrors)
}
}
/***/ }), module.exports = class Minipass extends Stream {
constructor (options) {
/***/ 4808: super()
/***/ ((module) => { this[FLOWING] = false
// whether we're explicitly paused
"use strict"; this[PAUSED] = false
this.pipes = []
this.buffer = []
this[OBJECTMODE] = options && options.objectMode || false
if (this[OBJECTMODE])
this[ENCODING] = null
else
this[ENCODING] = options && options.encoding || null
if (this[ENCODING] === 'buffer')
this[ENCODING] = null
this[ASYNC] = options && !!options.async || false
this[DECODER] = this[ENCODING] ? new SD(this[ENCODING]) : null
this[EOF] = false
this[EMITTED_END] = false
this[EMITTING_END] = false
this[CLOSED] = false
this[EMITTED_ERROR] = null
this.writable = true
this.readable = true
this[BUFFERLENGTH] = 0
this[DESTROYED] = false
}
get bufferLength () { return this[BUFFERLENGTH] }
// When writing files on Windows, translate the characters to their get encoding () { return this[ENCODING] }
// 0xf000 higher-encoded versions. set encoding (enc) {
if (this[OBJECTMODE])
throw new Error('cannot set encoding in objectMode')
const raw = [ if (this[ENCODING] && enc !== this[ENCODING] &&
'|', (this[DECODER] && this[DECODER].lastNeed || this[BUFFERLENGTH]))
'<', throw new Error('cannot change encoding')
'>',
'?',
':',
]
const win = raw.map(char => if (this[ENCODING] !== enc) {
String.fromCharCode(0xf000 + char.charCodeAt(0))) this[DECODER] = enc ? new SD(enc) : null
if (this.buffer.length)
this.buffer = this.buffer.map(chunk => this[DECODER].write(chunk))
}
const toWin = new Map(raw.map((char, i) => [char, win[i]])) this[ENCODING] = enc
const toRaw = new Map(win.map((char, i) => [char, raw[i]])) }
module.exports = { setEncoding (enc) {
encode: s => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s), this.encoding = enc
decode: s => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s), }
}
get objectMode () { return this[OBJECTMODE] }
set objectMode (om) { this[OBJECTMODE] = this[OBJECTMODE] || !!om }
/***/ }), get ['async'] () { return this[ASYNC] }
set ['async'] (a) { this[ASYNC] = this[ASYNC] || !!a }
/***/ 5450: write (chunk, encoding, cb) {
/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { if (this[EOF])
throw new Error('write after end')
"use strict"; if (this[DESTROYED]) {
this.emit('error', Object.assign(
new Error('Cannot call write after a stream was destroyed'),
{ code: 'ERR_STREAM_DESTROYED' }
))
return true
}
const MiniPass = __nccwpck_require__(1077) if (typeof encoding === 'function')
const Pax = __nccwpck_require__(7996) cb = encoding, encoding = 'utf8'
const Header = __nccwpck_require__(6043)
const fs = __nccwpck_require__(7147)
const path = __nccwpck_require__(1017)
const normPath = __nccwpck_require__(6843)
const stripSlash = __nccwpck_require__(8886)
const prefixPath = (path, prefix) => { if (!encoding)
if (!prefix) encoding = 'utf8'
return normPath(path)
path = normPath(path).replace(/^\.(\/|$)/, '')
return stripSlash(prefix) + '/' + path
}
const maxReadSize = 16 * 1024 * 1024 const fn = this[ASYNC] ? defer : f => f()
const PROCESS = Symbol('process')
const FILE = Symbol('file')
const DIRECTORY = Symbol('directory')
const SYMLINK = Symbol('symlink')
const HARDLINK = Symbol('hardlink')
const HEADER = Symbol('header')
const READ = Symbol('read')
const LSTAT = Symbol('lstat')
const ONLSTAT = Symbol('onlstat')
const ONREAD = Symbol('onread')
const ONREADLINK = Symbol('onreadlink')
const OPENFILE = Symbol('openfile')
const ONOPENFILE = Symbol('onopenfile')
const CLOSE = Symbol('close')
const MODE = Symbol('mode')
const AWAITDRAIN = Symbol('awaitDrain')
const ONDRAIN = Symbol('ondrain')
const PREFIX = Symbol('prefix')
const HAD_ERROR = Symbol('hadError')
const warner = __nccwpck_require__(5899)
const winchars = __nccwpck_require__(4808)
const stripAbsolutePath = __nccwpck_require__(7111)
const modeFix = __nccwpck_require__(8371) // convert array buffers and typed array views into buffers
// at some point in the future, we may want to do the opposite!
// leave strings and buffers as-is
// anything else switches us into object mode
if (!this[OBJECTMODE] && !Buffer.isBuffer(chunk)) {
if (isArrayBufferView(chunk))
chunk = Buffer.from(chunk.buffer, chunk.byteOffset, chunk.byteLength)
else if (isArrayBuffer(chunk))
chunk = Buffer.from(chunk)
else if (typeof chunk !== 'string')
// use the setter so we throw if we have encoding set
this.objectMode = true
}
const WriteEntry = warner(class WriteEntry extends MiniPass { // handle object mode up front, since it's simpler
constructor (p, opt) { // this yields better performance, fewer checks later.
opt = opt || {} if (this[OBJECTMODE]) {
super(opt) /* istanbul ignore if - maybe impossible? */
if (typeof p !== 'string') if (this.flowing && this[BUFFERLENGTH] !== 0)
throw new TypeError('path is required') this[FLUSH](true)
this.path = normPath(p)
// suppress atime, ctime, uid, gid, uname, gname
this.portable = !!opt.portable
// until node has builtin pwnam functions, this'll have to do
this.myuid = process.getuid && process.getuid() || 0
this.myuser = process.env.USER || ''
this.maxReadSize = opt.maxReadSize || maxReadSize
this.linkCache = opt.linkCache || new Map()
this.statCache = opt.statCache || new Map()
this.preservePaths = !!opt.preservePaths
this.cwd = normPath(opt.cwd || process.cwd())
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.mtime = opt.mtime || null
this.prefix = opt.prefix ? normPath(opt.prefix) : null
this.fd = null if (this.flowing)
this.blockLen = null this.emit('data', chunk)
this.blockRemain = null else
this.buf = null this[BUFFERPUSH](chunk)
this.offset = null
this.length = null
this.pos = null
this.remain = null
if (typeof opt.onwarn === 'function') if (this[BUFFERLENGTH] !== 0)
this.on('warn', opt.onwarn) this.emit('readable')
let pathWarn = false if (cb)
if (!this.preservePaths) { fn(cb)
const [root, stripped] = stripAbsolutePath(this.path)
if (root) {
this.path = stripped
pathWarn = root
}
}
this.win32 = !!opt.win32 || process.platform === 'win32' return this.flowing
if (this.win32) {
// force the \ to / normalization, since we might not *actually*
// be on windows, but want \ to be considered a path separator.
this.path = winchars.decode(this.path.replace(/\\/g, '/'))
p = p.replace(/\\/g, '/')
} }
this.absolute = normPath(opt.absolute || path.resolve(this.cwd, p)) // at this point the chunk is a buffer or string
// don't buffer it up or send it to the decoder
if (this.path === '') if (!chunk.length) {
this.path = './' if (this[BUFFERLENGTH] !== 0)
this.emit('readable')
if (cb)
fn(cb)
return this.flowing
}
if (pathWarn) { // fast-path writing strings of same encoding to a stream with
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { // an empty buffer, skipping the buffer/decoder dance
entry: this, if (typeof chunk === 'string' &&
path: pathWarn + this.path, // unless it is a string already ready for us to use
}) !(encoding === this[ENCODING] && !this[DECODER].lastNeed)) {
chunk = Buffer.from(chunk, encoding)
} }
if (this.statCache.has(this.absolute)) if (Buffer.isBuffer(chunk) && this[ENCODING])
this[ONLSTAT](this.statCache.get(this.absolute)) chunk = this[DECODER].write(chunk)
else
this[LSTAT]()
}
emit (ev, ...data) { // Note: flushing CAN potentially switch us into not-flowing mode
if (ev === 'error') if (this.flowing && this[BUFFERLENGTH] !== 0)
this[HAD_ERROR] = true this[FLUSH](true)
return super.emit(ev, ...data)
}
[LSTAT] () { if (this.flowing)
fs.lstat(this.absolute, (er, stat) => { this.emit('data', chunk)
if (er) else
return this.emit('error', er) this[BUFFERPUSH](chunk)
this[ONLSTAT](stat)
})
}
[ONLSTAT] (stat) { if (this[BUFFERLENGTH] !== 0)
this.statCache.set(this.absolute, stat) this.emit('readable')
this.stat = stat
if (!stat.isFile())
stat.size = 0
this.type = getType(stat)
this.emit('stat', stat)
this[PROCESS]()
}
[PROCESS] () { if (cb)
switch (this.type) { fn(cb)
case 'File': return this[FILE]()
case 'Directory': return this[DIRECTORY]()
case 'SymbolicLink': return this[SYMLINK]()
// unsupported types are ignored.
default: return this.end()
}
}
[MODE] (mode) { return this.flowing
return modeFix(mode, this.type === 'Directory', this.portable)
} }
[PREFIX] (path) { read (n) {
return prefixPath(path, this.prefix) if (this[DESTROYED])
} return null
[HEADER] () { if (this[BUFFERLENGTH] === 0 || n === 0 || n > this[BUFFERLENGTH]) {
if (this.type === 'Directory' && this.portable) this[MAYBE_EMIT_END]()
this.noMtime = true return null
}
this.header = new Header({ if (this[OBJECTMODE])
path: this[PREFIX](this.path), n = null
// only apply the prefix to hard links.
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this[MODE](this.stat.mode),
uid: this.portable ? null : this.stat.uid,
gid: this.portable ? null : this.stat.gid,
size: this.stat.size,
mtime: this.noMtime ? null : this.mtime || this.stat.mtime,
type: this.type,
uname: this.portable ? null :
this.stat.uid === this.myuid ? this.myuser : '',
atime: this.portable ? null : this.stat.atime,
ctime: this.portable ? null : this.stat.ctime,
})
if (this.header.encode() && !this.noPax) { if (this.buffer.length > 1 && !this[OBJECTMODE]) {
super.write(new Pax({ if (this.encoding)
atime: this.portable ? null : this.header.atime, this.buffer = [this.buffer.join('')]
ctime: this.portable ? null : this.header.ctime, else
gid: this.portable ? null : this.header.gid, this.buffer = [Buffer.concat(this.buffer, this[BUFFERLENGTH])]
mtime: this.noMtime ? null : this.mtime || this.header.mtime,
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.header.size,
uid: this.portable ? null : this.header.uid,
uname: this.portable ? null : this.header.uname,
dev: this.portable ? null : this.stat.dev,
ino: this.portable ? null : this.stat.ino,
nlink: this.portable ? null : this.stat.nlink,
}).encode())
} }
super.write(this.header.block)
const ret = this[READ](n || null, this.buffer[0])
this[MAYBE_EMIT_END]()
return ret
} }
[DIRECTORY] () { [READ] (n, chunk) {
if (this.path.substr(-1) !== '/') if (n === chunk.length || n === null)
this.path += '/' this[BUFFERSHIFT]()
this.stat.size = 0 else {
this[HEADER]() this.buffer[0] = chunk.slice(n)
this.end() chunk = chunk.slice(0, n)
this[BUFFERLENGTH] -= n
}
this.emit('data', chunk)
if (!this.buffer.length && !this[EOF])
this.emit('drain')
return chunk
} }
[SYMLINK] () { end (chunk, encoding, cb) {
fs.readlink(this.absolute, (er, linkpath) => { if (typeof chunk === 'function')
if (er) cb = chunk, chunk = null
return this.emit('error', er) if (typeof encoding === 'function')
this[ONREADLINK](linkpath) cb = encoding, encoding = 'utf8'
}) if (chunk)
this.write(chunk, encoding)
if (cb)
this.once('end', cb)
this[EOF] = true
this.writable = false
// if we haven't written anything, then go ahead and emit,
// even if we're not reading.
// we'll re-emit if a new 'end' listener is added anyway.
// This makes MP more suitable to write-only use cases.
if (this.flowing || !this[PAUSED])
this[MAYBE_EMIT_END]()
return this
} }
[ONREADLINK] (linkpath) { // don't let the internal resume be overwritten
this.linkpath = normPath(linkpath) [RESUME] () {
this[HEADER]() if (this[DESTROYED])
this.end() return
this[PAUSED] = false
this[FLOWING] = true
this.emit('resume')
if (this.buffer.length)
this[FLUSH]()
else if (this[EOF])
this[MAYBE_EMIT_END]()
else
this.emit('drain')
} }
[HARDLINK] (linkpath) { resume () {
this.type = 'Link' return this[RESUME]()
this.linkpath = normPath(path.relative(this.cwd, linkpath))
this.stat.size = 0
this[HEADER]()
this.end()
} }
[FILE] () { pause () {
if (this.stat.nlink > 1) { this[FLOWING] = false
const linkKey = this.stat.dev + ':' + this.stat.ino this[PAUSED] = true
if (this.linkCache.has(linkKey)) { }
const linkpath = this.linkCache.get(linkKey)
if (linkpath.indexOf(this.cwd) === 0)
return this[HARDLINK](linkpath)
}
this.linkCache.set(linkKey, this.absolute)
}
this[HEADER]() get destroyed () {
if (this.stat.size === 0) return this[DESTROYED]
return this.end() }
this[OPENFILE]() get flowing () {
return this[FLOWING]
} }
[OPENFILE] () { get paused () {
fs.open(this.absolute, 'r', (er, fd) => { return this[PAUSED]
if (er)
return this.emit('error', er)
this[ONOPENFILE](fd)
})
} }
[ONOPENFILE] (fd) { [BUFFERPUSH] (chunk) {
this.fd = fd if (this[OBJECTMODE])
if (this[HAD_ERROR]) this[BUFFERLENGTH] += 1
return this[CLOSE]() else
this[BUFFERLENGTH] += chunk.length
this.buffer.push(chunk)
}
this.blockLen = 512 * Math.ceil(this.stat.size / 512) [BUFFERSHIFT] () {
this.blockRemain = this.blockLen if (this.buffer.length) {
const bufLen = Math.min(this.blockLen, this.maxReadSize) if (this[OBJECTMODE])
this.buf = Buffer.allocUnsafe(bufLen) this[BUFFERLENGTH] -= 1
this.offset = 0 else
this.pos = 0 this[BUFFERLENGTH] -= this.buffer[0].length
this.remain = this.stat.size }
this.length = this.buf.length return this.buffer.shift()
this[READ]()
} }
[READ] () { [FLUSH] (noDrain) {
const { fd, buf, offset, length, pos } = this do {} while (this[FLUSHCHUNK](this[BUFFERSHIFT]()))
fs.read(fd, buf, offset, length, pos, (er, bytesRead) => {
if (er) { if (!noDrain && !this.buffer.length && !this[EOF])
// ignoring the error from close(2) is a bad practice, but at this.emit('drain')
// this point we already have an error, don't need another one
return this[CLOSE](() => this.emit('error', er))
}
this[ONREAD](bytesRead)
})
} }
[CLOSE] (cb) { [FLUSHCHUNK] (chunk) {
fs.close(this.fd, cb) return chunk ? (this.emit('data', chunk), this.flowing) : false
} }
[ONREAD] (bytesRead) { pipe (dest, opts) {
if (bytesRead <= 0 && this.remain > 0) { if (this[DESTROYED])
const er = new Error('encountered unexpected EOF') return
er.path = this.absolute
er.syscall = 'read'
er.code = 'EOF'
return this[CLOSE](() => this.emit('error', er))
}
if (bytesRead > this.remain) { const ended = this[EMITTED_END]
const er = new Error('did not encounter expected EOF') opts = opts || {}
er.path = this.absolute if (dest === proc.stdout || dest === proc.stderr)
er.syscall = 'read' opts.end = false
er.code = 'EOF' else
return this[CLOSE](() => this.emit('error', er)) opts.end = opts.end !== false
} opts.proxyErrors = !!opts.proxyErrors
// null out the rest of the buffer, if we could fit the block padding // piping an ended stream ends immediately
// at the end of this loop, we've incremented bytesRead and this.remain if (ended) {
// to be incremented up to the blockRemain level, as if we had expected if (opts.end)
// to get a null-padded file, and read it until the end. then we will dest.end()
// decrement both remain and blockRemain by bytesRead, and know that we } else {
// reached the expected EOF, without any null buffer to append. this.pipes.push(!opts.proxyErrors ? new Pipe(this, dest, opts)
if (bytesRead === this.remain) { : new PipeProxyErrors(this, dest, opts))
for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { if (this[ASYNC])
this.buf[i + this.offset] = 0 defer(() => this[RESUME]())
bytesRead++ else
this.remain++ this[RESUME]()
}
} }
const writeBuf = this.offset === 0 && bytesRead === this.buf.length ? return dest
this.buf : this.buf.slice(this.offset, this.offset + bytesRead) }
const flushed = this.write(writeBuf) unpipe (dest) {
if (!flushed) const p = this.pipes.find(p => p.dest === dest)
this[AWAITDRAIN](() => this[ONDRAIN]()) if (p) {
else this.pipes.splice(this.pipes.indexOf(p), 1)
this[ONDRAIN]() p.unpipe()
}
} }
[AWAITDRAIN] (cb) { addListener (ev, fn) {
this.once('drain', cb) return this.on(ev, fn)
} }
write (writeBuf) { on (ev, fn) {
if (this.blockRemain < writeBuf.length) { const ret = super.on(ev, fn)
const er = new Error('writing more data than expected') if (ev === 'data' && !this.pipes.length && !this.flowing)
er.path = this.absolute this[RESUME]()
return this.emit('error', er) else if (ev === 'readable' && this[BUFFERLENGTH] !== 0)
super.emit('readable')
else if (isEndish(ev) && this[EMITTED_END]) {
super.emit(ev)
this.removeAllListeners(ev)
} else if (ev === 'error' && this[EMITTED_ERROR]) {
if (this[ASYNC])
defer(() => fn.call(this, this[EMITTED_ERROR]))
else
fn.call(this, this[EMITTED_ERROR])
} }
this.remain -= writeBuf.length return ret
this.blockRemain -= writeBuf.length
this.pos += writeBuf.length
this.offset += writeBuf.length
return super.write(writeBuf)
} }
[ONDRAIN] () { get emittedEnd () {
if (!this.remain) { return this[EMITTED_END]
if (this.blockRemain) }
super.write(Buffer.alloc(this.blockRemain))
return this[CLOSE](er => er ? this.emit('error', er) : this.end()) [MAYBE_EMIT_END] () {
if (!this[EMITTING_END] &&
!this[EMITTED_END] &&
!this[DESTROYED] &&
this.buffer.length === 0 &&
this[EOF]) {
this[EMITTING_END] = true
this.emit('end')
this.emit('prefinish')
this.emit('finish')
if (this[CLOSED])
this.emit('close')
this[EMITTING_END] = false
} }
}
if (this.offset >= this.length) { emit (ev, data, ...extra) {
// if we only have a smaller bit left to read, alloc a smaller buffer // error and close are only events allowed after calling destroy()
// otherwise, keep it the same length it was before. if (ev !== 'error' && ev !== 'close' && ev !== DESTROYED && this[DESTROYED])
this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)) return
this.offset = 0 else if (ev === 'data') {
return !data ? false
: this[ASYNC] ? defer(() => this[EMITDATA](data))
: this[EMITDATA](data)
} else if (ev === 'end') {
return this[EMITEND]()
} else if (ev === 'close') {
this[CLOSED] = true
// don't emit close before 'end' and 'finish'
if (!this[EMITTED_END] && !this[DESTROYED])
return
const ret = super.emit('close')
this.removeAllListeners('close')
return ret
} else if (ev === 'error') {
this[EMITTED_ERROR] = data
const ret = super.emit('error', data)
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'resume') {
const ret = super.emit('resume')
this[MAYBE_EMIT_END]()
return ret
} else if (ev === 'finish' || ev === 'prefinish') {
const ret = super.emit(ev)
this.removeAllListeners(ev)
return ret
} }
this.length = this.buf.length - this.offset
this[READ]()
}
})
class WriteEntrySync extends WriteEntry { // Some other unknown event
[LSTAT] () { const ret = super.emit(ev, data, ...extra)
this[ONLSTAT](fs.lstatSync(this.absolute)) this[MAYBE_EMIT_END]()
return ret
} }
[SYMLINK] () { [EMITDATA] (data) {
this[ONREADLINK](fs.readlinkSync(this.absolute)) for (const p of this.pipes) {
if (p.dest.write(data) === false)
this.pause()
}
const ret = super.emit('data', data)
this[MAYBE_EMIT_END]()
return ret
} }
[OPENFILE] () { [EMITEND] () {
this[ONOPENFILE](fs.openSync(this.absolute, 'r')) if (this[EMITTED_END])
return
this[EMITTED_END] = true
this.readable = false
if (this[ASYNC])
defer(() => this[EMITEND2]())
else
this[EMITEND2]()
} }
[READ] () { [EMITEND2] () {
let threw = true if (this[DECODER]) {
try { const data = this[DECODER].end()
const { fd, buf, offset, length, pos } = this if (data) {
const bytesRead = fs.readSync(fd, buf, offset, length, pos) for (const p of this.pipes) {
this[ONREAD](bytesRead) p.dest.write(data)
threw = false }
} finally { super.emit('data', data)
// ignoring the error from close(2) is a bad practice, but at
// this point we already have an error, don't need another one
if (threw) {
try {
this[CLOSE](() => {})
} catch (er) {}
} }
} }
}
[AWAITDRAIN] (cb) { for (const p of this.pipes) {
cb() p.end()
}
const ret = super.emit('end')
this.removeAllListeners('end')
return ret
} }
[CLOSE] (cb) { // const all = await stream.collect()
fs.closeSync(this.fd) collect () {
cb() const buf = []
if (!this[OBJECTMODE])
buf.dataLength = 0
// set the promise first, in case an error is raised
// by triggering the flow here.
const p = this.promise()
this.on('data', c => {
buf.push(c)
if (!this[OBJECTMODE])
buf.dataLength += c.length
})
return p.then(() => buf)
} }
}
const WriteEntryTar = warner(class WriteEntryTar extends MiniPass {
constructor (readEntry, opt) {
opt = opt || {}
super(opt)
this.preservePaths = !!opt.preservePaths
this.portable = !!opt.portable
this.strict = !!opt.strict
this.noPax = !!opt.noPax
this.noMtime = !!opt.noMtime
this.readEntry = readEntry // const data = await stream.concat()
this.type = readEntry.type concat () {
if (this.type === 'Directory' && this.portable) return this[OBJECTMODE]
this.noMtime = true ? Promise.reject(new Error('cannot concat in objectMode'))
: this.collect().then(buf =>
this[OBJECTMODE]
? Promise.reject(new Error('cannot concat in objectMode'))
: this[ENCODING] ? buf.join('') : Buffer.concat(buf, buf.dataLength))
}
this.prefix = opt.prefix || null // stream.promise().then(() => done, er => emitted error)
promise () {
return new Promise((resolve, reject) => {
this.on(DESTROYED, () => reject(new Error('stream destroyed')))
this.on('error', er => reject(er))
this.on('end', () => resolve())
})
}
this.path = normPath(readEntry.path) // for await (let chunk of stream)
this.mode = this[MODE](readEntry.mode) [ASYNCITERATOR] () {
this.uid = this.portable ? null : readEntry.uid const next = () => {
this.gid = this.portable ? null : readEntry.gid const res = this.read()
this.uname = this.portable ? null : readEntry.uname if (res !== null)
this.gname = this.portable ? null : readEntry.gname return Promise.resolve({ done: false, value: res })
this.size = readEntry.size
this.mtime = this.noMtime ? null : opt.mtime || readEntry.mtime
this.atime = this.portable ? null : readEntry.atime
this.ctime = this.portable ? null : readEntry.ctime
this.linkpath = normPath(readEntry.linkpath)
if (typeof opt.onwarn === 'function') if (this[EOF])
this.on('warn', opt.onwarn) return Promise.resolve({ done: true })
let pathWarn = false let resolve = null
if (!this.preservePaths) { let reject = null
const [root, stripped] = stripAbsolutePath(this.path) const onerr = er => {
if (root) { this.removeListener('data', ondata)
this.path = stripped this.removeListener('end', onend)
pathWarn = root reject(er)
}
const ondata = value => {
this.removeListener('error', onerr)
this.removeListener('end', onend)
this.pause()
resolve({ value: value, done: !!this[EOF] })
}
const onend = () => {
this.removeListener('error', onerr)
this.removeListener('data', ondata)
resolve({ done: true })
} }
const ondestroy = () => onerr(new Error('stream destroyed'))
return new Promise((res, rej) => {
reject = rej
resolve = res
this.once(DESTROYED, ondestroy)
this.once('error', onerr)
this.once('end', onend)
this.once('data', ondata)
})
} }
this.remain = readEntry.size return { next }
this.blockRemain = readEntry.startBlockSize }
this.header = new Header({
path: this[PREFIX](this.path),
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
// only the permissions and setuid/setgid/sticky bitflags
// not the higher-order bits that specify file type
mode: this.mode,
uid: this.portable ? null : this.uid,
gid: this.portable ? null : this.gid,
size: this.size,
mtime: this.noMtime ? null : this.mtime,
type: this.type,
uname: this.portable ? null : this.uname,
atime: this.portable ? null : this.atime,
ctime: this.portable ? null : this.ctime,
})
if (pathWarn) { // for (let chunk of stream)
this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { [ITERATOR] () {
entry: this, const next = () => {
path: pathWarn + this.path, const value = this.read()
}) const done = value === null
return { value, done }
} }
return { next }
}
if (this.header.encode() && !this.noPax) { destroy (er) {
super.write(new Pax({ if (this[DESTROYED]) {
atime: this.portable ? null : this.atime, if (er)
ctime: this.portable ? null : this.ctime, this.emit('error', er)
gid: this.portable ? null : this.gid, else
mtime: this.noMtime ? null : this.mtime, this.emit(DESTROYED)
path: this[PREFIX](this.path), return this
linkpath: this.type === 'Link' ? this[PREFIX](this.linkpath)
: this.linkpath,
size: this.size,
uid: this.portable ? null : this.uid,
uname: this.portable ? null : this.uname,
dev: this.portable ? null : this.readEntry.dev,
ino: this.portable ? null : this.readEntry.ino,
nlink: this.portable ? null : this.readEntry.nlink,
}).encode())
} }
super.write(this.header.block) this[DESTROYED] = true
readEntry.pipe(this)
}
[PREFIX] (path) { // throw away all buffered data, it's never coming out
return prefixPath(path, this.prefix) this.buffer.length = 0
} this[BUFFERLENGTH] = 0
[MODE] (mode) { if (typeof this.close === 'function' && !this[CLOSED])
return modeFix(mode, this.type === 'Directory', this.portable) this.close()
}
write (data) { if (er)
const writeLen = data.length this.emit('error', er)
if (writeLen > this.blockRemain) else // if no error to emit, still reject pending promises
throw new Error('writing more to entry than is appropriate') this.emit(DESTROYED)
this.blockRemain -= writeLen
return super.write(data)
}
end () { return this
if (this.blockRemain)
super.write(Buffer.alloc(this.blockRemain))
return super.end()
} }
})
WriteEntry.Sync = WriteEntrySync
WriteEntry.Tar = WriteEntryTar
const getType = stat =>
stat.isFile() ? 'File'
: stat.isDirectory() ? 'Directory'
: stat.isSymbolicLink() ? 'SymbolicLink'
: 'Unsupported'
module.exports = WriteEntry static isStream (s) {
return !!s && (s instanceof Minipass || s instanceof Stream ||
s instanceof EE && (
typeof s.pipe === 'function' || // readable
(typeof s.write === 'function' && typeof s.end === 'function') // writable
))
}
}
/***/ }), /***/ }),
...@@ -16275,6 +17574,14 @@ module.exports = require("buffer"); ...@@ -16275,6 +17574,14 @@ module.exports = require("buffer");
/***/ }), /***/ }),
/***/ 2081:
/***/ ((module) => {
"use strict";
module.exports = require("child_process");
/***/ }),
/***/ 6113: /***/ 6113:
/***/ ((module) => { /***/ ((module) => {
......
This source diff could not be displayed because it is too large. You can view the blob instead.
...@@ -520,7 +520,7 @@ minipass ...@@ -520,7 +520,7 @@ minipass
ISC ISC
The ISC License The ISC License
Copyright (c) npm, Inc. and Contributors Copyright (c) 2017-2022 npm, Inc., Isaac Z. Schlueter, and Contributors
Permission to use, copy, modify, and/or distribute this software for any Permission to use, copy, modify, and/or distribute this software for any
purpose with or without fee is hereby granted, provided that the above purpose with or without fee is hereby granted, provided that the above
......
"use strict";
Object.defineProperty(exports, "__esModule", { value: true });
...@@ -32,95 +32,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge ...@@ -32,95 +32,21 @@ var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, ge
}); });
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.generateFeaturesDocumentation = void 0; exports.generateTemplateDocumentation = exports.generateFeaturesDocumentation = void 0;
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const github = __importStar(require("@actions/github"));
const core = __importStar(require("@actions/core")); const core = __importStar(require("@actions/core"));
const path = __importStar(require("path")); const path = __importStar(require("path"));
function generateFeaturesDocumentation(basePath) { const utils_1 = require("./utils");
return __awaiter(this, void 0, void 0, function* () { const FEATURES_README_TEMPLATE = `
fs.readdir(basePath, (err, files) => { # #{Name}
if (err) {
core.error(err.message);
core.setFailed(`failed to generate 'features' documentation ${err.message}`);
return;
}
files.forEach(f => {
core.info(`Generating docs for feature '${f}'`);
if (f !== '.' && f !== '..') {
const readmePath = path.join(basePath, f, 'README.md');
// Reads in feature.json
const featureJsonPath = path.join(basePath, f, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) {
core.error(`devcontainer-feature.json not found at path '${featureJsonPath}'`);
return;
}
let featureJson = undefined;
try {
featureJson = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
}
catch (err) {
core.error(`Failed to parse ${featureJsonPath}: ${err}`);
return;
}
if (!featureJson || !(featureJson === null || featureJson === void 0 ? void 0 : featureJson.id)) {
core.error(`devcontainer-feature.json for feature '${f}' does not contain an 'id'`);
return;
}
const ref = github.context.ref;
const owner = github.context.repo.owner;
const repo = github.context.repo.repo;
// Add tag if parseable
let versionTag = 'latest';
if (ref.includes('refs/tags/')) {
versionTag = ref.replace('refs/tags/', '');
}
const generateOptionsMarkdown = () => {
const options = featureJson === null || featureJson === void 0 ? void 0 : featureJson.options;
if (!options) {
return '';
}
const keys = Object.keys(options);
const contents = keys
.map(k => {
const val = options[k];
return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
})
.join('\n');
return ('| Options Id | Description | Type | Default Value |\n' +
'|-----|-----|-----|-----|\n' +
contents);
};
const newReadme = README_TEMPLATE.replace('#{nwo}', `${owner}/${repo}`)
.replace('#{versionTag}', versionTag)
.replace('#{featureId}', featureJson.id)
.replace('#{featureName}', featureJson.name
? `${featureJson.name} (${featureJson.id})`
: `${featureJson.id}`)
.replace('#{featureDescription}', featureJson.description ? featureJson.description : '')
.replace('#{optionsTable}', generateOptionsMarkdown());
// Remove previous readme
if (fs.existsSync(readmePath)) {
fs.unlinkSync(readmePath);
}
// Write new readme
fs.writeFileSync(readmePath, newReadme);
}
});
});
});
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
const README_TEMPLATE = `
# #{featureName}
#{featureDescription} #{Description}
## Example Usage ## Example Usage
\`\`\`json \`\`\`json
"features": { "features": {
"#{nwo}/#{featureId}@#{versionTag}": { "#{Nwo}/#{Id}@#{VersionTag}": {
"version": "latest" "version": "latest"
} }
} }
...@@ -128,9 +54,105 @@ const README_TEMPLATE = ` ...@@ -128,9 +54,105 @@ const README_TEMPLATE = `
## Options ## Options
#{optionsTable} #{OptionsTable}
--- ---
_Note: This file was auto-generated from the [devcontainer-feature.json](./devcontainer-feature.json)._ _Note: This file was auto-generated from the [devcontainer-feature.json](#{RepoUrl})._
`; `;
const TEMPLATE_README_TEMPLATE = `
# #{Name}
#{Description}
## Options
#{OptionsTable}
`;
function generateFeaturesDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
yield _generateDocumentation(basePath, FEATURES_README_TEMPLATE, 'devcontainer-feature.json');
});
}
exports.generateFeaturesDocumentation = generateFeaturesDocumentation;
function generateTemplateDocumentation(basePath) {
return __awaiter(this, void 0, void 0, function* () {
yield _generateDocumentation(basePath, TEMPLATE_README_TEMPLATE, 'devcontainer-template.json');
});
}
exports.generateTemplateDocumentation = generateTemplateDocumentation;
function _generateDocumentation(basePath, readmeTemplate, metadataFile) {
return __awaiter(this, void 0, void 0, function* () {
const directories = fs.readdirSync(basePath);
yield Promise.all(directories.map((f) => __awaiter(this, void 0, void 0, function* () {
var _a, _b, _c;
if (!f.startsWith('.')) {
const readmePath = path.join(basePath, f, 'README.md');
// Reads in feature.json
const jsonPath = path.join(basePath, f, metadataFile);
if (!fs.existsSync(jsonPath)) {
core.error(`${metadataFile} not found at path '${jsonPath}'`);
return;
}
let parsedJson = undefined;
try {
parsedJson = JSON.parse(fs.readFileSync(jsonPath, 'utf8'));
}
catch (err) {
core.error(`Failed to parse ${jsonPath}: ${err}`);
return;
}
if (!parsedJson || !(parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.id)) {
core.error(`${metadataFile} for '${f}' does not contain an 'id'`);
return;
}
const srcInfo = (0, utils_1.getGitHubMetadata)();
const ref = srcInfo.ref;
const owner = srcInfo.owner;
const repo = srcInfo.repo;
// Add tag if parseable
let versionTag = 'latest';
if (ref && ref.includes('refs/tags/')) {
versionTag = ref.replace('refs/tags/', '');
}
const generateOptionsMarkdown = () => {
const options = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.options;
if (!options) {
return '';
}
const keys = Object.keys(options);
const contents = keys
.map(k => {
const val = options[k];
return `| ${k} | ${val.description || '-'} | ${val.type || '-'} | ${val.default || '-'} |`;
})
.join('\n');
return '| Options Id | Description | Type | Default Value |\n' + '|-----|-----|-----|-----|\n' + contents;
};
let urlToConfig = './devcontainer-feature.json';
const basePathTrimmed = basePath.startsWith('./') ? basePath.substring(2) : basePath;
if (srcInfo.owner && srcInfo.repo) {
urlToConfig = `https://github.com/${srcInfo.owner}/${srcInfo.repo}/blob/main/${basePathTrimmed}/${f}/devcontainer-feature.json`;
}
const newReadme = readmeTemplate
// Templates & Features
.replace('#{Id}', parsedJson.id)
.replace('#{Name}', parsedJson.name ? `${parsedJson.name} (${parsedJson.id})` : `${parsedJson.id}`)
.replace('#{Description}', (_a = parsedJson.description) !== null && _a !== void 0 ? _a : '')
.replace('#{OptionsTable}', generateOptionsMarkdown())
// Features Only
.replace('#{Nwo}', `${owner}/${repo}`)
.replace('#{VersionTag}', versionTag)
// Templates Only
.replace('#{ManifestName}', (_c = (_b = parsedJson === null || parsedJson === void 0 ? void 0 : parsedJson.image) === null || _b === void 0 ? void 0 : _b.manifest) !== null && _c !== void 0 ? _c : '')
.replace('#{RepoUrl}', urlToConfig);
// Remove previous readme
if (fs.existsSync(readmePath)) {
fs.unlinkSync(readmePath);
}
// Write new readme
fs.writeFileSync(readmePath, newReadme);
}
})));
});
}
...@@ -44,42 +44,51 @@ function run() { ...@@ -44,42 +44,51 @@ function run() {
core.debug('Reading input parameters...'); core.debug('Reading input parameters...');
// Read inputs // Read inputs
const shouldPublishFeatures = core.getInput('publish-features').toLowerCase() === 'true'; const shouldPublishFeatures = core.getInput('publish-features').toLowerCase() === 'true';
const shouldPublishTemplate = core.getInput('publish-templates').toLowerCase() === 'true'; const shouldPublishTemplates = core.getInput('publish-templates').toLowerCase() === 'true';
const shouldGenerateDocumentation = core.getInput('generate-docs').toLowerCase() === 'true'; const shouldGenerateDocumentation = core.getInput('generate-docs').toLowerCase() === 'true';
// Experimental
const shouldTagIndividualFeatures = core.getInput('tag-individual-features').toLowerCase() === 'true';
const shouldPublishToNPM = core.getInput('publish-to-npm').toLowerCase() === 'true';
const shouldPublishReleaseArtifacts = core.getInput('publish-release-artifacts').toLowerCase() === 'true';
const shouldPublishToOCI = core.getInput('publish-to-oci').toLowerCase() === 'true';
const opts = {
shouldTagIndividualFeatures,
shouldPublishToNPM,
shouldPublishReleaseArtifacts,
shouldPublishToOCI
};
const featuresBasePath = core.getInput('base-path-to-features');
const templatesBasePath = core.getInput('base-path-to-templates');
let featuresMetadata = undefined; let featuresMetadata = undefined;
let templatesMetadata = undefined; let templatesMetadata = undefined;
// -- Package Release Artifacts
if (shouldPublishFeatures) { if (shouldPublishFeatures) {
core.info('Publishing features...'); core.info('Publishing features...');
const featuresBasePath = core.getInput('base-path-to-features'); featuresMetadata = yield packageFeatures(featuresBasePath, opts);
featuresMetadata = yield packageFeatures(featuresBasePath);
} }
if (shouldPublishTemplate) { if (shouldPublishTemplates) {
core.info('Publishing template...'); core.info('Publishing template...');
const basePathToDefinitions = core.getInput('base-path-to-templates'); templatesMetadata = yield packageTemplates(templatesBasePath);
templatesMetadata = undefined; // TODO
yield packageTemplates(basePathToDefinitions);
} }
if (shouldGenerateDocumentation) { // -- Generate Documentation
core.info('Generating documentation...'); if (shouldGenerateDocumentation && featuresBasePath) {
const featuresBasePath = core.getInput('base-path-to-features'); core.info('Generating documentation for features...');
if (featuresBasePath) { yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath);
yield (0, generateDocs_1.generateFeaturesDocumentation)(featuresBasePath);
}
else {
core.error("'base-path-to-features' input is required to generate documentation");
}
// TODO: base-path-to-templates
} }
// TODO: Programatically add feature/template fino with relevant metadata for UX clients. if (shouldGenerateDocumentation && templatesBasePath) {
core.info('Generation metadata file: devcontainer-collection.json'); core.info('Generating documentation for templates...');
yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata); yield (0, generateDocs_1.generateTemplateDocumentation)(templatesBasePath);
}
// -- Programatically add feature/template metadata to collections file.
core.info('Generating metadata file: devcontainer-collection.json');
yield (0, utils_1.addCollectionsMetadataFile)(featuresMetadata, templatesMetadata, opts);
}); });
} }
function packageFeatures(basePath) { function packageFeatures(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
try { try {
core.info(`Archiving all features in ${basePath}`); core.info(`Archiving all features in ${basePath}`);
const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath); const metadata = yield (0, utils_1.getFeaturesAndPackage)(basePath, opts);
core.info('Packaging features has finished.'); core.info('Packaging features has finished.');
return metadata; return metadata;
} }
...@@ -94,14 +103,17 @@ function packageFeatures(basePath) { ...@@ -94,14 +103,17 @@ function packageFeatures(basePath) {
function packageTemplates(basePath) { function packageTemplates(basePath) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
try { try {
core.info(`Archiving all templated in ${basePath}`); core.info(`Archiving all templates in ${basePath}`);
yield (0, utils_1.getTemplatesAndPackage)(basePath); const metadata = yield (0, utils_1.getTemplatesAndPackage)(basePath);
core.info('Packaging templates has finished.'); core.info('Packaging templates has finished.');
return metadata;
} }
catch (error) { catch (error) {
if (error instanceof Error) if (error instanceof Error) {
core.setFailed(error.message); core.setFailed(error.message);
}
} }
return;
}); });
} }
run(); run();
...@@ -35,11 +35,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) { ...@@ -35,11 +35,12 @@ var __importDefault = (this && this.__importDefault) || function (mod) {
return (mod && mod.__esModule) ? mod : { "default": mod }; return (mod && mod.__esModule) ? mod : { "default": mod };
}; };
Object.defineProperty(exports, "__esModule", { value: true }); Object.defineProperty(exports, "__esModule", { value: true });
exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.addCollectionsMetadataFile = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0; exports.getTemplatesAndPackage = exports.getFeaturesAndPackage = exports.pushCollectionsMetadataToOCI = exports.addCollectionsMetadataFile = exports.getGitHubMetadata = exports.tarDirectory = exports.renameLocal = exports.mkdirLocal = exports.writeLocalFile = exports.readLocalFile = void 0;
const github = __importStar(require("@actions/github")); const github = __importStar(require("@actions/github"));
const tar = __importStar(require("tar")); const tar = __importStar(require("tar"));
const fs = __importStar(require("fs")); const fs = __importStar(require("fs"));
const core = __importStar(require("@actions/core")); const core = __importStar(require("@actions/core"));
const child_process = __importStar(require("child_process"));
const util_1 = require("util"); const util_1 = require("util");
const path_1 = __importDefault(require("path")); const path_1 = __importDefault(require("path"));
exports.readLocalFile = (0, util_1.promisify)(fs.readFile); exports.readLocalFile = (0, util_1.promisify)(fs.readFile);
...@@ -62,23 +63,83 @@ function tarDirectory(path, tgzName) { ...@@ -62,23 +63,83 @@ function tarDirectory(path, tgzName) {
}); });
} }
exports.tarDirectory = tarDirectory; exports.tarDirectory = tarDirectory;
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) { function getGitHubMetadata() {
// Insert github repo metadata
const ref = github.context.ref;
let sourceInformation = {
owner: github.context.repo.owner,
repo: github.context.repo.repo,
ref,
sha: github.context.sha
};
// Add tag if parseable
if (ref.includes('refs/tags/')) {
const tag = ref.replace('refs/tags/', '');
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag });
}
return sourceInformation;
}
exports.getGitHubMetadata = getGitHubMetadata;
function tagFeatureAtVersion(featureMetaData) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const p = path_1.default.join('.', 'devcontainer-collection.json'); const featureId = featureMetaData.id;
// Insert github repo metadata const featureVersion = featureMetaData.version;
const ref = github.context.ref; const tagName = `${featureId}_v${featureVersion}`;
let sourceInformation = { // Get GITHUB_TOKEN from environment
source: 'github', const githubToken = process.env.GITHUB_TOKEN;
if (!githubToken) {
core.setFailed('GITHUB_TOKEN environment variable is not set.');
return;
}
// Setup Octokit client
const octokit = github.getOctokit(githubToken);
// Use octokit to get all tags for this repo
const tags = yield octokit.rest.repos.listTags({
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
// See if tags for this release was already created.
const tagExists = tags.data.some(tag => tag.name === tagName);
if (tagExists) {
core.info(`Tag ${tagName} already exists. Skipping...`);
return;
}
// Create tag
const createdTag = yield octokit.rest.git.createTag({
tag: tagName,
message: `Feature ${featureId} version ${featureVersion}`,
object: github.context.sha,
type: 'commit',
owner: github.context.repo.owner,
repo: github.context.repo.repo
});
if (createdTag.status === 201) {
core.info(`Tagged '${tagName}'`);
}
else {
core.setFailed(`Failed to tag '${tagName}'`);
return;
}
// Create reference to tag
const createdRef = yield octokit.rest.git.createRef({
owner: github.context.repo.owner, owner: github.context.repo.owner,
repo: github.context.repo.repo, repo: github.context.repo.repo,
ref, ref: `refs/tags/${tagName}`,
sha: github.context.sha sha: createdTag.data.sha
}; });
// Add tag if parseable if (createdRef.status === 201) {
if (ref.includes('refs/tags/')) { core.info(`Created reference for '${tagName}'`);
const tag = ref.replace('refs/tags/', '');
sourceInformation = Object.assign(Object.assign({}, sourceInformation), { tag });
} }
else {
core.setFailed(`Failed to reference of tag '${tagName}'`);
return;
}
});
}
function addCollectionsMetadataFile(featuresMetadata, templatesMetadata, opts) {
return __awaiter(this, void 0, void 0, function* () {
const p = path_1.default.join('.', 'devcontainer-collection.json');
const sourceInformation = getGitHubMetadata();
const metadata = { const metadata = {
sourceInformation, sourceInformation,
features: featuresMetadata || [], features: featuresMetadata || [],
...@@ -86,27 +147,148 @@ function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) { ...@@ -86,27 +147,148 @@ function addCollectionsMetadataFile(featuresMetadata, templatesMetadata) {
}; };
// Write to the file // Write to the file
yield (0, exports.writeLocalFile)(p, JSON.stringify(metadata, undefined, 4)); yield (0, exports.writeLocalFile)(p, JSON.stringify(metadata, undefined, 4));
if (opts.shouldPublishToOCI) {
pushCollectionsMetadataToOCI(p);
}
}); });
} }
exports.addCollectionsMetadataFile = addCollectionsMetadataFile; exports.addCollectionsMetadataFile = addCollectionsMetadataFile;
function getFeaturesAndPackage(basePath) { function pushArtifactToOCI(version, featureName, artifactPath) {
return __awaiter(this, void 0, void 0, function* () { return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
const versions = [version, '1.0', '1', 'latest']; // TODO: Generate semantic versions from 'version'
const sourceInfo = getGitHubMetadata();
yield Promise.all(versions.map((v) => __awaiter(this, void 0, void 0, function* () {
const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}/${featureName}:${v}`;
try {
const cmd = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${artifactPath}:application/vnd.devcontainers.layer.v1+tar`;
yield exec(cmd);
core.info(`Pushed artifact to '${ociRepo}'`);
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Failed to push '${ociRepo}': ${error.message}`);
}
})));
});
}
function pushCollectionsMetadataToOCI(collectionJsonPath) {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
const sourceInfo = getGitHubMetadata();
const ociRepo = `${sourceInfo.owner}/${sourceInfo.repo}:latest`;
try {
const cmd = `oras push ghcr.io/${ociRepo} \
--manifest-config /dev/null:application/vnd.devcontainers \
./${collectionJsonPath}:application/vnd.devcontainers.collection.layer.v1+json`;
yield exec(cmd);
core.info(`Pushed collection metadata to '${ociRepo}'`);
}
catch (error) {
if (error instanceof Error)
core.setFailed(`Failed to push collection metadata '${ociRepo}': ${error.message}`);
}
});
}
exports.pushCollectionsMetadataToOCI = pushCollectionsMetadataToOCI;
function loginToGHCR() {
return __awaiter(this, void 0, void 0, function* () {
const exec = (0, util_1.promisify)(child_process.exec);
// Get GITHUB_TOKEN from environment
const githubToken = process.env.GITHUB_TOKEN;
if (!githubToken) {
core.setFailed('GITHUB_TOKEN environment variable is not set.');
return;
}
try {
yield exec(`oras login ghcr.io -u USERNAME -p ${githubToken}`);
core.info('Oras logged in successfully!');
}
catch (error) {
if (error instanceof Error)
core.setFailed(` Oras login failed!`);
}
});
}
function getFeaturesAndPackage(basePath, opts) {
return __awaiter(this, void 0, void 0, function* () {
const { shouldPublishToNPM, shouldTagIndividualFeatures, shouldPublishReleaseArtifacts, shouldPublishToOCI } = opts;
const featureDirs = fs.readdirSync(basePath); const featureDirs = fs.readdirSync(basePath);
let metadatas = []; let metadatas = [];
const exec = (0, util_1.promisify)(child_process.exec);
if (shouldPublishToOCI) {
yield loginToGHCR();
}
yield Promise.all(featureDirs.map((f) => __awaiter(this, void 0, void 0, function* () { yield Promise.all(featureDirs.map((f) => __awaiter(this, void 0, void 0, function* () {
var _a;
core.info(`feature ==> ${f}`); core.info(`feature ==> ${f}`);
if (f !== '.' && f !== '..') { if (!f.startsWith('.')) {
const featureFolder = path_1.default.join(basePath, f); const featureFolder = path_1.default.join(basePath, f);
const archiveName = `${f}.tgz`;
yield tarDirectory(`${basePath}/${f}`, archiveName);
const featureJsonPath = path_1.default.join(featureFolder, 'devcontainer-feature.json'); const featureJsonPath = path_1.default.join(featureFolder, 'devcontainer-feature.json');
if (!fs.existsSync(featureJsonPath)) { if (!fs.existsSync(featureJsonPath)) {
core.error(`Feature ${f} is missing a devcontainer-feature.json`); core.error(`Feature '${f}' is missing a devcontainer-feature.json`);
core.setFailed('All features must have a devcontainer-feature.json'); core.setFailed('All features must have a devcontainer-feature.json');
return; return;
} }
const featureMetadata = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8')); const featureMetadata = JSON.parse(fs.readFileSync(featureJsonPath, 'utf8'));
if (!featureMetadata.id || !featureMetadata.version) {
core.error(`Feature '${f}' is must defined an id and version`);
core.setFailed('Incomplete devcontainer-feature.json');
}
metadatas.push(featureMetadata); metadatas.push(featureMetadata);
const sourceInfo = getGitHubMetadata();
if (!sourceInfo.owner) {
core.setFailed('Could not determine repository owner.');
return;
}
const archiveName = `${f}.tgz`;
// ---- PUBLISH RELEASE ARTIFACTS (classic method) ----
if (shouldPublishReleaseArtifacts || shouldPublishToOCI) {
core.info(`** Tar'ing feature`);
yield tarDirectory(featureFolder, archiveName);
}
// ---- PUBLISH TO NPM ----
if (shouldPublishToOCI) {
core.info(`** Publishing to OCI`);
// TODO: CHECK IF THE FEATURE IS ALREADY PUBLISHED UNDER GIVEN TAG
yield pushArtifactToOCI(featureMetadata.version, f, archiveName);
}
// ---- TAG INDIVIDUAL FEATURES ----
if (shouldTagIndividualFeatures) {
core.info(`** Tagging individual feature`);
yield tagFeatureAtVersion(featureMetadata);
}
// ---- PUBLISH TO NPM ----
if (shouldPublishToNPM) {
core.info(`** Publishing to NPM`);
// Adds a package.json file to the feature folder
const packageJsonPath = path_1.default.join(featureFolder, 'package.json');
// if (!sourceInfo.tag) {
// core.error(`Feature ${f} is missing a tag! Cannot publish to NPM.`);
// core.setFailed('All features published to NPM must be tagged with a version');
// }
const packageJsonObject = {
name: `@${sourceInfo.owner}/${f}`,
version: featureMetadata.version,
description: `${(_a = featureMetadata.description) !== null && _a !== void 0 ? _a : 'My cool feature'}`,
author: `${sourceInfo.owner}`,
keywords: ['devcontainer-features']
};
yield (0, exports.writeLocalFile)(packageJsonPath, JSON.stringify(packageJsonObject, undefined, 4));
core.info(`Feature Folder is: ${featureFolder}`);
// Run npm pack, which 'tars' the folder
const packageName = yield exec(`npm pack ./${featureFolder}`);
if (packageName.stderr) {
core.error(`${packageName.stderr.toString()}`);
}
const publishOutput = yield exec(`npm publish --access public "${packageName.stdout.trim()}"`);
core.info(publishOutput.stdout);
if (publishOutput.stderr) {
core.error(`${publishOutput.stderr}`);
}
}
} }
}))); })));
if (metadatas.length === 0) { if (metadatas.length === 0) {
...@@ -119,23 +301,29 @@ function getFeaturesAndPackage(basePath) { ...@@ -119,23 +301,29 @@ function getFeaturesAndPackage(basePath) {
exports.getFeaturesAndPackage = getFeaturesAndPackage; exports.getFeaturesAndPackage = getFeaturesAndPackage;
/**
 * Scans `basePath` for dev container template folders and collects the
 * parsed `devcontainer-template.json` metadata of each one.
 *
 * Fails the workflow step (via core.setFailed) when a template folder is
 * missing its devcontainer-template.json, or when no templates are found.
 *
 * @param {string} basePath - folder whose direct children are template directories
 * @returns {Promise<object[]|undefined>} parsed template metadata objects,
 *          or undefined when the scan failed / found nothing
 */
function getTemplatesAndPackage(basePath) {
    return __awaiter(this, void 0, void 0, function* () {
        const templateDirs = fs.readdirSync(basePath);
        let metadatas = [];
        yield Promise.all(templateDirs.map((t) => __awaiter(this, void 0, void 0, function* () {
            core.info(`template ==> ${t}`);
            // Skip dot-entries ('.', '..', '.git', ...): they are not templates.
            if (!t.startsWith('.')) {
                const templateFolder = path_1.default.join(basePath, t);
                const templateJsonPath = path_1.default.join(templateFolder, 'devcontainer-template.json');
                if (!fs.existsSync(templateJsonPath)) {
                    core.error(`Template '${t}' is missing a devcontainer-template.json`);
                    core.setFailed('All templates must have a devcontainer-template.json');
                    return;
                }
                const templateMetadata = JSON.parse(fs.readFileSync(templateJsonPath, 'utf8'));
                metadatas.push(templateMetadata);
            }
        })));
        if (metadatas.length === 0) {
            core.setFailed('No templates found');
            return;
        }
        return metadatas;
    });
}
exports.getTemplatesAndPackage = getTemplatesAndPackage;
name: "(Release) Release dev container features (v2)"
on:
  push:
    tags:
      - "v*"
  workflow_dispatch:
jobs:
  deploy:
    # NOTE(review): tag pushes produce refs like 'refs/tags/v1.0.0', which can
    # never equal 'refs/heads/main' -- as written, this job effectively runs
    # only for a workflow_dispatch triggered from the main branch. Confirm
    # that skipping tag-push runs is intentional.
    if: ${{ github.ref == 'refs/heads/main' }}
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Install Oras
        run: |
          curl -LO https://github.com/oras-project/oras/releases/download/v0.13.0/oras_0.13.0_linux_amd64.tar.gz
          mkdir -p oras-install/
          tar -zxf oras_0.13.0_*.tar.gz -C oras-install/
          mv oras-install/oras /usr/local/bin/
          rm -rf oras_0.13.0_*.tar.gz oras-install/
      - name: "Publish features to OCI"
        uses: ./.github/devcontainers-action # TODO: Once 'devcontainers/action' is published, use that.
        with:
          publish-features: "true"
          publish-to-oci: "true"
          base-path-to-features: "./src"
        env:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
{ {
"id": "anaconda", "id": "anaconda",
"version": "1.0.0",
"name": "Anaconda", "name": "Anaconda",
"options": { "options": {
"version": { "version": {
...@@ -14,9 +15,5 @@ ...@@ -14,9 +15,5 @@
"containerEnv": { "containerEnv": {
"CONDA_DIR": "/usr/local/conda", "CONDA_DIR": "/usr/local/conda",
"PATH": "${PATH}:${CONDA_DIR}/bin:" "PATH": "${PATH}:${CONDA_DIR}/bin:"
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "aws-cli", "id": "aws-cli",
"version": "1.0.0",
"name": "AWS CLI", "name": "AWS CLI",
"description": "Installs the AWS CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.", "description": "Installs the AWS CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
"options": { "options": {
...@@ -14,9 +15,5 @@ ...@@ -14,9 +15,5 @@
}, },
"extensions": [ "extensions": [
"AmazonWebServices.aws-toolkit-vscode" "AmazonWebServices.aws-toolkit-vscode"
], ]
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "azure-cli", "id": "azure-cli",
"version": "1.0.0",
"name": "Azure CLI", "name": "Azure CLI",
"description": "Installs the Azure CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.", "description": "Installs the Azure CLI along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
"options": { "options": {
...@@ -14,9 +15,5 @@ ...@@ -14,9 +15,5 @@
}, },
"extensions": [ "extensions": [
"ms-vscode.azurecli" "ms-vscode.azurecli"
], ]
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "common-utils", "id": "common-utils",
"name": "Common Debian Utilities", "name": "Common Debian Utilities",
"version": "1.0.0",
"description": "Installs a set of common command line utilities, Oh My Zsh!, and sets up a non-root user.", "description": "Installs a set of common command line utilities, Oh My Zsh!, and sets up a non-root user.",
"options": { "options": {
"install_Zsh": { "install_Zsh": {
...@@ -55,9 +56,5 @@ ...@@ -55,9 +56,5 @@
}, },
"extensions": [ "extensions": [
"ms-dotnettools.csharp" "ms-dotnettools.csharp"
], ]
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "desktop-lite", "id": "desktop-lite",
"version": "1.0.0",
"name": "Light-weight Desktop", "name": "Light-weight Desktop",
"description": "Adds a lightweight Fluxbox based desktop to the container that can be accessed using a VNC viewer or the web. GUI-based commands executed from the built-in VS code terminal will open on the desktop automatically.", "description": "Adds a lightweight Fluxbox based desktop to the container that can be accessed using a VNC viewer or the web. GUI-based commands executed from the built-in VS code terminal will open on the desktop automatically.",
"options": { "options": {
...@@ -50,9 +51,5 @@ ...@@ -50,9 +51,5 @@
"entrypoint": "/usr/local/share/desktop-init.sh", "entrypoint": "/usr/local/share/desktop-init.sh",
"containerEnv": { "containerEnv": {
"DISPLAY": ":1" "DISPLAY": ":1"
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "docker-from-docker", "id": "docker-from-docker",
"version": "1.0.0",
"name": "Docker (Docker-from-Docker)", "name": "Docker (Docker-from-Docker)",
"description": "Re-use the host docker socket, adding the Docker CLI to a container. Feature invokes a script to enable using a forwarded Docker socket within a container to run Docker commands.", "description": "Re-use the host docker socket, adding the Docker CLI to a container. Feature invokes a script to enable using a forwarded Docker socket within a container to run Docker commands.",
"options": { "options": {
...@@ -41,9 +42,5 @@ ...@@ -41,9 +42,5 @@
"target": "/var/run/docker-host.sock", "target": "/var/run/docker-host.sock",
"type": "bind" "type": "bind"
} }
], ]
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "docker-in-docker", "id": "docker-in-docker",
"version": "1.0.0",
"name": "Docker (Docker-in-Docker)", "name": "Docker (Docker-in-Docker)",
"description": "Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.", "description": "Create child containers *inside* a container, independent from the host's docker instance. Installs Docker extension in the container along with needed CLIs.",
"options": { "options": {
...@@ -42,9 +43,5 @@ ...@@ -42,9 +43,5 @@
"target": "/var/lib/docker", "target": "/var/lib/docker",
"type": "volume" "type": "volume"
} }
], ]
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "dotnet", "id": "dotnet",
"version": "1.0.0",
"name": "Dotnet CLI", "name": "Dotnet CLI",
"description": "Installs the .NET CLI. Provides option of installing sdk or runtime, and option of versions to install. Uses latest version of .NET sdk as defaults to install.", "description": "Installs the .NET CLI. Provides option of installing sdk or runtime, and option of versions to install. Uses latest version of .NET sdk as defaults to install.",
"options": { "options": {
......
{ {
"id": "git-lfs", "id": "git-lfs",
"version": "1.0.0",
"name": "Git Large File Support (LFS)", "name": "Git Large File Support (LFS)",
"description": "Installs Git Large File Support (Git LFS) along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like git and curl.", "description": "Installs Git Large File Support (Git LFS) along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like git and curl.",
"options": { "options": {
...@@ -12,9 +13,5 @@ ...@@ -12,9 +13,5 @@
"default": "latest", "default": "latest",
"description": "Select version of Git LFS to install" "description": "Select version of Git LFS to install"
} }
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "git", "id": "git",
"version": "1.0.0",
"name": "Git (from source)", "name": "Git (from source)",
"description": "Install an up-to-date version of Git, built from source as needed. Useful for when you want the latest and greatest features. Auto-detects latest stable version and installs needed dependencies.", "description": "Install an up-to-date version of Git, built from source as needed. Useful for when you want the latest and greatest features. Auto-detects latest stable version and installs needed dependencies.",
"options": { "options": {
...@@ -17,9 +18,5 @@ ...@@ -17,9 +18,5 @@
"default": true, "default": true,
"description": "Install from PPA if available" "description": "Install from PPA if available"
} }
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "github-cli", "id": "github-cli",
"version": "1.0.0",
"name": "GitHub CLI", "name": "GitHub CLI",
"description": "Installs the GitHub CLI. Auto-detects latest version and installs needed dependencies.", "description": "Installs the GitHub CLI. Auto-detects latest version and installs needed dependencies.",
"options": { "options": {
...@@ -12,9 +13,5 @@ ...@@ -12,9 +13,5 @@
"default": "latest", "default": "latest",
"description": "Select version of the GitHub CLI, if not latest." "description": "Select version of the GitHub CLI, if not latest."
} }
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "go", "id": "go",
"version": "1.0.0",
"name": "Go", "name": "Go",
"description": "Installs Go and common Go utilities. Auto-detects latest version and installs needed dependencies.", "description": "Installs Go and common Go utilities. Auto-detects latest version and installs needed dependencies.",
"options": { "options": {
...@@ -28,9 +29,5 @@ ...@@ -28,9 +29,5 @@
], ],
"securityOpt": [ "securityOpt": [
"seccomp=unconfined" "seccomp=unconfined"
], ]
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "hugo", "id": "hugo",
"version": "1.0.0",
"name": "Hugo", "name": "Hugo",
"options": { "options": {
"version": { "version": {
...@@ -14,9 +15,5 @@ ...@@ -14,9 +15,5 @@
"containerEnv": { "containerEnv": {
"HUGO_DIR": "/usr/local/hugo", "HUGO_DIR": "/usr/local/hugo",
"PATH": "${HUGO_DIR}/bin:${PATH}" "PATH": "${HUGO_DIR}/bin:${PATH}"
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "java", "id": "java",
"version": "1.0.0",
"name": "Java (via SDKMAN!)", "name": "Java (via SDKMAN!)",
"description": "Installs Java, SDKMAN! (if not installed), and needed dependencies.", "description": "Installs Java, SDKMAN! (if not installed), and needed dependencies.",
"options": { "options": {
......
{ {
"id": "kubectl-helm-minikube", "id": "kubectl-helm-minikube",
"version": "1.0.0",
"name": "Kubectl, Helm, and Minkube", "name": "Kubectl, Helm, and Minkube",
"description": "Installs latest version of kubectl, Helm, and optionally minikube. Auto-detects latest versions and installs needed dependencies.", "description": "Installs latest version of kubectl, Helm, and optionally minikube. Auto-detects latest versions and installs needed dependencies.",
"options": { "options": {
...@@ -41,9 +42,5 @@ ...@@ -41,9 +42,5 @@
"target": "/home/vscode/.minikube", "target": "/home/vscode/.minikube",
"type": "volume" "type": "volume"
} }
], ]
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "node", "id": "node",
"version": "1.0.0",
"name": "Node.js (via nvm) and yarn", "name": "Node.js (via nvm) and yarn",
"description": "Installs Node.js, nvm, yarn, and needed dependencies.", "description": "Installs Node.js, nvm, yarn, and needed dependencies.",
"options": { "options": {
......
{ {
"id": "oryx", "id": "oryx",
"version": "1.0.0",
"name": "Oryx", "name": "Oryx",
"description": "Installs the oryx CLI", "description": "Installs the oryx CLI",
"containerEnv": { "containerEnv": {
......
{ {
"id": "php", "id": "php",
"version": "1.0.0",
"name": "PHP", "name": "PHP",
"options": { "options": {
"version": { "version": {
......
{ {
"id": "powershell", "id": "powershell",
"version": "1.0.0",
"name": "PowerShell", "name": "PowerShell",
"description": "Installs PowerShell along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.", "description": "Installs PowerShell along with needed dependencies. Useful for base Dockerfiles that often are missing required install dependencies like gpg.",
"options": { "options": {
...@@ -13,9 +14,5 @@ ...@@ -13,9 +14,5 @@
"default": "latest", "default": "latest",
"description": "Select or enter a version of PowerShell." "description": "Select or enter a version of PowerShell."
} }
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "python", "id": "python",
"version": "1.0.0",
"name": "Python", "name": "Python",
"description": "Installs the provided version of Python, as well as PIPX, and other common Python utilities. JupyterLab is conditionally installed with the python feature. Note: May require source code compilation.", "description": "Installs the provided version of Python, as well as PIPX, and other common Python utilities. JupyterLab is conditionally installed with the python feature. Note: May require source code compilation.",
"options": { "options": {
......
{ {
"id": "ruby", "id": "ruby",
"version": "1.0.0",
"name": "Ruby (via rvm)", "name": "Ruby (via rvm)",
"description": "Installs Ruby, rvm, rbenv, common Ruby utilities, and needed dependencies.", "description": "Installs Ruby, rvm, rbenv, common Ruby utilities, and needed dependencies.",
"options": { "options": {
......
{ {
"id": "rust", "id": "rust",
"version": "1.0.0",
"name": "Rust", "name": "Rust",
"description": "Installs Rust, common Rust utilities, and their required dependencies", "description": "Installs Rust, common Rust utilities, and their required dependencies",
"options": { "options": {
...@@ -50,9 +51,5 @@ ...@@ -50,9 +51,5 @@
"**/target/**": true "**/target/**": true
}, },
"rust-analyzer.checkOnSave.command": "clippy" "rust-analyzer.checkOnSave.command": "clippy"
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
{ {
"id": "sshd", "id": "sshd",
"version": "1.0.0",
"name": "SSH server", "name": "SSH server",
"description": "Adds a SSH server into a container so that you can use an external terminal, sftp, or SSHFS to interact with it.", "description": "Adds a SSH server into a container so that you can use an external terminal, sftp, or SSHFS to interact with it.",
"options": { "options": {
...@@ -12,9 +13,5 @@ ...@@ -12,9 +13,5 @@
"description": "Currently unused." "description": "Currently unused."
} }
}, },
"entrypoint": "/usr/local/share/ssh-init.sh", "entrypoint": "/usr/local/share/ssh-init.sh"
"install": { }
"app": "",
"file": "install.sh"
}
}
\ No newline at end of file
{ {
"id": "terraform", "id": "terraform",
"version": "1.0.0",
"name": "Terraform, tflint, and TFGrunt", "name": "Terraform, tflint, and TFGrunt",
"description": "Installs the Terraform CLI and optionally TFLint and Terragrunt. Auto-detects latest version and installs needed dependencies.", "description": "Installs the Terraform CLI and optionally TFLint and Terragrunt. Auto-detects latest version and installs needed dependencies.",
"options": { "options": {
...@@ -42,9 +43,5 @@ ...@@ -42,9 +43,5 @@
"args": [] "args": []
}, },
"azureTerraform.terminal": "integrated" "azureTerraform.terminal": "integrated"
},
"install": {
"app": "",
"file": "install.sh"
} }
} }
\ No newline at end of file
Markdown is supported
0% or
You are about to add 0 people to the discussion. Proceed with caution.
Finish editing this message first!
Please register or sign in to comment