diff --git a/CHANGELOG.md b/CHANGELOG.md
index ef2d55f9..19b917d5 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,21 @@
# Changelog
+## [3.0.0](https://github.com/OctopusDeploy/create-release-action/compare/v2.1.0...v3.0.0) (2022-12-13)
+
+
+### ⚠ BREAKING CHANGES
+
+* Update action to use the native API client
+
+### Features
+
+* Update action to use the native API client ([982b7d4](https://github.com/OctopusDeploy/create-release-action/commit/982b7d4ab80a7c5f43aa38a90c9092c2b72768e8))
+
+
+### Bug Fixes
+
+* updated dependencies ([c10ca9e](https://github.com/OctopusDeploy/create-release-action/commit/c10ca9e5a3d9e030bba775cd474c54bbe0aab4e9))
+
## [2.1.0](https://github.com/OctopusDeploy/create-release-action/compare/v2.0.1...v2.1.0) (2022-11-03)
diff --git a/dist/index.js b/dist/index.js
index 83716564..0be51edc 100644
--- a/dist/index.js
+++ b/dist/index.js
@@ -3441,10 +3441,425 @@ var __exportStar = (this && this.__exportStar) || function(m, exports) {
for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
};
Object.defineProperty(exports, "__esModule", ({ value: true }));
+__exportStar(__nccwpck_require__(3877), exports);
__exportStar(__nccwpck_require__(714), exports);
__exportStar(__nccwpck_require__(7857), exports);
+/***/ }),
+
+/***/ 3877:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ var desc = Object.getOwnPropertyDescriptor(m, k);
+ if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) {
+ desc = { enumerable: true, get: function() { return m[k]; } };
+ }
+ Object.defineProperty(o, k2, desc);
+}) : (function(o, m, k, k2) {
+ if (k2 === undefined) k2 = k;
+ o[k2] = m[k];
+}));
+var __exportStar = (this && this.__exportStar) || function(m, exports) {
+ for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p);
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+__exportStar(__nccwpck_require__(8255), exports);
+__exportStar(__nccwpck_require__(2778), exports);
+__exportStar(__nccwpck_require__(3599), exports);
+
+
+/***/ }),
+
+/***/ 8255:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __generator = (this && this.__generator) || function (thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.NuGetPackageBuilder = void 0;
+var zipUtils_1 = __nccwpck_require__(7273);
+var fs_1 = __importDefault(__nccwpck_require__(7147));
+var path_1 = __importDefault(__nccwpck_require__(1017));
+var NuGetPackageBuilder = /** @class */ (function () {
+ function NuGetPackageBuilder() {
+ }
+ NuGetPackageBuilder.prototype.pack = function (args) {
+ return __awaiter(this, void 0, void 0, function () {
+ var archiveFilename, inputFilePatterns, nuspecFilename, nuspecFile;
+ return __generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ archiveFilename = "".concat(args.packageId, ".").concat(args.version, ".nupkg");
+ inputFilePatterns = args.inputFilePatterns;
+ if (args.nuspecArgs) {
+ nuspecFilename = "".concat(args.packageId, ".nuspec");
+ nuspecFile = path_1.default.join(args.basePath, nuspecFilename);
+ fs_1.default.writeFileSync(nuspecFile, '<?xml version="1.0" encoding="utf-8"?>\n');
+ fs_1.default.appendFileSync(nuspecFile, '<package xmlns="http://schemas.microsoft.com/packaging/2010/07/nuspec.xsd">\n');
+ fs_1.default.appendFileSync(nuspecFile, "  <metadata>\n");
+ fs_1.default.appendFileSync(nuspecFile, "    <id>".concat(args.packageId, "</id>\n"));
+ fs_1.default.appendFileSync(nuspecFile, "    <version>".concat(args.version, "</version>\n"));
+ fs_1.default.appendFileSync(nuspecFile, "    <description>".concat(args.nuspecArgs.description, "</description>\n"));
+ fs_1.default.appendFileSync(nuspecFile, "    <authors>".concat(args.nuspecArgs.authors.join(","), "</authors>\n"));
+ if (args.nuspecArgs.releaseNotes) {
+ fs_1.default.appendFileSync(nuspecFile, "    <releaseNotes>".concat(args.nuspecArgs.releaseNotes, "</releaseNotes>\n"));
+ }
+ fs_1.default.appendFileSync(nuspecFile, "  </metadata>\n");
+ fs_1.default.appendFileSync(nuspecFile, "</package>\n");
+ // include the nuspec into the package
+ inputFilePatterns.push(nuspecFilename);
+ }
+ return [4 /*yield*/, (0, zipUtils_1.doZip)(args.basePath, inputFilePatterns, args.outputFolder, archiveFilename, args.logger, 8, args.overwrite)];
+ case 1:
+ _a.sent();
+ return [2 /*return*/, archiveFilename];
+ }
+ });
+ });
+ };
+ return NuGetPackageBuilder;
+}());
+exports.NuGetPackageBuilder = NuGetPackageBuilder;
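+// Illustrative sketch only: a hedged example of calling pack(), with the argument
+// shape inferred from the fields read above (packageId, version, basePath,
+// inputFilePatterns, outputFolder, overwrite, logger, optional nuspecArgs). The
+// concrete values here are hypothetical.
+//
+//   const builder = new NuGetPackageBuilder();
+//   const archive = await builder.pack({
+//       packageId: "MyApp",
+//       version: "1.0.0",
+//       basePath: ".",
+//       inputFilePatterns: ["**/*"],
+//       outputFolder: "out",
+//       overwrite: true,
+//       logger: { info: console.log, debug: console.log },
+//       nuspecArgs: { description: "Example package", authors: ["Example Author"] },
+//   });
+//   // archive === "MyApp.1.0.0.nupkg"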
+
+
+/***/ }),
+
+/***/ 2778:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+
+
+/***/ }),
+
+/***/ 3599:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __generator = (this && this.__generator) || function (thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.ZipPackageBuilder = void 0;
+var zipUtils_1 = __nccwpck_require__(7273);
+var ZipPackageBuilder = /** @class */ (function () {
+ function ZipPackageBuilder() {
+ }
+ ZipPackageBuilder.prototype.pack = function (args) {
+ return __awaiter(this, void 0, void 0, function () {
+ var archiveFilename;
+ return __generator(this, function (_a) {
+ switch (_a.label) {
+ case 0:
+ archiveFilename = "".concat(args.packageId, ".").concat(args.version, ".zip");
+ return [4 /*yield*/, (0, zipUtils_1.doZip)(args.basePath, args.inputFilePatterns, args.outputFolder, archiveFilename, args.logger, args.compressionLevel, args.overwrite)];
+ case 1:
+ _a.sent();
+ return [2 /*return*/, archiveFilename];
+ }
+ });
+ });
+ };
+ return ZipPackageBuilder;
+}());
+exports.ZipPackageBuilder = ZipPackageBuilder;
+
+
+/***/ }),
+
+/***/ 7273:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+var __generator = (this && this.__generator) || function (thisArg, body) {
+ var _ = { label: 0, sent: function() { if (t[0] & 1) throw t[1]; return t[1]; }, trys: [], ops: [] }, f, y, t, g;
+ return g = { next: verb(0), "throw": verb(1), "return": verb(2) }, typeof Symbol === "function" && (g[Symbol.iterator] = function() { return this; }), g;
+ function verb(n) { return function (v) { return step([n, v]); }; }
+ function step(op) {
+ if (f) throw new TypeError("Generator is already executing.");
+ while (_) try {
+ if (f = 1, y && (t = op[0] & 2 ? y["return"] : op[0] ? y["throw"] || ((t = y["return"]) && t.call(y), 0) : y.next) && !(t = t.call(y, op[1])).done) return t;
+ if (y = 0, t) op = [op[0] & 2, t.value];
+ switch (op[0]) {
+ case 0: case 1: t = op; break;
+ case 4: _.label++; return { value: op[1], done: false };
+ case 5: _.label++; y = op[1]; op = [0]; continue;
+ case 7: op = _.ops.pop(); _.trys.pop(); continue;
+ default:
+ if (!(t = _.trys, t = t.length > 0 && t[t.length - 1]) && (op[0] === 6 || op[0] === 2)) { _ = 0; continue; }
+ if (op[0] === 3 && (!t || (op[1] > t[0] && op[1] < t[3]))) { _.label = op[1]; break; }
+ if (op[0] === 6 && _.label < t[1]) { _.label = t[1]; t = op; break; }
+ if (t && _.label < t[2]) { _.label = t[2]; _.ops.push(op); break; }
+ if (t[2]) _.ops.pop();
+ _.trys.pop(); continue;
+ }
+ op = body.call(thisArg, _);
+ } catch (e) { op = [6, e]; y = 0; } finally { f = t = 0; }
+ if (op[0] & 5) throw op[1]; return { value: op[0] ? op[1] : void 0, done: true };
+ }
+};
+var __values = (this && this.__values) || function(o) {
+ var s = typeof Symbol === "function" && Symbol.iterator, m = s && o[s], i = 0;
+ if (m) return m.call(o);
+ if (o && typeof o.length === "number") return {
+ next: function () {
+ if (o && i >= o.length) o = void 0;
+ return { value: o && o[i++], done: !o };
+ }
+ };
+ throw new TypeError(s ? "Object is not iterable." : "Symbol.iterator is not defined.");
+};
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.doZip = void 0;
+var adm_zip_1 = __importDefault(__nccwpck_require__(6761));
+var fs_1 = __importDefault(__nccwpck_require__(7147));
+var glob_1 = __nccwpck_require__(2782);
+var path_1 = __importDefault(__nccwpck_require__(1017));
+var util_1 = __nccwpck_require__(3837);
+var globp = (0, util_1.promisify)(glob_1.glob);
+/**
+ * Creates a Zip file with a given filename from the inputFilePatterns.
+ *
+ * @param {string} basePath The base path for the input files.
+ * @param {string[]} inputFilePatterns Array of input file patterns, relative to the basePath. Specific files and globbing patterns are both supported.
+ * @param {string} outputFolder The folder to write the resulting Zip file to.
+ * @param {string} zipFilename The name of the Zip file to create.
+ * @param {Logger} logger Logger implementation for writing debug and info messages
+ * @param {number} compressionLevel Optional override for the compression level. Defaults to 8 if not specified.
+ * @param {boolean} overwrite Whether to overwrite the Zip file if it already exists. Defaults to true if not specified.
+ */
+function doZip(basePath, inputFilePatterns, outputFolder, zipFilename, logger, compressionLevel, overwrite) {
+ var _a, _b, _c;
+ return __awaiter(this, void 0, void 0, function () {
+ var archivePath, initialWorkingDirectory, zip, files, files_1, files_1_1, file, dirName;
+ var e_1, _d;
+ return __generator(this, function (_e) {
+ switch (_e.label) {
+ case 0:
+ archivePath = path_1.default.resolve(outputFolder, zipFilename);
+ (_a = logger.info) === null || _a === void 0 ? void 0 : _a.call(logger, "Writing to package: ".concat(archivePath, "..."));
+ initialWorkingDirectory = process.cwd();
+ process.chdir(path_1.default.resolve(initialWorkingDirectory, basePath));
+ zip = new adm_zip_1.default();
+ return [4 /*yield*/, expandGlobs(inputFilePatterns)];
+ case 1:
+ files = _e.sent();
+ try {
+ for (files_1 = __values(files), files_1_1 = files_1.next(); !files_1_1.done; files_1_1 = files_1.next()) {
+ file = files_1_1.value;
+ (_b = logger.debug) === null || _b === void 0 ? void 0 : _b.call(logger, "Adding file: ".concat(file, "..."));
+ if (fs_1.default.lstatSync(file).isDirectory()) {
+ zip.addFile("".concat(file, "/"), Buffer.from([0x00]));
+ }
+ else {
+ dirName = path_1.default.dirname(file);
+ zip.addLocalFile(file, dirName === "." ? "" : dirName);
+ }
+ }
+ }
+ catch (e_1_1) { e_1 = { error: e_1_1 }; }
+ finally {
+ try {
+ if (files_1_1 && !files_1_1.done && (_d = files_1.return)) _d.call(files_1);
+ }
+ finally { if (e_1) throw e_1.error; }
+ }
+ if (compressionLevel) {
+ (_c = logger.info) === null || _c === void 0 ? void 0 : _c.call(logger, "Overriding compression level: ".concat(compressionLevel));
+ }
+ setCompressionLevel(zip, compressionLevel || 8);
+ process.chdir(initialWorkingDirectory);
+ return [4 /*yield*/, zip.writeZipPromise(archivePath, { overwrite: overwrite === undefined ? true : overwrite })];
+ case 2:
+ _e.sent();
+ return [2 /*return*/];
+ }
+ });
+ });
+}
+exports.doZip = doZip;
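+// Illustrative sketch only: a hedged example of calling doZip() with the parameters
+// documented in the JSDoc above. The logger shape ({ info, debug }) matches the
+// optional calls made inside doZip; the file and folder names are hypothetical.
+//
+//   await doZip(".", ["**/*.js"], "out", "example.1.0.0.zip",
+//       { info: console.log, debug: console.log }, 8, true);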
+var setCompressionLevel = function (zip, level) {
+ var entries = zip.getEntries();
+ for (var i = 0; i < entries.length; i++) {
+ var entry = entries[i];
+ if (entry) {
+ entry.header.method = level;
+ }
+ }
+};
+function expandGlobs(filePatterns) {
+ return __awaiter(this, void 0, void 0, function () {
+ var files, filePatterns_1, filePatterns_1_1, filePattern, _a, _b, fileName, filePaths, filePaths_1, filePaths_1_1, filePath, e_2_1, e_3_1;
+ var e_3, _c, e_2, _d, e_4, _e;
+ return __generator(this, function (_f) {
+ switch (_f.label) {
+ case 0:
+ files = [];
+ _f.label = 1;
+ case 1:
+ _f.trys.push([1, 13, 14, 15]);
+ filePatterns_1 = __values(filePatterns), filePatterns_1_1 = filePatterns_1.next();
+ _f.label = 2;
+ case 2:
+ if (!!filePatterns_1_1.done) return [3 /*break*/, 12];
+ filePattern = filePatterns_1_1.value;
+ _f.label = 3;
+ case 3:
+ _f.trys.push([3, 9, 10, 11]);
+ _a = (e_2 = void 0, __values(filePattern.split(","))), _b = _a.next();
+ _f.label = 4;
+ case 4:
+ if (!!_b.done) return [3 /*break*/, 8];
+ fileName = _b.value;
+ if (!glob_1.glob.hasMagic(fileName)) return [3 /*break*/, 6];
+ return [4 /*yield*/, globp(fileName)];
+ case 5:
+ filePaths = _f.sent();
+ try {
+ for (filePaths_1 = (e_4 = void 0, __values(filePaths)), filePaths_1_1 = filePaths_1.next(); !filePaths_1_1.done; filePaths_1_1 = filePaths_1.next()) {
+ filePath = filePaths_1_1.value;
+ files.push(filePath);
+ }
+ }
+ catch (e_4_1) { e_4 = { error: e_4_1 }; }
+ finally {
+ try {
+ if (filePaths_1_1 && !filePaths_1_1.done && (_e = filePaths_1.return)) _e.call(filePaths_1);
+ }
+ finally { if (e_4) throw e_4.error; }
+ }
+ return [3 /*break*/, 7];
+ case 6:
+ files.push(fileName);
+ _f.label = 7;
+ case 7:
+ _b = _a.next();
+ return [3 /*break*/, 4];
+ case 8: return [3 /*break*/, 11];
+ case 9:
+ e_2_1 = _f.sent();
+ e_2 = { error: e_2_1 };
+ return [3 /*break*/, 11];
+ case 10:
+ try {
+ if (_b && !_b.done && (_d = _a.return)) _d.call(_a);
+ }
+ finally { if (e_2) throw e_2.error; }
+ return [7 /*endfinally*/];
+ case 11:
+ filePatterns_1_1 = filePatterns_1.next();
+ return [3 /*break*/, 2];
+ case 12: return [3 /*break*/, 15];
+ case 13:
+ e_3_1 = _f.sent();
+ e_3 = { error: e_3_1 };
+ return [3 /*break*/, 15];
+ case 14:
+ try {
+ if (filePatterns_1_1 && !filePatterns_1_1.done && (_c = filePatterns_1.return)) _c.call(filePatterns_1);
+ }
+ finally { if (e_3) throw e_3.error; }
+ return [7 /*endfinally*/];
+ case 15: return [2 /*return*/, files];
+ }
+ });
+ });
+}
+
+
/***/ }),
/***/ 714:
@@ -5546,30 +5961,35 @@ var ServerTaskWaiter = /** @class */ (function () {
}, timeout);
_a.label = 1;
case 1:
- if (!!stop) return [3 /*break*/, 7];
- if (!pollingCallback) return [3 /*break*/, 3];
- return [4 /*yield*/, spaceServerTaskRepository.getDetails(serverTaskId)];
+ if (!!stop) return [3 /*break*/, 10];
+ _a.label = 2;
case 2:
+ _a.trys.push([2, , 7, 8]);
+ if (!pollingCallback) return [3 /*break*/, 4];
+ return [4 /*yield*/, spaceServerTaskRepository.getDetails(serverTaskId)];
+ case 3:
taskDetails = _a.sent();
pollingCallback(taskDetails);
if (taskDetails.Task.IsCompleted) {
- clearTimeout(t);
return [2 /*return*/, taskDetails.Task];
}
- return [3 /*break*/, 5];
- case 3: return [4 /*yield*/, spaceServerTaskRepository.getById(serverTaskId)];
- case 4:
+ return [3 /*break*/, 6];
+ case 4: return [4 /*yield*/, spaceServerTaskRepository.getById(serverTaskId)];
+ case 5:
task = _a.sent();
if (task.IsCompleted) {
- clearTimeout(t);
return [2 /*return*/, task];
}
- _a.label = 5;
- case 5: return [4 /*yield*/, sleep(statusCheckSleepCycle)];
- case 6:
+ _a.label = 6;
+ case 6: return [3 /*break*/, 8];
+ case 7:
+ clearTimeout(t);
+ return [7 /*endfinally*/];
+ case 8: return [4 /*yield*/, sleep(statusCheckSleepCycle)];
+ case 9:
_a.sent();
return [3 /*break*/, 1];
- case 7: return [2 /*return*/, null];
+ case 10: return [2 /*return*/, null];
}
});
});
@@ -6615,173 +7035,5680 @@ exports.resolveSpaceId = resolveSpaceId;
/***/ }),
-/***/ 5626:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 5626:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isSpaceScopedArgs = void 0;
+function isSpaceScopedArgs(args) {
+ return "spaceName" in args;
+}
+exports.isSpaceScopedArgs = isSpaceScopedArgs;
+
+
+/***/ }),
+
+/***/ 3667:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isSpaceScopedOperation = void 0;
+function isSpaceScopedOperation(command) {
+ return "spaceName" in command;
+}
+exports.isSpaceScopedOperation = isSpaceScopedOperation;
+
+
+/***/ }),
+
+/***/ 3295:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isSpaceScopedRequest = void 0;
+function isSpaceScopedRequest(command) {
+ return "spaceName" in command;
+}
+exports.isSpaceScopedRequest = isSpaceScopedRequest;
+
+
+/***/ }),
+
+/***/ 492:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isSpaceScopedResource = void 0;
+function isSpaceScopedResource(resource) {
+ return "SpaceId" in resource;
+}
+exports.isSpaceScopedResource = isSpaceScopedResource;
+
+
+/***/ }),
+
+/***/ 7218:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.spaceScopedRoutePrefix = void 0;
+var apiLocation_1 = __nccwpck_require__(7083);
+exports.spaceScopedRoutePrefix = "".concat(apiLocation_1.apiLocation, "/{spaceId}");
+
+
+/***/ }),
+
+/***/ 1547:
+/***/ ((__unused_webpack_module, exports) => {
+
+"use strict";
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.SubscriptionRecord = void 0;
+var SubscriptionRecord = /** @class */ (function () {
+ function SubscriptionRecord() {
+ this.subscriptions = {};
+ }
+ SubscriptionRecord.prototype.subscribe = function (registrationName, callback) {
+ var _this = this;
+ this.subscriptions[registrationName] = callback;
+ return function () { return _this.unsubscribe(registrationName); };
+ };
+ SubscriptionRecord.prototype.unsubscribe = function (registrationName) {
+ delete this.subscriptions[registrationName];
+ };
+ SubscriptionRecord.prototype.notify = function (predicate, data) {
+ var _this = this;
+ Object.keys(this.subscriptions)
+ .filter(predicate)
+ .forEach(function (key) { return _this.subscriptions[key](data); });
+ };
+ SubscriptionRecord.prototype.notifyAll = function (data) {
+ this.notify(function () { return true; }, data);
+ };
+ SubscriptionRecord.prototype.notifySingle = function (registrationName, data) {
+ if (registrationName in this.subscriptions) {
+ this.subscriptions[registrationName](data);
+ }
+ };
+ return SubscriptionRecord;
+}());
+exports.SubscriptionRecord = SubscriptionRecord;
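+// Illustrative sketch only: a hedged example of the subscribe/notify cycle the class
+// above implements. The registration name and payloads are hypothetical.
+//
+//   const record = new SubscriptionRecord();
+//   const unsubscribe = record.subscribe("progress", function (data) { console.log(data); });
+//   record.notifyAll({ done: false });
+//   record.notifySingle("progress", { done: true });
+//   unsubscribe();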
+
+
+/***/ }),
+
+/***/ 7132:
+/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+
+"use strict";
+
+var __importDefault = (this && this.__importDefault) || function (mod) {
+ return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.isPropertyDefinedAndNotNull = exports.typeSafeHasOwnProperty = exports.ensureSuffix = exports.ensurePrefix = exports.determineServerEndpoint = exports.getResolver = exports.getServerEndpoint = exports.getQueryValue = void 0;
+var lodash_1 = __nccwpck_require__(250);
+var urijs_1 = __importDefault(__nccwpck_require__(4190));
+var resolver_1 = __nccwpck_require__(8043);
+var getQueryValue = function (key, location) {
+ var result;
+ (0, urijs_1.default)(location).hasQuery(key, function (value) {
+ result = value;
+ });
+ return result;
+};
+exports.getQueryValue = getQueryValue;
+var getServerEndpoint = function (location) {
+ if (location === void 0) { location = window.location; }
+ return (0, exports.getQueryValue)("octopus.server", location.href) || (0, exports.determineServerEndpoint)(location);
+};
+exports.getServerEndpoint = getServerEndpoint;
+var getResolver = function (base) {
+ var resolver = new resolver_1.Resolver(base);
+ return resolver.resolve.bind(resolver);
+};
+exports.getResolver = getResolver;
+var determineServerEndpoint = function (location) {
+ var endpoint = (0, exports.ensureSuffix)("//", "" + location.protocol) + location.host;
+ var path = (0, exports.ensurePrefix)("/", location.pathname);
+ if (path.length >= 1) {
+ var lastSegmentIndex = path.lastIndexOf("/");
+ if (lastSegmentIndex >= 0) {
+ path = path.substring(0, lastSegmentIndex + 1);
+ }
+ }
+ endpoint = endpoint + path;
+ return endpoint;
+};
+exports.determineServerEndpoint = determineServerEndpoint;
+exports.ensurePrefix = (0, lodash_1.curry)(function (prefix, value) { return (!value.startsWith(prefix) ? "".concat(prefix).concat(value) : value); });
+exports.ensureSuffix = (0, lodash_1.curry)(function (suffix, value) { return (!value.endsWith(suffix) ? "".concat(value).concat(suffix) : value); });
+var typeSafeHasOwnProperty = function (target, key) {
+ return target.hasOwnProperty(key);
+};
+exports.typeSafeHasOwnProperty = typeSafeHasOwnProperty;
+var isPropertyDefinedAndNotNull = function (target, key) {
+ return (0, exports.typeSafeHasOwnProperty)(target, key) && target[key] !== null && target[key] !== undefined;
+};
+exports.isPropertyDefinedAndNotNull = isPropertyDefinedAndNotNull;
+
+
+/***/ }),
+
+/***/ 6589:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var balanced = __nccwpck_require__(9417);
+
+module.exports = expandTop;
+
+var escSlash = '\0SLASH'+Math.random()+'\0';
+var escOpen = '\0OPEN'+Math.random()+'\0';
+var escClose = '\0CLOSE'+Math.random()+'\0';
+var escComma = '\0COMMA'+Math.random()+'\0';
+var escPeriod = '\0PERIOD'+Math.random()+'\0';
+
+function numeric(str) {
+ return parseInt(str, 10) == str
+ ? parseInt(str, 10)
+ : str.charCodeAt(0);
+}
+
+function escapeBraces(str) {
+ return str.split('\\\\').join(escSlash)
+ .split('\\{').join(escOpen)
+ .split('\\}').join(escClose)
+ .split('\\,').join(escComma)
+ .split('\\.').join(escPeriod);
+}
+
+function unescapeBraces(str) {
+ return str.split(escSlash).join('\\')
+ .split(escOpen).join('{')
+ .split(escClose).join('}')
+ .split(escComma).join(',')
+ .split(escPeriod).join('.');
+}
+
+
+// Basically just str.split(","), but handling cases
+// where we have nested braced sections, which should be
+// treated as individual members, like {a,{b,c},d}
+function parseCommaParts(str) {
+ if (!str)
+ return [''];
+
+ var parts = [];
+ var m = balanced('{', '}', str);
+
+ if (!m)
+ return str.split(',');
+
+ var pre = m.pre;
+ var body = m.body;
+ var post = m.post;
+ var p = pre.split(',');
+
+ p[p.length-1] += '{' + body + '}';
+ var postParts = parseCommaParts(post);
+ if (post.length) {
+ p[p.length-1] += postParts.shift();
+ p.push.apply(p, postParts);
+ }
+
+ parts.push.apply(parts, p);
+
+ return parts;
+}
+
+function expandTop(str) {
+ if (!str)
+ return [];
+
+ // I don't know why Bash 4.3 does this, but it does.
+ // Anything starting with {} will have the first two bytes preserved
+ // but *only* at the top level, so {},a}b will not expand to anything,
+ // but a{},b}c will be expanded to [a}c,abc].
+ // One could argue that this is a bug in Bash, but since the goal of
+ // this module is to match Bash's rules, we escape a leading {}
+ if (str.substr(0, 2) === '{}') {
+ str = '\\{\\}' + str.substr(2);
+ }
+
+ return expand(escapeBraces(str), true).map(unescapeBraces);
+}
+
+function embrace(str) {
+ return '{' + str + '}';
+}
+function isPadded(el) {
+ return /^-?0\d/.test(el);
+}
+
+function lte(i, y) {
+ return i <= y;
+}
+function gte(i, y) {
+ return i >= y;
+}
+
+function expand(str, isTop) {
+ var expansions = [];
+
+ var m = balanced('{', '}', str);
+ if (!m) return [str];
+
+ // no need to expand pre, since it is guaranteed to be free of brace-sets
+ var pre = m.pre;
+ var post = m.post.length
+ ? expand(m.post, false)
+ : [''];
+
+ if (/\$$/.test(m.pre)) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre+ '{' + m.body + '}' + post[k];
+ expansions.push(expansion);
+ }
+ } else {
+ var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body);
+ var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body);
+ var isSequence = isNumericSequence || isAlphaSequence;
+ var isOptions = m.body.indexOf(',') >= 0;
+ if (!isSequence && !isOptions) {
+ // {a},b}
+ if (m.post.match(/,.*\}/)) {
+ str = m.pre + '{' + m.body + escClose + m.post;
+ return expand(str);
+ }
+ return [str];
+ }
+
+ var n;
+ if (isSequence) {
+ n = m.body.split(/\.\./);
+ } else {
+ n = parseCommaParts(m.body);
+ if (n.length === 1) {
+ // x{{a,b}}y ==> x{a}y x{b}y
+ n = expand(n[0], false).map(embrace);
+ if (n.length === 1) {
+ return post.map(function(p) {
+ return m.pre + n[0] + p;
+ });
+ }
+ }
+ }
+
+ // at this point, n is the parts, and we know it's not a comma set
+ // with a single entry.
+ var N;
+
+ if (isSequence) {
+ var x = numeric(n[0]);
+ var y = numeric(n[1]);
+ var width = Math.max(n[0].length, n[1].length)
+ var incr = n.length == 3
+ ? Math.abs(numeric(n[2]))
+ : 1;
+ var test = lte;
+ var reverse = y < x;
+ if (reverse) {
+ incr *= -1;
+ test = gte;
+ }
+ var pad = n.some(isPadded);
+
+ N = [];
+
+ for (var i = x; test(i, y); i += incr) {
+ var c;
+ if (isAlphaSequence) {
+ c = String.fromCharCode(i);
+ if (c === '\\')
+ c = '';
+ } else {
+ c = String(i);
+ if (pad) {
+ var need = width - c.length;
+ if (need > 0) {
+ var z = new Array(need + 1).join('0');
+ if (i < 0)
+ c = '-' + z + c.slice(1);
+ else
+ c = z + c;
+ }
+ }
+ }
+ N.push(c);
+ }
+ } else {
+ N = [];
+
+ for (var j = 0; j < n.length; j++) {
+ N.push.apply(N, expand(n[j], false));
+ }
+ }
+
+ for (var j = 0; j < N.length; j++) {
+ for (var k = 0; k < post.length; k++) {
+ var expansion = pre + N[j] + post[k];
+ if (!isTop || isSequence || expansion)
+ expansions.push(expansion);
+ }
+ }
+ }
+
+ return expansions;
+}
+
+
+
+/***/ }),
+
+/***/ 150:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+exports.setopts = setopts
+exports.ownProp = ownProp
+exports.makeAbs = makeAbs
+exports.finish = finish
+exports.mark = mark
+exports.isIgnored = isIgnored
+exports.childrenIgnored = childrenIgnored
+
+function ownProp (obj, field) {
+ return Object.prototype.hasOwnProperty.call(obj, field)
+}
+
+var fs = __nccwpck_require__(7147)
+var path = __nccwpck_require__(1017)
+var minimatch = __nccwpck_require__(7787)
+var isAbsolute = (__nccwpck_require__(1017).isAbsolute)
+var Minimatch = minimatch.Minimatch
+
+function alphasort (a, b) {
+ return a.localeCompare(b, 'en')
+}
+
+function setupIgnores (self, options) {
+ self.ignore = options.ignore || []
+
+ if (!Array.isArray(self.ignore))
+ self.ignore = [self.ignore]
+
+ if (self.ignore.length) {
+ self.ignore = self.ignore.map(ignoreMap)
+ }
+}
+
+// ignore patterns are always in dot:true mode.
+function ignoreMap (pattern) {
+ var gmatcher = null
+ if (pattern.slice(-3) === '/**') {
+ var gpattern = pattern.replace(/(\/\*\*)+$/, '')
+ gmatcher = new Minimatch(gpattern, { dot: true })
+ }
+
+ return {
+ matcher: new Minimatch(pattern, { dot: true }),
+ gmatcher: gmatcher
+ }
+}
+
+function setopts (self, pattern, options) {
+ if (!options)
+ options = {}
+
+ // base-matching: just use globstar for that.
+ if (options.matchBase && -1 === pattern.indexOf("/")) {
+ if (options.noglobstar) {
+ throw new Error("base matching requires globstar")
+ }
+ pattern = "**/" + pattern
+ }
+
+ self.silent = !!options.silent
+ self.pattern = pattern
+ self.strict = options.strict !== false
+ self.realpath = !!options.realpath
+ self.realpathCache = options.realpathCache || Object.create(null)
+ self.follow = !!options.follow
+ self.dot = !!options.dot
+ self.mark = !!options.mark
+ self.nodir = !!options.nodir
+ if (self.nodir)
+ self.mark = true
+ self.sync = !!options.sync
+ self.nounique = !!options.nounique
+ self.nonull = !!options.nonull
+ self.nosort = !!options.nosort
+ self.nocase = !!options.nocase
+ self.stat = !!options.stat
+ self.noprocess = !!options.noprocess
+ self.absolute = !!options.absolute
+ self.fs = options.fs || fs
+
+ self.maxLength = options.maxLength || Infinity
+ self.cache = options.cache || Object.create(null)
+ self.statCache = options.statCache || Object.create(null)
+ self.symlinks = options.symlinks || Object.create(null)
+
+ setupIgnores(self, options)
+
+ self.changedCwd = false
+ var cwd = process.cwd()
+ if (!ownProp(options, "cwd"))
+ self.cwd = path.resolve(cwd)
+ else {
+ self.cwd = path.resolve(options.cwd)
+ self.changedCwd = self.cwd !== cwd
+ }
+
+ self.root = options.root || path.resolve(self.cwd, "/")
+ self.root = path.resolve(self.root)
+
+ // TODO: is an absolute `cwd` supposed to be resolved against `root`?
+ // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test')
+ self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd)
+ self.nomount = !!options.nomount
+
+ if (process.platform === "win32") {
+ self.root = self.root.replace(/\\/g, "/")
+ self.cwd = self.cwd.replace(/\\/g, "/")
+ self.cwdAbs = self.cwdAbs.replace(/\\/g, "/")
+ }
+
+ // disable comments and negation in Minimatch.
+ // Note that they are not supported in Glob itself anyway.
+ options.nonegate = true
+ options.nocomment = true
+ // always treat \ in patterns as escapes, not path separators
+ options.allowWindowsEscape = true
+
+ self.minimatch = new Minimatch(pattern, options)
+ self.options = self.minimatch.options
+}
+
+function finish (self) {
+ var nou = self.nounique
+ var all = nou ? [] : Object.create(null)
+
+ for (var i = 0, l = self.matches.length; i < l; i ++) {
+ var matches = self.matches[i]
+ if (!matches || Object.keys(matches).length === 0) {
+ if (self.nonull) {
+ // do like the shell, and spit out the literal glob
+ var literal = self.minimatch.globSet[i]
+ if (nou)
+ all.push(literal)
+ else
+ all[literal] = true
+ }
+ } else {
+ // had matches
+ var m = Object.keys(matches)
+ if (nou)
+ all.push.apply(all, m)
+ else
+ m.forEach(function (m) {
+ all[m] = true
+ })
+ }
+ }
+
+ if (!nou)
+ all = Object.keys(all)
+
+ if (!self.nosort)
+ all = all.sort(alphasort)
+
+ // at *some* point we statted all of these
+ if (self.mark) {
+ for (var i = 0; i < all.length; i++) {
+ all[i] = self._mark(all[i])
+ }
+ if (self.nodir) {
+ all = all.filter(function (e) {
+ var notDir = !(/\/$/.test(e))
+ var c = self.cache[e] || self.cache[makeAbs(self, e)]
+ if (notDir && c)
+ notDir = c !== 'DIR' && !Array.isArray(c)
+ return notDir
+ })
+ }
+ }
+
+ if (self.ignore.length)
+ all = all.filter(function(m) {
+ return !isIgnored(self, m)
+ })
+
+ self.found = all
+}
+
+function mark (self, p) {
+ var abs = makeAbs(self, p)
+ var c = self.cache[abs]
+ var m = p
+ if (c) {
+ var isDir = c === 'DIR' || Array.isArray(c)
+ var slash = p.slice(-1) === '/'
+
+ if (isDir && !slash)
+ m += '/'
+ else if (!isDir && slash)
+ m = m.slice(0, -1)
+
+ if (m !== p) {
+ var mabs = makeAbs(self, m)
+ self.statCache[mabs] = self.statCache[abs]
+ self.cache[mabs] = self.cache[abs]
+ }
+ }
+
+ return m
+}
+
+// lotta situps...
+function makeAbs (self, f) {
+ var abs = f
+ if (f.charAt(0) === '/') {
+ abs = path.join(self.root, f)
+ } else if (isAbsolute(f) || f === '') {
+ abs = f
+ } else if (self.changedCwd) {
+ abs = path.resolve(self.cwd, f)
+ } else {
+ abs = path.resolve(f)
+ }
+
+ if (process.platform === 'win32')
+ abs = abs.replace(/\\/g, '/')
+
+ return abs
+}
+
+
+// Return true, if pattern ends with globstar '**', for the accompanying parent directory.
+// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents
+function isIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+function childrenIgnored (self, path) {
+ if (!self.ignore.length)
+ return false
+
+ return self.ignore.some(function(item) {
+ return !!(item.gmatcher && item.gmatcher.match(path))
+ })
+}
+
+
+/***/ }),
+
+/***/ 2782:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+// Approach:
+//
+// 1. Get the minimatch set
+// 2. For each pattern in the set, PROCESS(pattern, false)
+// 3. Store matches per-set, then uniq them
+//
+// PROCESS(pattern, inGlobStar)
+// Get the first [n] items from pattern that are all strings
+// Join these together. This is PREFIX.
+// If there is no more remaining, then stat(PREFIX) and
+// add to matches if it succeeds. END.
+//
+// If inGlobStar and PREFIX is symlink and points to dir
+// set ENTRIES = []
+// else readdir(PREFIX) as ENTRIES
+// If fail, END
+//
+// with ENTRIES
+// If pattern[n] is GLOBSTAR
+// // handle the case where the globstar match is empty
+// // by pruning it out, and testing the resulting pattern
+// PROCESS(pattern[0..n] + pattern[n+1 .. $], false)
+// // handle other cases.
+// for ENTRY in ENTRIES (not dotfiles)
+// // attach globstar + tail onto the entry
+// // Mark that this entry is a globstar match
+// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true)
+//
+// else // not globstar
+// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot)
+// Test ENTRY against pattern[n]
+// If fails, continue
+// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $])
+//
+// Caveat:
+// Cache all stats and readdirs results to minimize syscall. Since all
+// we ever care about is existence and directory-ness, we can just keep
+// `true` for files, and [children,...] for directories, or `false` for
+// things that don't exist.
+
+module.exports = glob
+
+var rp = __nccwpck_require__(6863)
+var minimatch = __nccwpck_require__(7787)
+var Minimatch = minimatch.Minimatch
+var inherits = __nccwpck_require__(4124)
+var EE = (__nccwpck_require__(2361).EventEmitter)
+var path = __nccwpck_require__(1017)
+var assert = __nccwpck_require__(9491)
+var isAbsolute = (__nccwpck_require__(1017).isAbsolute)
+var globSync = __nccwpck_require__(2479)
+var common = __nccwpck_require__(150)
+var setopts = common.setopts
+var ownProp = common.ownProp
+var inflight = __nccwpck_require__(2492)
+var util = __nccwpck_require__(3837)
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+var once = __nccwpck_require__(1223)
+
+function glob (pattern, options, cb) {
+ if (typeof options === 'function') cb = options, options = {}
+ if (!options) options = {}
+
+ if (options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return globSync(pattern, options)
+ }
+
+ return new Glob(pattern, options, cb)
+}
+
+glob.sync = globSync
+var GlobSync = glob.GlobSync = globSync.GlobSync
+
+// old api surface
+glob.glob = glob
+
+function extend (origin, add) {
+ if (add === null || typeof add !== 'object') {
+ return origin
+ }
+
+ var keys = Object.keys(add)
+ var i = keys.length
+ while (i--) {
+ origin[keys[i]] = add[keys[i]]
+ }
+ return origin
+}
+
+glob.hasMagic = function (pattern, options_) {
+ var options = extend({}, options_)
+ options.noprocess = true
+
+ var g = new Glob(pattern, options)
+ var set = g.minimatch.set
+
+ if (!pattern)
+ return false
+
+ if (set.length > 1)
+ return true
+
+ for (var j = 0; j < set[0].length; j++) {
+ if (typeof set[0][j] !== 'string')
+ return true
+ }
+
+ return false
+}
+
+glob.Glob = Glob
+inherits(Glob, EE)
+function Glob (pattern, options, cb) {
+ if (typeof options === 'function') {
+ cb = options
+ options = null
+ }
+
+ if (options && options.sync) {
+ if (cb)
+ throw new TypeError('callback provided to sync glob')
+ return new GlobSync(pattern, options)
+ }
+
+ if (!(this instanceof Glob))
+ return new Glob(pattern, options, cb)
+
+ setopts(this, pattern, options)
+ this._didRealPath = false
+
+ // process each pattern in the minimatch set
+ var n = this.minimatch.set.length
+
+ // The matches are stored as {<filename>: true,...} so that
+ // duplicates are automagically pruned.
+ // Later, we do an Object.keys() on these.
+ // Keep them as a list so we can fill in when nonull is set.
+ this.matches = new Array(n)
+
+ if (typeof cb === 'function') {
+ cb = once(cb)
+ this.on('error', cb)
+ this.on('end', function (matches) {
+ cb(null, matches)
+ })
+ }
+
+ var self = this
+ this._processing = 0
+
+ this._emitQueue = []
+ this._processQueue = []
+ this.paused = false
+
+ if (this.noprocess)
+ return this
+
+ if (n === 0)
+ return done()
+
+ var sync = true
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false, done)
+ }
+ sync = false
+
+ function done () {
+ --self._processing
+ if (self._processing <= 0) {
+ if (sync) {
+ process.nextTick(function () {
+ self._finish()
+ })
+ } else {
+ self._finish()
+ }
+ }
+ }
+}
+
+Glob.prototype._finish = function () {
+ assert(this instanceof Glob)
+ if (this.aborted)
+ return
+
+ if (this.realpath && !this._didRealpath)
+ return this._realpath()
+
+ common.finish(this)
+ this.emit('end', this.found)
+}
+
+Glob.prototype._realpath = function () {
+ if (this._didRealpath)
+ return
+
+ this._didRealpath = true
+
+ var n = this.matches.length
+ if (n === 0)
+ return this._finish()
+
+ var self = this
+ for (var i = 0; i < this.matches.length; i++)
+ this._realpathSet(i, next)
+
+ function next () {
+ if (--n === 0)
+ self._finish()
+ }
+}
+
+Glob.prototype._realpathSet = function (index, cb) {
+ var matchset = this.matches[index]
+ if (!matchset)
+ return cb()
+
+ var found = Object.keys(matchset)
+ var self = this
+ var n = found.length
+
+ if (n === 0)
+ return cb()
+
+ var set = this.matches[index] = Object.create(null)
+ found.forEach(function (p, i) {
+ // If there's a problem with the stat, then it means that
+ // one or more of the links in the realpath couldn't be
+ // resolved. just return the abs value in that case.
+ p = self._makeAbs(p)
+ rp.realpath(p, self.realpathCache, function (er, real) {
+ if (!er)
+ set[real] = true
+ else if (er.syscall === 'stat')
+ set[p] = true
+ else
+ self.emit('error', er) // srsly wtf right here
+
+ if (--n === 0) {
+ self.matches[index] = set
+ cb()
+ }
+ })
+ })
+}
+
+Glob.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+Glob.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+Glob.prototype.abort = function () {
+ this.aborted = true
+ this.emit('abort')
+}
+
+Glob.prototype.pause = function () {
+ if (!this.paused) {
+ this.paused = true
+ this.emit('pause')
+ }
+}
+
+Glob.prototype.resume = function () {
+ if (this.paused) {
+ this.emit('resume')
+ this.paused = false
+ if (this._emitQueue.length) {
+ var eq = this._emitQueue.slice(0)
+ this._emitQueue.length = 0
+ for (var i = 0; i < eq.length; i ++) {
+ var e = eq[i]
+ this._emitMatch(e[0], e[1])
+ }
+ }
+ if (this._processQueue.length) {
+ var pq = this._processQueue.slice(0)
+ this._processQueue.length = 0
+ for (var i = 0; i < pq.length; i ++) {
+ var p = pq[i]
+ this._processing--
+ this._process(p[0], p[1], p[2], p[3])
+ }
+ }
+ }
+}
+
+Glob.prototype._process = function (pattern, index, inGlobStar, cb) {
+ assert(this instanceof Glob)
+ assert(typeof cb === 'function')
+
+ if (this.aborted)
+ return
+
+ this._processing++
+ if (this.paused) {
+ this._processQueue.push([pattern, index, inGlobStar, cb])
+ return
+ }
+
+ //console.error('PROCESS %d', this._processing, pattern)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // see if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index, cb)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) ||
+ isAbsolute(pattern.map(function (p) {
+ return typeof p === 'string' ? p : '[*]'
+ }).join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ //if ignored, skip _processing
+ if (childrenIgnored(this, read))
+ return cb()
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb)
+}
+
+Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return cb()
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries)
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return cb()
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return cb()
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix) {
+ if (prefix !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+ this._process([e].concat(remain), index, inGlobStar, cb)
+ }
+ cb()
+}
+
+Glob.prototype._emitMatch = function (index, e) {
+ if (this.aborted)
+ return
+
+ if (isIgnored(this, e))
+ return
+
+ if (this.paused) {
+ this._emitQueue.push([index, e])
+ return
+ }
+
+ var abs = isAbsolute(e) ? e : this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute)
+ e = abs
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ var st = this.statCache[abs]
+ if (st)
+ this.emit('stat', e, st)
+
+ this.emit('match', e)
+}
+
+Glob.prototype._readdirInGlobStar = function (abs, cb) {
+ if (this.aborted)
+ return
+
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false, cb)
+
+ var lstatkey = 'lstat\0' + abs
+ var self = this
+ var lstatcb = inflight(lstatkey, lstatcb_)
+
+ if (lstatcb)
+ self.fs.lstat(abs, lstatcb)
+
+ function lstatcb_ (er, lstat) {
+ if (er && er.code === 'ENOENT')
+ return cb()
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ self.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory()) {
+ self.cache[abs] = 'FILE'
+ cb()
+ } else
+ self._readdir(abs, false, cb)
+ }
+}
+
+Glob.prototype._readdir = function (abs, inGlobStar, cb) {
+ if (this.aborted)
+ return
+
+ cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb)
+ if (!cb)
+ return
+
+ //console.error('RD %j %j', +inGlobStar, abs)
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs, cb)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return cb()
+
+ if (Array.isArray(c))
+ return cb(null, c)
+ }
+
+ var self = this
+ self.fs.readdir(abs, readdirCb(this, abs, cb))
+}
+
+function readdirCb (self, abs, cb) {
+ return function (er, entries) {
+ if (er)
+ self._readdirError(abs, er, cb)
+ else
+ self._readdirEntries(abs, entries, cb)
+ }
+}
+
+Glob.prototype._readdirEntries = function (abs, entries, cb) {
+ if (this.aborted)
+ return
+
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+ return cb(null, entries)
+}
+
+Glob.prototype._readdirError = function (f, er, cb) {
+ if (this.aborted)
+ return
+
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ this.emit('error', error)
+ this.abort()
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict) {
+ this.emit('error', er)
+ // If the error is handled, then we abort
+ // if not, we threw out of here
+ this.abort()
+ }
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+
+ return cb()
+}
+
+Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) {
+ var self = this
+ this._readdir(abs, inGlobStar, function (er, entries) {
+ self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb)
+ })
+}
+
+
+Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) {
+ //console.error('pgs2', prefix, remain[0], entries)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return cb()
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false, cb)
+
+ var isSym = this.symlinks[abs]
+ var len = entries.length
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return cb()
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true, cb)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true, cb)
+ }
+
+ cb()
+}
+
+Glob.prototype._processSimple = function (prefix, index, cb) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var self = this
+ this._stat(prefix, function (er, exists) {
+ self._processSimple2(prefix, index, er, exists, cb)
+ })
+}
+Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) {
+
+ //console.error('ps2', prefix, exists)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return cb()
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+ cb()
+}
+
+// Returns either 'DIR', 'FILE', or false
+Glob.prototype._stat = function (f, cb) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return cb()
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return cb(null, c)
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ // otherwise we have to stat, because maybe c=true
+ // if we know it exists, but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (stat !== undefined) {
+ if (stat === false)
+ return cb(null, stat)
+ else {
+ var type = stat.isDirectory() ? 'DIR' : 'FILE'
+ if (needDir && type === 'FILE')
+ return cb()
+ else
+ return cb(null, type, stat)
+ }
+ }
+
+ var self = this
+ var statcb = inflight('stat\0' + abs, lstatcb_)
+ if (statcb)
+ self.fs.lstat(abs, statcb)
+
+ function lstatcb_ (er, lstat) {
+ if (lstat && lstat.isSymbolicLink()) {
+ // If it's a symlink, then treat it as the target, unless
+ // the target does not exist, then treat it as a file.
+ return self.fs.stat(abs, function (er, stat) {
+ if (er)
+ self._stat2(f, abs, null, lstat, cb)
+ else
+ self._stat2(f, abs, er, stat, cb)
+ })
+ } else {
+ self._stat2(f, abs, er, lstat, cb)
+ }
+ }
+}
+
+Glob.prototype._stat2 = function (f, abs, er, stat, cb) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return cb()
+ }
+
+ var needDir = f.slice(-1) === '/'
+ this.statCache[abs] = stat
+
+ if (abs.slice(-1) === '/' && stat && !stat.isDirectory())
+ return cb(null, false, stat)
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return cb()
+
+ return cb(null, c, stat)
+}
+
+
+/***/ }),
+
+/***/ 2479:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = globSync
+globSync.GlobSync = GlobSync
+
+var rp = __nccwpck_require__(6863)
+var minimatch = __nccwpck_require__(7787)
+var Minimatch = minimatch.Minimatch
+var Glob = (__nccwpck_require__(2782).Glob)
+var util = __nccwpck_require__(3837)
+var path = __nccwpck_require__(1017)
+var assert = __nccwpck_require__(9491)
+var isAbsolute = (__nccwpck_require__(1017).isAbsolute)
+var common = __nccwpck_require__(150)
+var setopts = common.setopts
+var ownProp = common.ownProp
+var childrenIgnored = common.childrenIgnored
+var isIgnored = common.isIgnored
+
+function globSync (pattern, options) {
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ return new GlobSync(pattern, options).found
+}
+
+function GlobSync (pattern, options) {
+ if (!pattern)
+ throw new Error('must provide pattern')
+
+ if (typeof options === 'function' || arguments.length === 3)
+ throw new TypeError('callback provided to sync glob\n'+
+ 'See: https://github.com/isaacs/node-glob/issues/167')
+
+ if (!(this instanceof GlobSync))
+ return new GlobSync(pattern, options)
+
+ setopts(this, pattern, options)
+
+ if (this.noprocess)
+ return this
+
+ var n = this.minimatch.set.length
+ this.matches = new Array(n)
+ for (var i = 0; i < n; i ++) {
+ this._process(this.minimatch.set[i], i, false)
+ }
+ this._finish()
+}
+
+GlobSync.prototype._finish = function () {
+ assert.ok(this instanceof GlobSync)
+ if (this.realpath) {
+ var self = this
+ this.matches.forEach(function (matchset, index) {
+ var set = self.matches[index] = Object.create(null)
+ for (var p in matchset) {
+ try {
+ p = self._makeAbs(p)
+ var real = rp.realpathSync(p, self.realpathCache)
+ set[real] = true
+ } catch (er) {
+ if (er.syscall === 'stat')
+ set[self._makeAbs(p)] = true
+ else
+ throw er
+ }
+ }
+ })
+ }
+ common.finish(this)
+}
+
+
+GlobSync.prototype._process = function (pattern, index, inGlobStar) {
+ assert.ok(this instanceof GlobSync)
+
+ // Get the first [n] parts of pattern that are all strings.
+ var n = 0
+ while (typeof pattern[n] === 'string') {
+ n ++
+ }
+ // now n is the index of the first one that is *not* a string.
+
+ // See if there's anything else
+ var prefix
+ switch (n) {
+ // if not, then this is rather simple
+ case pattern.length:
+ this._processSimple(pattern.join('/'), index)
+ return
+
+ case 0:
+ // pattern *starts* with some non-trivial item.
+ // going to readdir(cwd), but not include the prefix in matches.
+ prefix = null
+ break
+
+ default:
+ // pattern has some string bits in the front.
+ // whatever it starts with, whether that's 'absolute' like /foo/bar,
+ // or 'relative' like '../baz'
+ prefix = pattern.slice(0, n).join('/')
+ break
+ }
+
+ var remain = pattern.slice(n)
+
+ // get the list of entries.
+ var read
+ if (prefix === null)
+ read = '.'
+ else if (isAbsolute(prefix) ||
+ isAbsolute(pattern.map(function (p) {
+ return typeof p === 'string' ? p : '[*]'
+ }).join('/'))) {
+ if (!prefix || !isAbsolute(prefix))
+ prefix = '/' + prefix
+ read = prefix
+ } else
+ read = prefix
+
+ var abs = this._makeAbs(read)
+
+ // if ignored, skip processing
+ if (childrenIgnored(this, read))
+ return
+
+ var isGlobStar = remain[0] === minimatch.GLOBSTAR
+ if (isGlobStar)
+ this._processGlobStar(prefix, read, abs, remain, index, inGlobStar)
+ else
+ this._processReaddir(prefix, read, abs, remain, index, inGlobStar)
+}
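+
+// Illustration (comment only, hypothetical pattern): for 'src/lib/*.js' the
+// leading string parts give prefix 'src/lib' (n = 2) and remain holds the
+// compiled regexp for '*.js', so the readdir happens inside 'src/lib'.
+// A fully literal pattern such as 'src/index.js' has n === pattern.length
+// and takes the _processSimple() shortcut instead.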
+
+
+GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) {
+ var entries = this._readdir(abs, inGlobStar)
+
+ // if the abs isn't a dir, then nothing can match!
+ if (!entries)
+ return
+
+ // It will only match dot entries if it starts with a dot, or if
+ // dot is set. Stuff like @(.foo|.bar) isn't allowed.
+ var pn = remain[0]
+ var negate = !!this.minimatch.negate
+ var rawGlob = pn._glob
+ var dotOk = this.dot || rawGlob.charAt(0) === '.'
+
+ var matchedEntries = []
+ for (var i = 0; i < entries.length; i++) {
+ var e = entries[i]
+ if (e.charAt(0) !== '.' || dotOk) {
+ var m
+ if (negate && !prefix) {
+ m = !e.match(pn)
+ } else {
+ m = e.match(pn)
+ }
+ if (m)
+ matchedEntries.push(e)
+ }
+ }
+
+ var len = matchedEntries.length
+ // If there are no matched entries, then nothing matches.
+ if (len === 0)
+ return
+
+ // if this is the last remaining pattern bit, then no need for
+ // an additional stat *unless* the user has specified mark or
+ // stat explicitly. We know they exist, since readdir returned
+ // them.
+
+ if (remain.length === 1 && !this.mark && !this.stat) {
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ if (prefix) {
+ if (prefix.slice(-1) !== '/')
+ e = prefix + '/' + e
+ else
+ e = prefix + e
+ }
+
+ if (e.charAt(0) === '/' && !this.nomount) {
+ e = path.join(this.root, e)
+ }
+ this._emitMatch(index, e)
+ }
+ // This was the last one, and no stats were needed
+ return
+ }
+
+ // now test all matched entries as stand-ins for that part
+ // of the pattern.
+ remain.shift()
+ for (var i = 0; i < len; i ++) {
+ var e = matchedEntries[i]
+ var newPattern
+ if (prefix)
+ newPattern = [prefix, e]
+ else
+ newPattern = [e]
+ this._process(newPattern.concat(remain), index, inGlobStar)
+ }
+}
+
+
+GlobSync.prototype._emitMatch = function (index, e) {
+ if (isIgnored(this, e))
+ return
+
+ var abs = this._makeAbs(e)
+
+ if (this.mark)
+ e = this._mark(e)
+
+ if (this.absolute) {
+ e = abs
+ }
+
+ if (this.matches[index][e])
+ return
+
+ if (this.nodir) {
+ var c = this.cache[abs]
+ if (c === 'DIR' || Array.isArray(c))
+ return
+ }
+
+ this.matches[index][e] = true
+
+ if (this.stat)
+ this._stat(e)
+}
+
+
+GlobSync.prototype._readdirInGlobStar = function (abs) {
+ // follow all symlinked directories forever
+ // just proceed as if this is a non-globstar situation
+ if (this.follow)
+ return this._readdir(abs, false)
+
+ var entries
+ var lstat
+ var stat
+ try {
+ lstat = this.fs.lstatSync(abs)
+ } catch (er) {
+ if (er.code === 'ENOENT') {
+ // lstat failed, doesn't exist
+ return null
+ }
+ }
+
+ var isSym = lstat && lstat.isSymbolicLink()
+ this.symlinks[abs] = isSym
+
+ // If it's not a symlink or a dir, then it's definitely a regular file.
+ // don't bother doing a readdir in that case.
+ if (!isSym && lstat && !lstat.isDirectory())
+ this.cache[abs] = 'FILE'
+ else
+ entries = this._readdir(abs, false)
+
+ return entries
+}
+
+GlobSync.prototype._readdir = function (abs, inGlobStar) {
+ var entries
+
+ if (inGlobStar && !ownProp(this.symlinks, abs))
+ return this._readdirInGlobStar(abs)
+
+ if (ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+ if (!c || c === 'FILE')
+ return null
+
+ if (Array.isArray(c))
+ return c
+ }
+
+ try {
+ return this._readdirEntries(abs, this.fs.readdirSync(abs))
+ } catch (er) {
+ this._readdirError(abs, er)
+ return null
+ }
+}
+
+GlobSync.prototype._readdirEntries = function (abs, entries) {
+ // if we haven't asked to stat everything, then just
+ // assume that everything in there exists, so we can avoid
+ // having to stat it a second time.
+ if (!this.mark && !this.stat) {
+ for (var i = 0; i < entries.length; i ++) {
+ var e = entries[i]
+ if (abs === '/')
+ e = abs + e
+ else
+ e = abs + '/' + e
+ this.cache[e] = true
+ }
+ }
+
+ this.cache[abs] = entries
+
+ // mark and cache dir-ness
+ return entries
+}
+
+GlobSync.prototype._readdirError = function (f, er) {
+ // handle errors, and cache the information
+ switch (er.code) {
+ case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205
+ case 'ENOTDIR': // totally normal. means it *does* exist.
+ var abs = this._makeAbs(f)
+ this.cache[abs] = 'FILE'
+ if (abs === this.cwdAbs) {
+ var error = new Error(er.code + ' invalid cwd ' + this.cwd)
+ error.path = this.cwd
+ error.code = er.code
+ throw error
+ }
+ break
+
+ case 'ENOENT': // not terribly unusual
+ case 'ELOOP':
+ case 'ENAMETOOLONG':
+ case 'UNKNOWN':
+ this.cache[this._makeAbs(f)] = false
+ break
+
+ default: // some unusual error. Treat as failure.
+ this.cache[this._makeAbs(f)] = false
+ if (this.strict)
+ throw er
+ if (!this.silent)
+ console.error('glob error', er)
+ break
+ }
+}
+
+GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) {
+
+ var entries = this._readdir(abs, inGlobStar)
+
+ // no entries means not a dir, so it can never have matches
+ // foo.txt/** doesn't match foo.txt
+ if (!entries)
+ return
+
+ // test without the globstar, and with every child both below
+ // and replacing the globstar.
+ var remainWithoutGlobStar = remain.slice(1)
+ var gspref = prefix ? [ prefix ] : []
+ var noGlobStar = gspref.concat(remainWithoutGlobStar)
+
+ // the noGlobStar pattern exits the inGlobStar state
+ this._process(noGlobStar, index, false)
+
+ var len = entries.length
+ var isSym = this.symlinks[abs]
+
+ // If it's a symlink, and we're in a globstar, then stop
+ if (isSym && inGlobStar)
+ return
+
+ for (var i = 0; i < len; i++) {
+ var e = entries[i]
+ if (e.charAt(0) === '.' && !this.dot)
+ continue
+
+ // these two cases enter the inGlobStar state
+ var instead = gspref.concat(entries[i], remainWithoutGlobStar)
+ this._process(instead, index, true)
+
+ var below = gspref.concat(entries[i], remain)
+ this._process(below, index, true)
+ }
+}
+
+GlobSync.prototype._processSimple = function (prefix, index) {
+ // XXX review this. Shouldn't it be doing the mounting etc
+ // before doing stat? kinda weird?
+ var exists = this._stat(prefix)
+
+ if (!this.matches[index])
+ this.matches[index] = Object.create(null)
+
+ // If it doesn't exist, then just mark the lack of results
+ if (!exists)
+ return
+
+ if (prefix && isAbsolute(prefix) && !this.nomount) {
+ var trail = /[\/\\]$/.test(prefix)
+ if (prefix.charAt(0) === '/') {
+ prefix = path.join(this.root, prefix)
+ } else {
+ prefix = path.resolve(this.root, prefix)
+ if (trail)
+ prefix += '/'
+ }
+ }
+
+ if (process.platform === 'win32')
+ prefix = prefix.replace(/\\/g, '/')
+
+ // Mark this as a match
+ this._emitMatch(index, prefix)
+}
+
+// Returns either 'DIR', 'FILE', or false
+GlobSync.prototype._stat = function (f) {
+ var abs = this._makeAbs(f)
+ var needDir = f.slice(-1) === '/'
+
+ if (f.length > this.maxLength)
+ return false
+
+ if (!this.stat && ownProp(this.cache, abs)) {
+ var c = this.cache[abs]
+
+ if (Array.isArray(c))
+ c = 'DIR'
+
+ // It exists, but maybe not how we need it
+ if (!needDir || c === 'DIR')
+ return c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ // otherwise we have to stat, because c may just be true,
+ // meaning we know it exists but not what it is.
+ }
+
+ var exists
+ var stat = this.statCache[abs]
+ if (!stat) {
+ var lstat
+ try {
+ lstat = this.fs.lstatSync(abs)
+ } catch (er) {
+ if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) {
+ this.statCache[abs] = false
+ return false
+ }
+ }
+
+ if (lstat && lstat.isSymbolicLink()) {
+ try {
+ stat = this.fs.statSync(abs)
+ } catch (er) {
+ stat = lstat
+ }
+ } else {
+ stat = lstat
+ }
+ }
+
+ this.statCache[abs] = stat
+
+ var c = true
+ if (stat)
+ c = stat.isDirectory() ? 'DIR' : 'FILE'
+
+ this.cache[abs] = this.cache[abs] || c
+
+ if (needDir && c === 'FILE')
+ return false
+
+ return c
+}
+
+GlobSync.prototype._mark = function (p) {
+ return common.mark(this, p)
+}
+
+GlobSync.prototype._makeAbs = function (f) {
+ return common.makeAbs(this, f)
+}
+
+
+/***/ }),
+
+/***/ 6798:
+/***/ ((module) => {
+
+const isWindows = typeof process === 'object' &&
+ process &&
+ process.platform === 'win32'
+module.exports = isWindows ? { sep: '\\' } : { sep: '/' }
+
+
+/***/ }),
+
+/***/ 7787:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const minimatch = module.exports = (p, pattern, options = {}) => {
+ assertValidPattern(pattern)
+
+ // shortcut: comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ return false
+ }
+
+ return new Minimatch(pattern, options).match(p)
+}
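+
+// Illustrative sketch only (never called): typical use of the exported
+// minimatch(path, pattern, options) entry point. File names are hypothetical.
+function __exampleMinimatchUsage () {
+  minimatch('bar.foo', '*.foo')                     // => true
+  minimatch('bar.foo', '*.bar')                     // => false
+  minimatch('.secret.foo', '*.foo')                 // => false, dotfiles are skipped by default
+  minimatch('.secret.foo', '*.foo', { dot: true })  // => true
+}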
+
+module.exports = minimatch
+
+const path = __nccwpck_require__(6798)
+minimatch.sep = path.sep
+
+const GLOBSTAR = Symbol('globstar **')
+minimatch.GLOBSTAR = GLOBSTAR
+const expand = __nccwpck_require__(6589)
+
+const plTypes = {
+ '!': { open: '(?:(?!(?:', close: '))[^/]*?)'},
+ '?': { open: '(?:', close: ')?' },
+ '+': { open: '(?:', close: ')+' },
+ '*': { open: '(?:', close: ')*' },
+ '@': { open: '(?:', close: ')' }
+}
+
+// any single thing other than /
+// don't need to escape / when using new RegExp()
+const qmark = '[^/]'
+
+// * => any number of characters
+const star = qmark + '*?'
+
+// ** when dots are allowed. Anything goes, except .. and .
+// not (^ or / followed by one or two dots followed by $ or /),
+// followed by anything, any number of times.
+const twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?'
+
+// not a ^ or / followed by a dot,
+// followed by anything, any number of times.
+const twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?'
+
+// "abc" -> { a:true, b:true, c:true }
+const charSet = s => s.split('').reduce((set, c) => {
+ set[c] = true
+ return set
+}, {})
+
+// characters that need to be escaped in RegExp.
+const reSpecials = charSet('().*{}+?[]^$\\!')
+
+// characters that indicate we have to add the pattern start
+const addPatternStartSet = charSet('[.(')
+
+// normalizes slashes.
+const slashSplit = /\/+/
+
+minimatch.filter = (pattern, options = {}) =>
+ (p, i, list) => minimatch(p, pattern, options)
+
+const ext = (a, b = {}) => {
+ const t = {}
+ Object.keys(a).forEach(k => t[k] = a[k])
+ Object.keys(b).forEach(k => t[k] = b[k])
+ return t
+}
+
+minimatch.defaults = def => {
+ if (!def || typeof def !== 'object' || !Object.keys(def).length) {
+ return minimatch
+ }
+
+ const orig = minimatch
+
+ const m = (p, pattern, options) => orig(p, pattern, ext(def, options))
+ m.Minimatch = class Minimatch extends orig.Minimatch {
+ constructor (pattern, options) {
+ super(pattern, ext(def, options))
+ }
+ }
+ m.Minimatch.defaults = options => orig.defaults(ext(def, options)).Minimatch
+ m.filter = (pattern, options) => orig.filter(pattern, ext(def, options))
+ m.defaults = options => orig.defaults(ext(def, options))
+ m.makeRe = (pattern, options) => orig.makeRe(pattern, ext(def, options))
+ m.braceExpand = (pattern, options) => orig.braceExpand(pattern, ext(def, options))
+ m.match = (list, pattern, options) => orig.match(list, pattern, ext(def, options))
+
+ return m
+}
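+
+// Illustrative sketch only (never called): minimatch.defaults() returns a
+// pre-configured clone; the option and inputs below are hypothetical.
+function __exampleMinimatchDefaults () {
+  const mmNoCase = minimatch.defaults({ nocase: true })
+  mmNoCase('README.MD', '*.md')        // => true, nocase is baked in
+  const MM = mmNoCase.Minimatch        // Minimatch subclass carrying the same defaults
+  new MM('*.md').match('NOTES.MD')     // => true
+}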
+
+
+
+
+
+// Brace expansion:
+// a{b,c}d -> abd acd
+// a{b,}c -> abc ac
+// a{0..3}d -> a0d a1d a2d a3d
+// a{b,c{d,e}f}g -> abg acdfg acefg
+// a{b,c}d{e,f}g -> abdeg abdfg acdeg acdfg
+//
+// Invalid sets are not expanded.
+// a{2..}b -> a{2..}b
+// a{b}c -> a{b}c
+minimatch.braceExpand = (pattern, options) => braceExpand(pattern, options)
+
+const braceExpand = (pattern, options = {}) => {
+ assertValidPattern(pattern)
+
+ // Thanks to Yeting Li for
+ // improving this regexp to avoid a ReDOS vulnerability.
+ if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) {
+ // shortcut. no need to expand.
+ return [pattern]
+ }
+
+ return expand(pattern)
+}
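+
+// Illustrative sketch only (never called): brace expansion turns one pattern
+// into the literal set described in the comment block above; inputs are hypothetical.
+function __exampleBraceExpand () {
+  braceExpand('a{b,c}d')                               // => ['abd', 'acd']
+  braceExpand('a{0..3}d')                              // => ['a0d', 'a1d', 'a2d', 'a3d']
+  braceExpand('a{2..}b')                               // => ['a{2..}b'], invalid sets pass through
+  minimatch.braceExpand('a{b,c}d', { nobrace: true })  // => ['a{b,c}d'], expansion disabled
+}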
+
+const MAX_PATTERN_LENGTH = 1024 * 64
+const assertValidPattern = pattern => {
+ if (typeof pattern !== 'string') {
+ throw new TypeError('invalid pattern')
+ }
+
+ if (pattern.length > MAX_PATTERN_LENGTH) {
+ throw new TypeError('pattern is too long')
+ }
+}
+
+// parse a component of the expanded set.
+// At this point, no pattern may contain "/" in it
+// so we're going to return a 2d array, where each entry is the full
+// pattern, split on '/', and then turned into a regular expression.
+// A regexp is made at the end which joins each array with an
+// escaped /, and another full one which joins each regexp with |.
+//
+// Following the lead of Bash 4.1, note that "**" only has special meaning
+// when it is the *only* thing in a path portion. Otherwise, any series
+// of * is equivalent to a single *. Globstar behavior is enabled by
+// default, and can be disabled by setting options.noglobstar.
+const SUBPARSE = Symbol('subparse')
+
+minimatch.makeRe = (pattern, options) =>
+ new Minimatch(pattern, options || {}).makeRe()
+
+minimatch.match = (list, pattern, options = {}) => {
+ const mm = new Minimatch(pattern, options)
+ list = list.filter(f => mm.match(f))
+ if (mm.options.nonull && !list.length) {
+ list.push(pattern)
+ }
+ return list
+}
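+
+// Illustrative sketch only (never called): '**' is only special when it is the
+// entire path portion; embedded it degrades to a single '*'. Paths are hypothetical.
+function __exampleGlobstarMatching () {
+  minimatch('a/x/y/b/c', 'a/**/b/c')  // => true, a lone ** spans any number of directories
+  minimatch('a/x/y/b/c', 'a/**b/c')   // => false, an embedded ** only matches within one segment
+  minimatch('a/xyb/c', 'a/**b/c')     // => true
+  minimatch.match(['a/b.js', 'a/b/c.js', 'README'], '**/*.js')  // => ['a/b.js', 'a/b/c.js']
+}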
+
+// replace stuff like \* with *
+const globUnescape = s => s.replace(/\\(.)/g, '$1')
+const regExpEscape = s => s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&')
+
+class Minimatch {
+ constructor (pattern, options) {
+ assertValidPattern(pattern)
+
+ if (!options) options = {}
+
+ this.options = options
+ this.set = []
+ this.pattern = pattern
+ this.windowsPathsNoEscape = !!options.windowsPathsNoEscape ||
+ options.allowWindowsEscape === false
+ if (this.windowsPathsNoEscape) {
+ this.pattern = this.pattern.replace(/\\/g, '/')
+ }
+ this.regexp = null
+ this.negate = false
+ this.comment = false
+ this.empty = false
+ this.partial = !!options.partial
+
+ // make the set of regexps etc.
+ this.make()
+ }
+
+ debug () {}
+
+ make () {
+ const pattern = this.pattern
+ const options = this.options
+
+ // empty patterns and comments match nothing.
+ if (!options.nocomment && pattern.charAt(0) === '#') {
+ this.comment = true
+ return
+ }
+ if (!pattern) {
+ this.empty = true
+ return
+ }
+
+ // step 1: figure out negation, etc.
+ this.parseNegate()
+
+ // step 2: expand braces
+ let set = this.globSet = this.braceExpand()
+
+ if (options.debug) this.debug = (...args) => console.error(...args)
+
+ this.debug(this.pattern, set)
+
+ // step 3: now we have a set, so turn each one into a series of path-portion
+ // matching patterns.
+ // These will be regexps, except in the case of "**", which is
+ // set to the GLOBSTAR object for globstar behavior,
+ // and will not contain any / characters
+ set = this.globParts = set.map(s => s.split(slashSplit))
+
+ this.debug(this.pattern, set)
+
+ // glob --> regexps
+ set = set.map((s, si, set) => s.map(this.parse, this))
+
+ this.debug(this.pattern, set)
+
+ // filter out everything that didn't compile properly.
+ set = set.filter(s => s.indexOf(false) === -1)
+
+ this.debug(this.pattern, set)
+
+ this.set = set
+ }
+
+ parseNegate () {
+ if (this.options.nonegate) return
+
+ const pattern = this.pattern
+ let negate = false
+ let negateOffset = 0
+
+ for (let i = 0; i < pattern.length && pattern.charAt(i) === '!'; i++) {
+ negate = !negate
+ negateOffset++
+ }
+
+ if (negateOffset) this.pattern = pattern.slice(negateOffset)
+ this.negate = negate
+ }
+
+ // set partial to true to test if, for example,
+ // "/a/b" matches the start of "/*/b/*/d"
+ // Partial means, if you run out of file before you run
+ // out of pattern, then that's fine, as long as all
+ // the parts match.
+ matchOne (file, pattern, partial) {
+ var options = this.options
+
+ this.debug('matchOne',
+ { 'this': this, file: file, pattern: pattern })
+
+ this.debug('matchOne', file.length, pattern.length)
+
+ for (var fi = 0,
+ pi = 0,
+ fl = file.length,
+ pl = pattern.length
+ ; (fi < fl) && (pi < pl)
+ ; fi++, pi++) {
+ this.debug('matchOne loop')
+ var p = pattern[pi]
+ var f = file[fi]
+
+ this.debug(pattern, p, f)
+
+ // should be impossible.
+ // some invalid regexp stuff in the set.
+ /* istanbul ignore if */
+ if (p === false) return false
+
+ if (p === GLOBSTAR) {
+ this.debug('GLOBSTAR', [pattern, p, f])
+
+ // "**"
+ // a/**/b/**/c would match the following:
+ // a/b/x/y/z/c
+ // a/x/y/z/b/c
+ // a/b/x/b/x/c
+ // a/b/c
+ // To do this, take the rest of the pattern after
+ // the **, and see if it would match the file remainder.
+ // If so, return success.
+ // If not, the ** "swallows" a segment, and try again.
+ // This is recursively awful.
+ //
+ // a/**/b/**/c matching a/b/x/y/z/c
+ // - a matches a
+ // - doublestar
+ // - matchOne(b/x/y/z/c, b/**/c)
+ // - b matches b
+ // - doublestar
+ // - matchOne(x/y/z/c, c) -> no
+ // - matchOne(y/z/c, c) -> no
+ // - matchOne(z/c, c) -> no
+ // - matchOne(c, c) yes, hit
+ var fr = fi
+ var pr = pi + 1
+ if (pr === pl) {
+ this.debug('** at the end')
+ // a ** at the end will just swallow the rest.
+ // We have found a match.
+ // however, it will not swallow /.x, unless
+ // options.dot is set.
+ // . and .. are *never* matched by **, for explosively
+ // exponential reasons.
+ for (; fi < fl; fi++) {
+ if (file[fi] === '.' || file[fi] === '..' ||
+ (!options.dot && file[fi].charAt(0) === '.')) return false
+ }
+ return true
+ }
+
+ // ok, let's see if we can swallow whatever we can.
+ while (fr < fl) {
+ var swallowee = file[fr]
+
+ this.debug('\nglobstar while', file, fr, pattern, pr, swallowee)
+
+ // XXX remove this slice. Just pass the start index.
+ if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) {
+ this.debug('globstar found match!', fr, fl, swallowee)
+ // found a match.
+ return true
+ } else {
+ // can't swallow "." or ".." ever.
+ // can only swallow ".foo" when explicitly asked.
+ if (swallowee === '.' || swallowee === '..' ||
+ (!options.dot && swallowee.charAt(0) === '.')) {
+ this.debug('dot detected!', file, fr, pattern, pr)
+ break
+ }
+
+ // ** swallows a segment, and continue.
+ this.debug('globstar swallow a segment, and continue')
+ fr++
+ }
+ }
+
+ // no match was found.
+ // However, in partial mode, we can't say this is necessarily over.
+ // If there's more *pattern* left, then the remaining pattern may still match a deeper path.
+ /* istanbul ignore if */
+ if (partial) {
+ // ran out of file
+ this.debug('\n>>> no match, partial?', file, fr, pattern, pr)
+ if (fr === fl) return true
+ }
+ return false
+ }
+
+ // something other than **
+ // non-magic patterns just have to match exactly
+ // patterns with magic have been turned into regexps.
+ var hit
+ if (typeof p === 'string') {
+ hit = f === p
+ this.debug('string match', p, f, hit)
+ } else {
+ hit = f.match(p)
+ this.debug('pattern match', p, f, hit)
+ }
+
+ if (!hit) return false
+ }
+
+ // Note: ending in / means that we'll get a final ""
+ // at the end of the pattern. This can only match a
+ // corresponding "" at the end of the file.
+ // If the file ends in /, then it can only match a
+ // pattern that ends in /, unless the pattern just
+ // doesn't have any more for it. But, a/b/ should *not*
+ // match "a/b/*", even though "" matches against the
+ // [^/]*? pattern, except in partial mode, where it might
+ // simply not be reached yet.
+ // However, a/b/ should still satisfy a/*
+
+ // now either we fell off the end of the pattern, or we're done.
+ if (fi === fl && pi === pl) {
+ // ran out of pattern and filename at the same time.
+ // an exact hit!
+ return true
+ } else if (fi === fl) {
+ // ran out of file, but still had pattern left.
+ // this is ok if we're doing the match as part of
+ // a glob fs traversal.
+ return partial
+ } else /* istanbul ignore else */ if (pi === pl) {
+ // ran out of pattern, still have file left.
+ // this is only acceptable if we're on the very last
+ // empty segment of a file with a trailing slash.
+ // a/* should match a/b/
+ return (fi === fl - 1) && (file[fi] === '')
+ }
+
+ // should be unreachable.
+ /* istanbul ignore next */
+ throw new Error('wtf?')
+ }
+
+ braceExpand () {
+ return braceExpand(this.pattern, this.options)
+ }
+
+ parse (pattern, isSub) {
+ assertValidPattern(pattern)
+
+ const options = this.options
+
+ // shortcuts
+ if (pattern === '**') {
+ if (!options.noglobstar)
+ return GLOBSTAR
+ else
+ pattern = '*'
+ }
+ if (pattern === '') return ''
+
+ let re = ''
+ let hasMagic = !!options.nocase
+ let escaping = false
+ // ? => one single character
+ const patternListStack = []
+ const negativeLists = []
+ let stateChar
+ let inClass = false
+ let reClassStart = -1
+ let classStart = -1
+ let cs
+ let pl
+ let sp
+ // . and .. never match anything that doesn't start with .,
+ // even when options.dot is set.
+ const patternStart = pattern.charAt(0) === '.' ? '' // anything
+ // not (start or / followed by . or .. followed by / or end)
+ : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))'
+ : '(?!\\.)'
+
+ const clearStateChar = () => {
+ if (stateChar) {
+ // we had some state-tracking character
+ // that wasn't consumed by this pass.
+ switch (stateChar) {
+ case '*':
+ re += star
+ hasMagic = true
+ break
+ case '?':
+ re += qmark
+ hasMagic = true
+ break
+ default:
+ re += '\\' + stateChar
+ break
+ }
+ this.debug('clearStateChar %j %j', stateChar, re)
+ stateChar = false
+ }
+ }
+
+ for (let i = 0, c; (i < pattern.length) && (c = pattern.charAt(i)); i++) {
+ this.debug('%s\t%s %s %j', pattern, i, re, c)
+
+ // skip over any that are escaped.
+ if (escaping) {
+ /* istanbul ignore next - completely not allowed, even escaped. */
+ if (c === '/') {
+ return false
+ }
+
+ if (reSpecials[c]) {
+ re += '\\'
+ }
+ re += c
+ escaping = false
+ continue
+ }
+
+ switch (c) {
+ /* istanbul ignore next */
+ case '/': {
+ // Should already be path-split by now.
+ return false
+ }
+
+ case '\\':
+ clearStateChar()
+ escaping = true
+ continue
+
+ // the various stateChar values
+ // for the "extglob" stuff.
+ case '?':
+ case '*':
+ case '+':
+ case '@':
+ case '!':
+ this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c)
+
+ // all of those are literals inside a class, except that
+ // the glob [!a] means [^a] in regexp
+ if (inClass) {
+ this.debug(' in class')
+ if (c === '!' && i === classStart + 1) c = '^'
+ re += c
+ continue
+ }
+
+ // if we already have a stateChar, then it means
+ // that there was something like ** or +? in there.
+ // Handle the stateChar, then proceed with this one.
+ this.debug('call clearStateChar %j', stateChar)
+ clearStateChar()
+ stateChar = c
+ // if extglob is disabled, then +(asdf|foo) isn't a thing.
+ // just clear the statechar *now*, rather than even diving into
+ // the patternList stuff.
+ if (options.noext) clearStateChar()
+ continue
+
+ case '(':
+ if (inClass) {
+ re += '('
+ continue
+ }
+
+ if (!stateChar) {
+ re += '\\('
+ continue
+ }
+
+ patternListStack.push({
+ type: stateChar,
+ start: i - 1,
+ reStart: re.length,
+ open: plTypes[stateChar].open,
+ close: plTypes[stateChar].close
+ })
+ // negation is (?:(?!js)[^/]*)
+ re += stateChar === '!' ? '(?:(?!(?:' : '(?:'
+ this.debug('plType %j %j', stateChar, re)
+ stateChar = false
+ continue
+
+ case ')':
+ if (inClass || !patternListStack.length) {
+ re += '\\)'
+ continue
+ }
+
+ clearStateChar()
+ hasMagic = true
+ pl = patternListStack.pop()
+ // negation is (?:(?!js)[^/]*)
+ // The others are (?:)
+ re += pl.close
+ if (pl.type === '!') {
+ negativeLists.push(pl)
+ }
+ pl.reEnd = re.length
+ continue
+
+ case '|':
+ if (inClass || !patternListStack.length) {
+ re += '\\|'
+ continue
+ }
+
+ clearStateChar()
+ re += '|'
+ continue
+
+ // these are mostly the same in regexp and glob
+ case '[':
+ // swallow any state-tracking char before the [
+ clearStateChar()
+
+ if (inClass) {
+ re += '\\' + c
+ continue
+ }
+
+ inClass = true
+ classStart = i
+ reClassStart = re.length
+ re += c
+ continue
+
+ case ']':
+ // a right bracket shall lose its special
+ // meaning and represent itself in
+ // a bracket expression if it occurs
+ // first in the list. -- POSIX.2 2.8.3.2
+ if (i === classStart + 1 || !inClass) {
+ re += '\\' + c
+ continue
+ }
+
+ // handle the case where we left a class open.
+ // "[z-a]" is valid, equivalent to "\[z-a\]"
+ // split where the last [ was, make sure we don't have
+ // an invalid re. if so, re-walk the contents of the
+ // would-be class to re-translate any characters that
+ // were passed through as-is
+ // TODO: It would probably be faster to determine this
+ // without a try/catch and a new RegExp, but it's tricky
+ // to do safely. For now, this is safe and works.
+ cs = pattern.substring(classStart + 1, i)
+ try {
+ RegExp('[' + cs + ']')
+ } catch (er) {
+ // not a valid class!
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substring(0, reClassStart) + '\\[' + sp[0] + '\\]'
+ hasMagic = hasMagic || sp[1]
+ inClass = false
+ continue
+ }
+
+ // finish up the class.
+ hasMagic = true
+ inClass = false
+ re += c
+ continue
+
+ default:
+ // swallow any state char that wasn't consumed
+ clearStateChar()
+
+ if (reSpecials[c] && !(c === '^' && inClass)) {
+ re += '\\'
+ }
+
+ re += c
+ break
+
+ } // switch
+ } // for
+
+ // handle the case where we left a class open.
+ // "[abc" is valid, equivalent to "\[abc"
+ if (inClass) {
+ // split where the last [ was, and escape it
+ // this is a huge pita. We now have to re-walk
+ // the contents of the would-be class to re-translate
+ // any characters that were passed through as-is
+ cs = pattern.slice(classStart + 1)
+ sp = this.parse(cs, SUBPARSE)
+ re = re.substring(0, reClassStart) + '\\[' + sp[0]
+ hasMagic = hasMagic || sp[1]
+ }
+
+ // handle the case where we had a +( thing at the *end*
+ // of the pattern.
+ // each pattern list stack adds 3 chars, and we need to go through
+ // and escape any | chars that were passed through as-is for the regexp.
+ // Go through and escape them, taking care not to double-escape any
+ // | chars that were already escaped.
+ for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) {
+ let tail
+ tail = re.slice(pl.reStart + pl.open.length)
+ this.debug('setting tail', re, pl)
+ // maybe some even number of \, then maybe 1 \, followed by a |
+ tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, (_, $1, $2) => {
+ /* istanbul ignore else - should already be done */
+ if (!$2) {
+ // the | isn't already escaped, so escape it.
+ $2 = '\\'
+ }
+
+ // need to escape all those slashes *again*, without escaping the
+ // one that we need for escaping the | character. As it works out,
+ // escaping an even number of slashes can be done by simply repeating
+ // it exactly after itself. That's why this trick works.
+ //
+ // I am sorry that you have to see this.
+ return $1 + $1 + $2 + '|'
+ })
+
+ this.debug('tail=%j\n %s', tail, tail, pl, re)
+ const t = pl.type === '*' ? star
+ : pl.type === '?' ? qmark
+ : '\\' + pl.type
+
+ hasMagic = true
+ re = re.slice(0, pl.reStart) + t + '\\(' + tail
+ }
+
+ // handle trailing things that only matter at the very end.
+ clearStateChar()
+ if (escaping) {
+ // trailing \\
+ re += '\\\\'
+ }
+
+ // only need to apply the nodot start if the re starts with
+ // something that could conceivably capture a dot
+ const addPatternStart = addPatternStartSet[re.charAt(0)]
+
+ // Hack to work around lack of negative lookbehind in JS
+ // A pattern like: *.!(x).!(y|z) needs to ensure that a name
+ // like 'a.xyz.yz' doesn't match. So, the first negative
+ // lookahead, has to look ALL the way ahead, to the end of
+ // the pattern.
+ for (let n = negativeLists.length - 1; n > -1; n--) {
+ const nl = negativeLists[n]
+
+ const nlBefore = re.slice(0, nl.reStart)
+ const nlFirst = re.slice(nl.reStart, nl.reEnd - 8)
+ let nlAfter = re.slice(nl.reEnd)
+ const nlLast = re.slice(nl.reEnd - 8, nl.reEnd) + nlAfter
+
+ // Handle nested stuff like *(*.js|!(*.json)), where open parens
+ // mean that we should *not* include the ) in the bit that is considered
+ // "after" the negated section.
+ const openParensBefore = nlBefore.split('(').length - 1
+ let cleanAfter = nlAfter
+ for (let i = 0; i < openParensBefore; i++) {
+ cleanAfter = cleanAfter.replace(/\)[+*?]?/, '')
+ }
+ nlAfter = cleanAfter
+
+ const dollar = nlAfter === '' && isSub !== SUBPARSE ? '$' : ''
+ re = nlBefore + nlFirst + nlAfter + dollar + nlLast
+ }
+
+ // if the re is not "" at this point, then we need to make sure
+ // it doesn't match against an empty path part.
+ // Otherwise a/* will match a/, which it should not.
+ if (re !== '' && hasMagic) {
+ re = '(?=.)' + re
+ }
+
+ if (addPatternStart) {
+ re = patternStart + re
+ }
+
+ // parsing just a piece of a larger pattern.
+ if (isSub === SUBPARSE) {
+ return [re, hasMagic]
+ }
+
+ // skip the regexp for non-magical patterns
+ // unescape anything in it, though, so that it'll be
+ // an exact match against a file etc.
+ if (!hasMagic) {
+ return globUnescape(pattern)
+ }
+
+ const flags = options.nocase ? 'i' : ''
+ try {
+ return Object.assign(new RegExp('^' + re + '$', flags), {
+ _glob: pattern,
+ _src: re,
+ })
+ } catch (er) /* istanbul ignore next - should be impossible */ {
+ // If it was an invalid regular expression, then it can't match
+ // anything. This trick looks for a character after the end of
+ // the string, which is of course impossible, except in multi-line
+ // mode, but it's not a /m regex.
+ return new RegExp('$.')
+ }
+ }
+
+ makeRe () {
+ if (this.regexp || this.regexp === false) return this.regexp
+
+ // at this point, this.set is a 2d array of partial
+ // pattern strings, or "**".
+ //
+ // It's better to use .match(). This function shouldn't
+ // be used, really, but it's pretty convenient sometimes,
+ // when you just want to work with a regex.
+ const set = this.set
+
+ if (!set.length) {
+ this.regexp = false
+ return this.regexp
+ }
+ const options = this.options
+
+ const twoStar = options.noglobstar ? star
+ : options.dot ? twoStarDot
+ : twoStarNoDot
+ const flags = options.nocase ? 'i' : ''
+
+ // coalesce globstars and regexpify non-globstar patterns
+ // if it's the only item, then we just do one twoStar
+ // if it's the first, and there are more, prepend (\/|twoStar\/)? to next
+ // if it's the last, append (\/twoStar|) to previous
+ // if it's in the middle, append (\/|\/twoStar\/) to previous
+ // then filter out GLOBSTAR symbols
+ let re = set.map(pattern => {
+ pattern = pattern.map(p =>
+ typeof p === 'string' ? regExpEscape(p)
+ : p === GLOBSTAR ? GLOBSTAR
+ : p._src
+ ).reduce((set, p) => {
+ if (!(set[set.length - 1] === GLOBSTAR && p === GLOBSTAR)) {
+ set.push(p)
+ }
+ return set
+ }, [])
+ pattern.forEach((p, i) => {
+ if (p !== GLOBSTAR || pattern[i-1] === GLOBSTAR) {
+ return
+ }
+ if (i === 0) {
+ if (pattern.length > 1) {
+ pattern[i+1] = '(?:\\\/|' + twoStar + '\\\/)?' + pattern[i+1]
+ } else {
+ pattern[i] = twoStar
+ }
+ } else if (i === pattern.length - 1) {
+ pattern[i-1] += '(?:\\\/|' + twoStar + ')?'
+ } else {
+ pattern[i-1] += '(?:\\\/|\\\/' + twoStar + '\\\/)' + pattern[i+1]
+ pattern[i+1] = GLOBSTAR
+ }
+ })
+ return pattern.filter(p => p !== GLOBSTAR).join('/')
+ }).join('|')
+
+ // must match entire pattern
+ // ending in a * or ** will make it less strict.
+ re = '^(?:' + re + ')$'
+
+ // can match anything, as long as it's not this.
+ if (this.negate) re = '^(?!' + re + ').*$'
+
+ try {
+ this.regexp = new RegExp(re, flags)
+ } catch (ex) /* istanbul ignore next - should be impossible */ {
+ this.regexp = false
+ }
+ return this.regexp
+ }
+
+ match (f, partial = this.partial) {
+ this.debug('match', f, this.pattern)
+ // short-circuit in the case of busted things.
+ // comments, etc.
+ if (this.comment) return false
+ if (this.empty) return f === ''
+
+ if (f === '/' && partial) return true
+
+ const options = this.options
+
+ // windows: need to use /, not \
+ if (path.sep !== '/') {
+ f = f.split(path.sep).join('/')
+ }
+
+ // treat the test path as a set of pathparts.
+ f = f.split(slashSplit)
+ this.debug(this.pattern, 'split', f)
+
+ // just ONE of the pattern sets in this.set needs to match
+ // in order for it to be valid. If negating, then just one
+ // match means that we have failed.
+ // Either way, return on the first hit.
+
+ const set = this.set
+ this.debug(this.pattern, 'set', set)
+
+ // Find the basename of the path by looking for the last non-empty segment
+ let filename
+ for (let i = f.length - 1; i >= 0; i--) {
+ filename = f[i]
+ if (filename) break
+ }
+
+ for (let i = 0; i < set.length; i++) {
+ const pattern = set[i]
+ let file = f
+ if (options.matchBase && pattern.length === 1) {
+ file = [filename]
+ }
+ const hit = this.matchOne(file, pattern, partial)
+ if (hit) {
+ if (options.flipNegate) return true
+ return !this.negate
+ }
+ }
+
+ // didn't get any hits. this is success if it's a negative
+ // pattern, failure otherwise.
+ if (options.flipNegate) return false
+ return this.negate
+ }
+
+ static defaults (def) {
+ return minimatch.defaults(def).Minimatch
+ }
+}
+
+minimatch.Minimatch = Minimatch
+
+
+/***/ }),
+
+/***/ 6761:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const Utils = __nccwpck_require__(5182);
+const pth = __nccwpck_require__(1017);
+const ZipEntry = __nccwpck_require__(4057);
+const ZipFile = __nccwpck_require__(7744);
+
+const get_Bool = (val, def) => (typeof val === "boolean" ? val : def);
+const get_Str = (val, def) => (typeof val === "string" ? val : def);
+
+const defaultOptions = {
+ // option "noSort" : if true it disables files sorting
+ noSort: false,
+ // read entries during load (initial loading may be slower)
+ readEntries: false,
+ // default method is none
+ method: Utils.Constants.NONE,
+ // file system
+ fs: null
+};
+
+module.exports = function (/**String*/ input, /** object */ options) {
+ let inBuffer = null;
+
+ // create object based default options, allowing them to be overwritten
+ const opts = Object.assign(Object.create(null), defaultOptions);
+
+ // test input variable
+ if (input && "object" === typeof input) {
+ // if value is not buffer we accept it to be object with options
+ if (!(input instanceof Uint8Array)) {
+ Object.assign(opts, input);
+ input = opts.input ? opts.input : undefined;
+ if (opts.input) delete opts.input;
+ }
+
+ // if input is buffer
+ if (Buffer.isBuffer(input)) {
+ inBuffer = input;
+ opts.method = Utils.Constants.BUFFER;
+ input = undefined;
+ }
+ }
+
+ // assign options
+ Object.assign(opts, options);
+
+ // instantiate utils filesystem
+ const filetools = new Utils(opts);
+
+ // if input is file name we retrieve its content
+ if (input && "string" === typeof input) {
+ // load zip file
+ if (filetools.fs.existsSync(input)) {
+ opts.method = Utils.Constants.FILE;
+ opts.filename = input;
+ inBuffer = filetools.fs.readFileSync(input);
+ } else {
+ throw new Error(Utils.Errors.INVALID_FILENAME);
+ }
+ }
+
+ // create variable
+ const _zip = new ZipFile(inBuffer, opts);
+
+ const { canonical, sanitize } = Utils;
+
+ function getEntry(/**Object*/ entry) {
+ if (entry && _zip) {
+ var item;
+ // If entry was given as a file name
+ if (typeof entry === "string") item = _zip.getEntry(entry);
+ // if entry was given as a ZipEntry object
+ if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName);
+
+ if (item) {
+ return item;
+ }
+ }
+ return null;
+ }
+
+ function fixPath(zipPath) {
+ const { join, normalize, sep } = pth.posix;
+ // convert windows file separators and normalize
+ return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep));
+ }
+
+ return {
+ /**
+ * Extracts the given entry from the archive and returns the content as a Buffer object
+ * @param entry ZipEntry object or String with the full path of the entry
+ *
+ * @return Buffer or Null in case of error
+ */
+ readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) {
+ var item = getEntry(entry);
+ return (item && item.getData(pass)) || null;
+ },
+
+ /**
+ * Asynchronous readFile
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param callback
+ *
+ * @return Buffer or Null in case of error
+ */
+ readFileAsync: function (/**Object*/ entry, /**Function*/ callback) {
+ var item = getEntry(entry);
+ if (item) {
+ item.getDataAsync(callback);
+ } else {
+ callback(null, "getEntry failed for:" + entry);
+ }
+ },
+
+ /**
+ * Extracts the given entry from the archive and returns the content as plain text in the given encoding
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param encoding Optional. If no encoding is specified utf8 is used
+ *
+ * @return String
+ */
+ readAsText: function (/**Object*/ entry, /**String=*/ encoding) {
+ var item = getEntry(entry);
+ if (item) {
+ var data = item.getData();
+ if (data && data.length) {
+ return data.toString(encoding || "utf8");
+ }
+ }
+ return "";
+ },
+
+ /**
+ * Asynchronous readAsText
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param callback
+ * @param encoding Optional. If no encoding is specified utf8 is used
+ *
+ * @return String
+ */
+ readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) {
+ var item = getEntry(entry);
+ if (item) {
+ item.getDataAsync(function (data, err) {
+ if (err) {
+ callback(data, err);
+ return;
+ }
+
+ if (data && data.length) {
+ callback(data.toString(encoding || "utf8"));
+ } else {
+ callback("");
+ }
+ });
+ } else {
+ callback("");
+ }
+ },
+
+ /**
+ * Removes the entry from the archive, or, if the given entry is a directory, the entry and all its nested directories and files
+ *
+ * @param entry
+ */
+ deleteFile: function (/**Object*/ entry) {
+ // @TODO: test deleteFile
+ var item = getEntry(entry);
+ if (item) {
+ _zip.deleteEntry(item.entryName);
+ }
+ },
+
+ /**
+ * Adds a comment to the zip. The zip must be rewritten after adding the comment.
+ *
+ * @param comment
+ */
+ addZipComment: function (/**String*/ comment) {
+ // @TODO: test addZipComment
+ _zip.comment = comment;
+ },
+
+ /**
+ * Returns the zip comment
+ *
+ * @return String
+ */
+ getZipComment: function () {
+ return _zip.comment || "";
+ },
+
+ /**
+ * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment
+ * The comment cannot exceed 65535 characters in length
+ *
+ * @param entry
+ * @param comment
+ */
+ addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) {
+ var item = getEntry(entry);
+ if (item) {
+ item.comment = comment;
+ }
+ },
+
+ /**
+ * Returns the comment of the specified entry
+ *
+ * @param entry
+ * @return String
+ */
+ getZipEntryComment: function (/**Object*/ entry) {
+ var item = getEntry(entry);
+ if (item) {
+ return item.comment || "";
+ }
+ return "";
+ },
+
+ /**
+ * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content
+ *
+ * @param entry
+ * @param content
+ */
+ updateFile: function (/**Object*/ entry, /**Buffer*/ content) {
+ var item = getEntry(entry);
+ if (item) {
+ item.setData(content);
+ }
+ },
+
+ /**
+ * Adds a file from the disk to the archive
+ *
+ * @param localPath File to add to zip
+ * @param zipPath Optional path inside the zip
+ * @param zipName Optional name for the file
+ */
+ addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) {
+ if (filetools.fs.existsSync(localPath)) {
+ // fix ZipPath
+ zipPath = zipPath ? fixPath(zipPath) : "";
+
+ // p - local file name
+ var p = localPath.split("\\").join("/").split("/").pop();
+
+ // add file name into zippath
+ zipPath += zipName ? zipName : p;
+
+ // read file attributes
+ const _attr = filetools.fs.statSync(localPath);
+
+ // add file into zip file
+ this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr);
+ } else {
+ throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
+ }
+ },
+
+ /**
+ * Adds a local directory and all its nested files and directories to the archive
+ *
+ * @param localPath
+ * @param zipPath optional path inside zip
+ * @param filter optional RegExp or Function; files that match will
+ * be included.
+ */
+ addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter) {
+ // Prepare filter
+ if (filter instanceof RegExp) {
+ // if filter is RegExp wrap it
+ filter = (function (rx) {
+ return function (filename) {
+ return rx.test(filename);
+ };
+ })(filter);
+ } else if ("function" !== typeof filter) {
+ // if filter is not function we will replace it
+ filter = function () {
+ return true;
+ };
+ }
+
+ // fix ZipPath
+ zipPath = zipPath ? fixPath(zipPath) : "";
+
+ // normalize the path first
+ localPath = pth.normalize(localPath);
+
+ if (filetools.fs.existsSync(localPath)) {
+ const items = filetools.findFiles(localPath);
+ const self = this;
+
+ if (items.length) {
+ items.forEach(function (filepath) {
+ var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
+ if (filter(p)) {
+ var stats = filetools.fs.statSync(filepath);
+ if (stats.isFile()) {
+ self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", stats);
+ } else {
+ self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats);
+ }
+ }
+ });
+ }
+ } else {
+ throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
+ }
+ },
+
+ /**
+ * Asynchronous addLocalFile
+ * @param localPath
+ * @param callback
+ * @param zipPath optional path inside zip
+ * @param filter optional RegExp or Function; files that match will
+ * be included.
+ */
+ addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) {
+ if (filter instanceof RegExp) {
+ filter = (function (rx) {
+ return function (filename) {
+ return rx.test(filename);
+ };
+ })(filter);
+ } else if ("function" !== typeof filter) {
+ filter = function () {
+ return true;
+ };
+ }
+
+ // fix ZipPath
+ zipPath = zipPath ? fixPath(zipPath) : "";
+
+ // normalize the path first
+ localPath = pth.normalize(localPath);
+
+ var self = this;
+ filetools.fs.open(localPath, "r", function (err) {
+ if (err && err.code === "ENOENT") {
+ callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath));
+ } else if (err) {
+ callback(undefined, err);
+ } else {
+ var items = filetools.findFiles(localPath);
+ var i = -1;
+
+ var next = function () {
+ i += 1;
+ if (i < items.length) {
+ var filepath = items[i];
+ var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix
+ p = p
+ .normalize("NFD")
+ .replace(/[\u0300-\u036f]/g, "")
+ .replace(/[^\x20-\x7E]/g, ""); // accent fix
+ if (filter(p)) {
+ filetools.fs.stat(filepath, function (er0, stats) {
+ if (er0) return callback(undefined, er0);
+ if (stats.isFile()) {
+ filetools.fs.readFile(filepath, function (er1, data) {
+ if (er1) {
+ callback(undefined, er1);
+ } else {
+ self.addFile(zipPath + p, data, "", stats);
+ next();
+ }
+ });
+ } else {
+ self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats);
+ next();
+ }
+ });
+ } else {
+ next();
+ }
+ } else {
+ callback(true, undefined);
+ }
+ };
+
+ next();
+ }
+ });
+ },
+
+ /**
+ *
+ * @param {string} localPath - path where files will be extracted
+ * @param {object} props - optional properties
+ * @param {string} props.zipPath - optional path inside zip
+ * @param {RegExp|Function} props.filter - RegExp or Function; files that match will be included.
+ */
+ addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) {
+ return new Promise((resolve, reject) => {
+ const { filter, zipPath } = Object.assign({}, props);
+ this.addLocalFolderAsync(
+ localPath,
+ (done, err) => {
+ if (err) reject(err);
+ if (done) resolve(this);
+ },
+ zipPath,
+ filter
+ );
+ });
+ },
+
+ /**
+ * Allows you to create an entry (file or directory) in the zip file.
+ * If you want to create a directory the entryName must end in / and a null buffer should be provided.
+ * Comment and attributes are optional
+ *
+ * @param {string} entryName
+ * @param {Buffer | string} content - file content as buffer or utf8 coded string
+ * @param {string} comment - file comment
+ * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object
+ */
+ addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) {
+ let entry = getEntry(entryName);
+ const update = entry != null;
+
+ // prepare new entry
+ if (!update) {
+ entry = new ZipEntry();
+ entry.entryName = entryName;
+ }
+ entry.comment = comment || "";
+
+ const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats;
+
+ // last modification time from file stats
+ if (isStat) {
+ entry.header.time = attr.mtime;
+ }
+
+ // Set file attribute
+ var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag)
+
+ // extended attributes field for Unix
+ if (!Utils.isWin) {
+ // set file type either S_IFDIR / S_IFREG
+ let unix = entry.isDirectory ? 0x4000 : 0x8000;
+
+ if (isStat) {
+ // File attributes from file stats
+ unix |= 0xfff & attr.mode;
+ } else if ("number" === typeof attr) {
+ // attr from given attr values
+ unix |= 0xfff & attr;
+ } else {
+ // Default values:
+ unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-rw-r--r--)
+ }
+
+ fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes
+ }
+
+ entry.attr = fileattr;
+
+ entry.setData(content);
+ if (!update) _zip.setEntry(entry);
+ },
+
+ /**
+ * Returns an array of ZipEntry objects representing the files and folders inside the archive
+ *
+ * @return Array
+ */
+ getEntries: function () {
+ return _zip ? _zip.entries : [];
+ },
+
+ /**
+ * Returns a ZipEntry object representing the file or folder specified by ``name``.
+ *
+ * @param name
+ * @return ZipEntry
+ */
+ getEntry: function (/**String*/ name) {
+ return getEntry(name);
+ },
+
+ getEntryCount: function () {
+ return _zip.getEntryCount();
+ },
+
+ forEach: function (callback) {
+ return _zip.forEach(callback);
+ },
+
+ /**
+ * Extracts the given entry to the given targetPath
+ * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted
+ *
+ * @param entry ZipEntry object or String with the full path of the entry
+ * @param targetPath Target folder where to write the file
+ * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder
+ * will be created in targetPath as well. Default is TRUE
+ * @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
+ * Default is FALSE
+ * @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
+ * Default is FALSE
+ * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file)
+ *
+ * @return Boolean
+ */
+ extractEntryTo: function (
+ /**Object*/ entry,
+ /**String*/ targetPath,
+ /**Boolean*/ maintainEntryPath,
+ /**Boolean*/ overwrite,
+ /**Boolean*/ keepOriginalPermission,
+ /**String**/ outFileName
+ ) {
+ overwrite = get_Bool(overwrite, false);
+ keepOriginalPermission = get_Bool(keepOriginalPermission, false);
+ maintainEntryPath = get_Bool(maintainEntryPath, true);
+ outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined));
+
+ var item = getEntry(entry);
+ if (!item) {
+ throw new Error(Utils.Errors.NO_ENTRY);
+ }
+
+ var entryName = canonical(item.entryName);
+
+ var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName));
+
+ if (item.isDirectory) {
+ var children = _zip.getEntryChildren(item);
+ children.forEach(function (child) {
+ if (child.isDirectory) return;
+ var content = child.getData();
+ if (!content) {
+ throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
+ }
+ var name = canonical(child.entryName);
+ var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name));
+ // The reverse operation for attr depends on the addFile() method
+ const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined;
+ filetools.writeFileTo(childName, content, overwrite, fileAttr);
+ });
+ return true;
+ }
+
+ var content = item.getData();
+ if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
+
+ if (filetools.fs.existsSync(target) && !overwrite) {
+ throw new Error(Utils.Errors.CANT_OVERRIDE);
+ }
+ // The reverse operation for attr depends on the addFile() method
+ const fileAttr = keepOriginalPermission ? item.header.fileAttr : undefined;
+ filetools.writeFileTo(target, content, overwrite, fileAttr);
+
+ return true;
+ },
+
+ /**
+ * Test the archive
+ *
+ */
+ test: function (pass) {
+ if (!_zip) {
+ return false;
+ }
+
+ for (var entry in _zip.entries) {
+     try {
+         var item = _zip.entries[entry];
+         if (item.isDirectory) {
+             continue;
+         }
+         var content = item.getData(pass);
+ if (!content) {
+ return false;
+ }
+ } catch (err) {
+ return false;
+ }
+ }
+ return true;
+ },
+
+ /**
+ * Extracts the entire archive to the given location
+ *
+ * @param targetPath Target location
+ * @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
+ * Default is FALSE
+ * @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
+ * Default is FALSE
+ */
+ extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) {
+ overwrite = get_Bool(overwrite, false);
+ pass = get_Str(keepOriginalPermission, pass);
+ keepOriginalPermission = get_Bool(keepOriginalPermission, false);
+ if (!_zip) {
+ throw new Error(Utils.Errors.NO_ZIP);
+ }
+ _zip.entries.forEach(function (entry) {
+ var entryName = sanitize(targetPath, canonical(entry.entryName.toString()));
+ if (entry.isDirectory) {
+ filetools.makeDir(entryName);
+ return;
+ }
+ var content = entry.getData(pass);
+ if (!content) {
+ throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
+ }
+ // The reverse operation for attr depends on the addFile() method
+ const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
+ filetools.writeFileTo(entryName, content, overwrite, fileAttr);
+ try {
+ filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time);
+ } catch (err) {
+ throw new Error(Utils.Errors.CANT_EXTRACT_FILE);
+ }
+ });
+ },
+
+ /**
+ * Asynchronous extractAllTo
+ *
+ * @param targetPath Target location
+ * @param overwrite If the file already exists at the target path, the file will be overwritten if this is true.
+ * Default is FALSE
+ * @param keepOriginalPermission The file will be set as the permission from the entry if this is true.
+ * Default is FALSE
+ * @param callback The callback will be executed when all entries are extracted successfully or any error is thrown.
+ */
+ extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) {
+ overwrite = get_Bool(overwrite, false);
+ // allow the callback to be passed in place of keepOriginalPermission
+ if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission;
+ keepOriginalPermission = get_Bool(keepOriginalPermission, false);
+ if (!callback) {
+     callback = function () {};
+ }
+ if (!_zip) {
+ callback(new Error(Utils.Errors.NO_ZIP));
+ return;
+ }
+
+ targetPath = pth.resolve(targetPath);
+ // convert entryName to a sanitized absolute path under targetPath
+ const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString())));
+ const getError = (msg, file) => new Error(msg + ': "' + file + '"');
+
+ // separate directories from files
+ const dirEntries = [];
+ const fileEntries = new Set();
+ _zip.entries.forEach((e) => {
+ if (e.isDirectory) {
+ dirEntries.push(e);
+ } else {
+ fileEntries.add(e);
+ }
+ });
+
+ // Create directory entries first synchronously
+ // this prevents race condition and assures folders are there before writing files
+ for (const entry of dirEntries) {
+ const dirPath = getPath(entry);
+ // The reverse operation for attr depends on the addFile() method
+ const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
+ try {
+ filetools.makeDir(dirPath);
+ if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr);
+ // on unix the folder timestamp will change again when files are added later, but set it anyway
+ filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time);
+ } catch (er) {
+ callback(getError("Unable to create folder", dirPath));
+ }
+ }
+
+ // callback wrapper, for some housekeeping
+ const done = () => {
+ if (fileEntries.size === 0) {
+ callback();
+ }
+ };
+
+ // Extract file entries asynchronously
+ for (const entry of fileEntries.values()) {
+ const entryName = pth.normalize(canonical(entry.entryName.toString()));
+ const filePath = sanitize(targetPath, entryName);
+ entry.getDataAsync(function (content, err_1) {
+ if (err_1) {
+ callback(new Error(err_1));
+ return;
+ }
+ if (!content) {
+ callback(new Error(Utils.Errors.CANT_EXTRACT_FILE));
+ } else {
+ // The reverse operation for attr depends on the addFile() method
+ const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined;
+ filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) {
+ if (!succ) {
+ callback(getError("Unable to write file", filePath));
+ return;
+ }
+ filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) {
+ if (err_2) {
+ callback(getError("Unable to set times", filePath));
+ return;
+ }
+ fileEntries.delete(entry);
+ // call the callback if it was last entry
+ done();
+ });
+ });
+ }
+ });
+ }
+ // call the callback if fileEntries was empty
+ done();
+ },
+
+ /**
+ * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip
+ *
+ * @param targetFileName
+ * @param callback
+ */
+ writeZip: function (/**String*/ targetFileName, /**Function*/ callback) {
+ if (arguments.length === 1) {
+ if (typeof targetFileName === "function") {
+ callback = targetFileName;
+ targetFileName = "";
+ }
+ }
+
+ if (!targetFileName && opts.filename) {
+ targetFileName = opts.filename;
+ }
+ if (!targetFileName) return;
+
+ var zipData = _zip.compressToBuffer();
+ if (zipData) {
+ var ok = filetools.writeFileTo(targetFileName, zipData, true);
+ if (typeof callback === "function") callback(!ok ? new Error("failed") : null, "");
+ }
+ },
+
+ writeZipPromise: function (/**String*/ targetFileName, /* object */ props) {
+ const { overwrite, perm } = Object.assign({ overwrite: true }, props);
+
+ return new Promise((resolve, reject) => {
+ // find file name
+ if (!targetFileName && opts.filename) targetFileName = opts.filename;
+ if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing");
+
+ this.toBufferPromise().then((zipData) => {
+ const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file"));
+ filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret);
+ }, reject);
+ });
+ },
+
+ toBufferPromise: function () {
+ return new Promise((resolve, reject) => {
+ _zip.toAsyncBuffer(resolve, reject);
+ });
+ },
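+
+    // Illustrative usage of the promise-based helpers above (the zip instance and the
+    // output path are hypothetical; writeZipPromise falls back to the originally opened
+    // filename when no target is given):
+    //
+    //   const buf = await zip.toBufferPromise();                      // archive as a Buffer
+    //   await zip.writeZipPromise("./out.zip", { overwrite: true });  // write it to disk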
+
+ /**
+ * Returns the content of the entire zip file as a Buffer object
+ *
+ * @return Buffer
+ */
+ toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) {
+ this.valueOf = 2;
+ if (typeof onSuccess === "function") {
+ _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd);
+ return null;
+ }
+ return _zip.compressToBuffer();
+ }
+ };
+};
+
+
+/***/ }),
+
+/***/ 9032:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var Utils = __nccwpck_require__(5182),
+ Constants = Utils.Constants;
+
+/* The central directory file header */
+module.exports = function () {
+ var _verMade = 20, // v2.0
+ _version = 10, // v1.0
+ _flags = 0,
+ _method = 0,
+ _time = 0,
+ _crc = 0,
+ _compressedSize = 0,
+ _size = 0,
+ _fnameLen = 0,
+ _extraLen = 0,
+ _comLen = 0,
+ _diskStart = 0,
+ _inattr = 0,
+ _attr = 0,
+ _offset = 0;
+
+ _verMade |= Utils.isWin ? 0x0a00 : 0x0300;
+
+ // Set EFS flag since filename and comment fields are all by default encoded using UTF-8.
+ // Without it file names may be corrupted for other apps when file names use unicode chars
+ _flags |= Constants.FLG_EFS;
+
+ var _dataHeader = {};
+
+ function setTime(val) {
+ val = new Date(val);
+ _time =
+            // 2 bytes date
+            (((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-15 years from 1980
+            ((val.getMonth() + 1) << 21) | // b05-08 month
+            (val.getDate() << 16) | // b00-04 day
+            // 2 bytes time
+            (val.getHours() << 11) | // b11-15 hour
+            (val.getMinutes() << 5) | // b05-10 minute
+            (val.getSeconds() >> 1); // b00-04 seconds divided by 2
+ }
+
+ setTime(+new Date());
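+
+    // Worked example of the DOS date/time packing performed by setTime() above
+    // (date in the upper 16 bits, time in the lower 16 bits, seconds stored halved):
+    //
+    //   2022-12-13 10:30:24  ->  date word: ((2022 - 1980) << 9) | (12 << 5) | 13 = 0x558D
+    //                            time word: (10 << 11) | (30 << 5) | (24 >> 1)    = 0x53CC
+    //                            packed value: 0x558D53CC
+    //
+    // The `time` getter below reverses this packing into a regular Date object.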
+
+ return {
+ get made() {
+ return _verMade;
+ },
+ set made(val) {
+ _verMade = val;
+ },
+
+ get version() {
+ return _version;
+ },
+ set version(val) {
+ _version = val;
+ },
+
+ get flags() {
+ return _flags;
+ },
+ set flags(val) {
+ _flags = val;
+ },
+
+ get method() {
+ return _method;
+ },
+ set method(val) {
+            switch (val) {
+                case Constants.STORED:
+                    this.version = 10;
+                    break;
+                case Constants.DEFLATED:
+                default:
+                    this.version = 20;
+            }
+ _method = val;
+ },
+
+ get time() {
+ return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1);
+ },
+ set time(val) {
+ setTime(val);
+ },
+
+ get crc() {
+ return _crc;
+ },
+ set crc(val) {
+ _crc = Math.max(0, val) >>> 0;
+ },
+
+ get compressedSize() {
+ return _compressedSize;
+ },
+ set compressedSize(val) {
+ _compressedSize = Math.max(0, val) >>> 0;
+ },
+
+ get size() {
+ return _size;
+ },
+ set size(val) {
+ _size = Math.max(0, val) >>> 0;
+ },
+
+ get fileNameLength() {
+ return _fnameLen;
+ },
+ set fileNameLength(val) {
+ _fnameLen = val;
+ },
+
+ get extraLength() {
+ return _extraLen;
+ },
+ set extraLength(val) {
+ _extraLen = val;
+ },
+
+ get commentLength() {
+ return _comLen;
+ },
+ set commentLength(val) {
+ _comLen = val;
+ },
+
+ get diskNumStart() {
+ return _diskStart;
+ },
+ set diskNumStart(val) {
+ _diskStart = Math.max(0, val) >>> 0;
+ },
+
+ get inAttr() {
+ return _inattr;
+ },
+ set inAttr(val) {
+ _inattr = Math.max(0, val) >>> 0;
+ },
+
+ get attr() {
+ return _attr;
+ },
+ set attr(val) {
+ _attr = Math.max(0, val) >>> 0;
+ },
+
+ // get Unix file permissions
+ get fileAttr() {
+ return _attr ? (((_attr >>> 0) | 0) >> 16) & 0xfff : 0;
+ },
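+
+        // Example (illustrative values): a regular file with Unix mode 0o100644 is
+        // stored in the external attributes as 0o100644 << 16 = 0x81A40000, so this
+        // getter yields (0x81A40000 >> 16) & 0xfff = 0o644, i.e. just the permission bits.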
+
+ get offset() {
+ return _offset;
+ },
+ set offset(val) {
+ _offset = Math.max(0, val) >>> 0;
+ },
+
+ get encripted() {
+ return (_flags & 1) === 1;
+ },
+
+ get entryHeaderSize() {
+ return Constants.CENHDR + _fnameLen + _extraLen + _comLen;
+ },
+
+ get realDataOffset() {
+ return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen;
+ },
+
+ get dataHeader() {
+ return _dataHeader;
+ },
+
+ loadDataHeaderFromBinary: function (/*Buffer*/ input) {
+ var data = input.slice(_offset, _offset + Constants.LOCHDR);
+ // 30 bytes and should start with "PK\003\004"
+ if (data.readUInt32LE(0) !== Constants.LOCSIG) {
+ throw new Error(Utils.Errors.INVALID_LOC);
+ }
+ _dataHeader = {
+ // version needed to extract
+ version: data.readUInt16LE(Constants.LOCVER),
+ // general purpose bit flag
+ flags: data.readUInt16LE(Constants.LOCFLG),
+ // compression method
+ method: data.readUInt16LE(Constants.LOCHOW),
+ // modification time (2 bytes time, 2 bytes date)
+ time: data.readUInt32LE(Constants.LOCTIM),
+ // uncompressed file crc-32 value
+ crc: data.readUInt32LE(Constants.LOCCRC),
+ // compressed size
+ compressedSize: data.readUInt32LE(Constants.LOCSIZ),
+ // uncompressed size
+ size: data.readUInt32LE(Constants.LOCLEN),
+ // filename length
+ fnameLen: data.readUInt16LE(Constants.LOCNAM),
+ // extra field length
+ extraLen: data.readUInt16LE(Constants.LOCEXT)
+ };
+ },
+
+ loadFromBinary: function (/*Buffer*/ data) {
+ // data should be 46 bytes and start with "PK 01 02"
+ if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) {
+ throw new Error(Utils.Errors.INVALID_CEN);
+ }
+ // version made by
+ _verMade = data.readUInt16LE(Constants.CENVEM);
+ // version needed to extract
+ _version = data.readUInt16LE(Constants.CENVER);
+ // encrypt, decrypt flags
+ _flags = data.readUInt16LE(Constants.CENFLG);
+ // compression method
+ _method = data.readUInt16LE(Constants.CENHOW);
+ // modification time (2 bytes time, 2 bytes date)
+ _time = data.readUInt32LE(Constants.CENTIM);
+ // uncompressed file crc-32 value
+ _crc = data.readUInt32LE(Constants.CENCRC);
+ // compressed size
+ _compressedSize = data.readUInt32LE(Constants.CENSIZ);
+ // uncompressed size
+ _size = data.readUInt32LE(Constants.CENLEN);
+ // filename length
+ _fnameLen = data.readUInt16LE(Constants.CENNAM);
+ // extra field length
+ _extraLen = data.readUInt16LE(Constants.CENEXT);
+ // file comment length
+ _comLen = data.readUInt16LE(Constants.CENCOM);
+ // volume number start
+ _diskStart = data.readUInt16LE(Constants.CENDSK);
+ // internal file attributes
+ _inattr = data.readUInt16LE(Constants.CENATT);
+ // external file attributes
+ _attr = data.readUInt32LE(Constants.CENATX);
+ // LOC header offset
+ _offset = data.readUInt32LE(Constants.CENOFF);
+ },
+
+ dataHeaderToBinary: function () {
+ // LOC header size (30 bytes)
+ var data = Buffer.alloc(Constants.LOCHDR);
+ // "PK\003\004"
+ data.writeUInt32LE(Constants.LOCSIG, 0);
+ // version needed to extract
+ data.writeUInt16LE(_version, Constants.LOCVER);
+ // general purpose bit flag
+ data.writeUInt16LE(_flags, Constants.LOCFLG);
+ // compression method
+ data.writeUInt16LE(_method, Constants.LOCHOW);
+ // modification time (2 bytes time, 2 bytes date)
+ data.writeUInt32LE(_time, Constants.LOCTIM);
+ // uncompressed file crc-32 value
+ data.writeUInt32LE(_crc, Constants.LOCCRC);
+ // compressed size
+ data.writeUInt32LE(_compressedSize, Constants.LOCSIZ);
+ // uncompressed size
+ data.writeUInt32LE(_size, Constants.LOCLEN);
+ // filename length
+ data.writeUInt16LE(_fnameLen, Constants.LOCNAM);
+ // extra field length
+ data.writeUInt16LE(_extraLen, Constants.LOCEXT);
+ return data;
+ },
+
+ entryHeaderToBinary: function () {
+ // CEN header size (46 bytes)
+ var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen);
+ // "PK\001\002"
+ data.writeUInt32LE(Constants.CENSIG, 0);
+ // version made by
+ data.writeUInt16LE(_verMade, Constants.CENVEM);
+ // version needed to extract
+ data.writeUInt16LE(_version, Constants.CENVER);
+ // encrypt, decrypt flags
+ data.writeUInt16LE(_flags, Constants.CENFLG);
+ // compression method
+ data.writeUInt16LE(_method, Constants.CENHOW);
+ // modification time (2 bytes time, 2 bytes date)
+ data.writeUInt32LE(_time, Constants.CENTIM);
+ // uncompressed file crc-32 value
+ data.writeUInt32LE(_crc, Constants.CENCRC);
+ // compressed size
+ data.writeUInt32LE(_compressedSize, Constants.CENSIZ);
+ // uncompressed size
+ data.writeUInt32LE(_size, Constants.CENLEN);
+ // filename length
+ data.writeUInt16LE(_fnameLen, Constants.CENNAM);
+ // extra field length
+ data.writeUInt16LE(_extraLen, Constants.CENEXT);
+ // file comment length
+ data.writeUInt16LE(_comLen, Constants.CENCOM);
+ // volume number start
+ data.writeUInt16LE(_diskStart, Constants.CENDSK);
+ // internal file attributes
+ data.writeUInt16LE(_inattr, Constants.CENATT);
+ // external file attributes
+ data.writeUInt32LE(_attr, Constants.CENATX);
+ // LOC header offset
+ data.writeUInt32LE(_offset, Constants.CENOFF);
+            // zero-fill the space reserved for the file name, extra field and comment
+ data.fill(0x00, Constants.CENHDR);
+ return data;
+ },
+
+ toJSON: function () {
+ const bytes = function (nr) {
+ return nr + " bytes";
+ };
+
+ return {
+ made: _verMade,
+ version: _version,
+ flags: _flags,
+ method: Utils.methodToString(_method),
+ time: this.time,
+ crc: "0x" + _crc.toString(16).toUpperCase(),
+ compressedSize: bytes(_compressedSize),
+ size: bytes(_size),
+ fileNameLength: bytes(_fnameLen),
+ extraLength: bytes(_extraLen),
+ commentLength: bytes(_comLen),
+ diskNumStart: _diskStart,
+ inAttr: _inattr,
+ attr: _attr,
+ offset: _offset,
+ entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen)
+ };
+ },
+
+ toString: function () {
+ return JSON.stringify(this.toJSON(), null, "\t");
+ }
+ };
+};
+
+
+/***/ }),
+
+/***/ 4958:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+exports.EntryHeader = __nccwpck_require__(9032);
+exports.MainHeader = __nccwpck_require__(4408);
+
+
+/***/ }),
+
+/***/ 4408:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var Utils = __nccwpck_require__(5182),
+ Constants = Utils.Constants;
+
+/* The entries in the end of central directory */
+module.exports = function () {
+ var _volumeEntries = 0,
+ _totalEntries = 0,
+ _size = 0,
+ _offset = 0,
+ _commentLength = 0;
+
+ return {
+ get diskEntries() {
+ return _volumeEntries;
+ },
+ set diskEntries(/*Number*/ val) {
+ _volumeEntries = _totalEntries = val;
+ },
+
+ get totalEntries() {
+ return _totalEntries;
+ },
+ set totalEntries(/*Number*/ val) {
+ _totalEntries = _volumeEntries = val;
+ },
+
+ get size() {
+ return _size;
+ },
+ set size(/*Number*/ val) {
+ _size = val;
+ },
+
+ get offset() {
+ return _offset;
+ },
+ set offset(/*Number*/ val) {
+ _offset = val;
+ },
+
+ get commentLength() {
+ return _commentLength;
+ },
+ set commentLength(/*Number*/ val) {
+ _commentLength = val;
+ },
+
+ get mainHeaderSize() {
+ return Constants.ENDHDR + _commentLength;
+ },
+
+ loadFromBinary: function (/*Buffer*/ data) {
+ // data should be 22 bytes and start with "PK 05 06"
+ // or be 56+ bytes and start with "PK 06 06" for Zip64
+ if (
+ (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) &&
+ (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG)
+ ) {
+ throw new Error(Utils.Errors.INVALID_END);
+ }
+
+ if (data.readUInt32LE(0) === Constants.ENDSIG) {
+ // number of entries on this volume
+ _volumeEntries = data.readUInt16LE(Constants.ENDSUB);
+ // total number of entries
+ _totalEntries = data.readUInt16LE(Constants.ENDTOT);
+ // central directory size in bytes
+ _size = data.readUInt32LE(Constants.ENDSIZ);
+ // offset of first CEN header
+ _offset = data.readUInt32LE(Constants.ENDOFF);
+ // zip file comment length
+ _commentLength = data.readUInt16LE(Constants.ENDCOM);
+ } else {
+ // number of entries on this volume
+ _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB);
+ // total number of entries
+ _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT);
+ // central directory size in bytes
+ _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZ);
+ // offset of first CEN header
+ _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF);
+
+ _commentLength = 0;
+ }
+ },
+
+ toBinary: function () {
+ var b = Buffer.alloc(Constants.ENDHDR + _commentLength);
+ // "PK 05 06" signature
+ b.writeUInt32LE(Constants.ENDSIG, 0);
+            // number of this disk / disk where the central directory starts (both 0)
+            b.writeUInt32LE(0, 4);
+ // number of entries on this volume
+ b.writeUInt16LE(_volumeEntries, Constants.ENDSUB);
+ // total number of entries
+ b.writeUInt16LE(_totalEntries, Constants.ENDTOT);
+ // central directory size in bytes
+ b.writeUInt32LE(_size, Constants.ENDSIZ);
+ // offset of first CEN header
+ b.writeUInt32LE(_offset, Constants.ENDOFF);
+ // zip file comment length
+ b.writeUInt16LE(_commentLength, Constants.ENDCOM);
+ // fill comment memory with spaces so no garbage is left there
+ b.fill(" ", Constants.ENDHDR);
+
+ return b;
+ },
+
+ toJSON: function () {
+ // creates 0x0000 style output
+ const offset = function (nr, len) {
+ let offs = nr.toString(16).toUpperCase();
+ while (offs.length < len) offs = "0" + offs;
+ return "0x" + offs;
+ };
+
+ return {
+ diskEntries: _volumeEntries,
+ totalEntries: _totalEntries,
+ size: _size + " bytes",
+ offset: offset(_offset, 4),
+ commentLength: _commentLength
+ };
+ },
+
+ toString: function () {
+ return JSON.stringify(this.toJSON(), null, "\t");
+ }
+ };
+};
+
+
+/***/ }),
+
+/***/ 7686:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = function (/*Buffer*/ inbuf) {
+ var zlib = __nccwpck_require__(9796);
+
+ var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 };
+
+ return {
+ deflate: function () {
+ return zlib.deflateRawSync(inbuf, opts);
+ },
+
+ deflateAsync: function (/*Function*/ callback) {
+ var tmp = zlib.createDeflateRaw(opts),
+ parts = [],
+ total = 0;
+ tmp.on("data", function (data) {
+ parts.push(data);
+ total += data.length;
+ });
+ tmp.on("end", function () {
+ var buf = Buffer.alloc(total),
+ written = 0;
+ buf.fill(0);
+ for (var i = 0; i < parts.length; i++) {
+ var part = parts[i];
+ part.copy(buf, written);
+ written += part.length;
+ }
+ callback && callback(buf);
+ });
+ tmp.end(inbuf);
+ }
+ };
+};
+
+
+/***/ }),
+
+/***/ 3928:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+exports.Deflater = __nccwpck_require__(7686);
+exports.Inflater = __nccwpck_require__(2153);
+exports.ZipCrypto = __nccwpck_require__(3228);
+
+
+/***/ }),
+
+/***/ 2153:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = function (/*Buffer*/ inbuf) {
+ var zlib = __nccwpck_require__(9796);
+
+ return {
+ inflate: function () {
+ return zlib.inflateRawSync(inbuf);
+ },
+
+ inflateAsync: function (/*Function*/ callback) {
+ var tmp = zlib.createInflateRaw(),
+ parts = [],
+ total = 0;
+ tmp.on("data", function (data) {
+ parts.push(data);
+ total += data.length;
+ });
+ tmp.on("end", function () {
+ var buf = Buffer.alloc(total),
+ written = 0;
+ buf.fill(0);
+ for (var i = 0; i < parts.length; i++) {
+ var part = parts[i];
+ part.copy(buf, written);
+ written += part.length;
+ }
+ callback && callback(buf);
+ });
+ tmp.end(inbuf);
+ }
+ };
+};
+
+
+/***/ }),
+
+/***/ 3228:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+"use strict";
+
+
+// node crypto, we use it to generate the salt
+// eslint-disable-next-line node/no-unsupported-features/node-builtins
+const { randomFillSync } = __nccwpck_require__(6113);
+
+// generate CRC32 lookup table
+const crctable = new Uint32Array(256).map((t, crc) => {
+ for (let j = 0; j < 8; j++) {
+ if (0 !== (crc & 1)) {
+ crc = (crc >>> 1) ^ 0xedb88320;
+ } else {
+ crc >>>= 1;
+ }
+ }
+ return crc >>> 0;
+});
+
+// C-style uInt32 multiply (keeps only the low 32 bits, whereas plain JS multiplication loses precision in the lower bits)
+const uMul = (a, b) => Math.imul(a, b) >>> 0;
+
+// single-byte crc32 update (the same routine used inside utils.crc32)
+const crc32update = (pCrc32, bval) => {
+ return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8);
+};
+
+// function for generating the salt for the encryption header
+const genSalt = () => {
+ if ("function" === typeof randomFillSync) {
+ return randomFillSync(Buffer.alloc(12));
+ } else {
+ // fallback if function is not defined
+ return genSalt.node();
+ }
+};
+
+// salt generation with node random function (mainly as fallback)
+genSalt.node = () => {
+ const salt = Buffer.alloc(12);
+ const len = salt.length;
+ for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff;
+ return salt;
+};
+
+// general config
+const config = {
+ genSalt
+};
+
+// Initkeys handles the basic key-schedule operations
+function Initkeys(pw) {
+ const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw);
+ this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]);
+ for (let i = 0; i < pass.length; i++) {
+ this.updateKeys(pass[i]);
+ }
+}
+
+Initkeys.prototype.updateKeys = function (byteValue) {
+ const keys = this.keys;
+ keys[0] = crc32update(keys[0], byteValue);
+ keys[1] += keys[0] & 0xff;
+ keys[1] = uMul(keys[1], 134775813) + 1;
+ keys[2] = crc32update(keys[2], keys[1] >>> 24);
+ return byteValue;
+};
+
+Initkeys.prototype.next = function () {
+ const k = (this.keys[2] | 2) >>> 0; // key
+ return (uMul(k, k ^ 1) >> 8) & 0xff; // decode
+};
+
+function make_decrypter(/*Buffer*/ pwd) {
+    // 1. initialize the keys
+ const keys = new Initkeys(pwd);
+
+ // return decrypter function
+ return function (/*Buffer*/ data) {
+ // result - we create new Buffer for results
+ const result = Buffer.alloc(data.length);
+ let pos = 0;
+ // process input data
+ for (let c of data) {
+ //c ^= keys.next();
+ //result[pos++] = c; // decode & Save Value
+ result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte
+ }
+ return result;
+ };
+}
+
+function make_encrypter(/*Buffer*/ pwd) {
+    // 1. initialize the keys
+ const keys = new Initkeys(pwd);
+
+    // return encrypting function; result and pos are parameters so we don't have to merge buffers later
+ return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) {
+ // result - we create new Buffer for results
+ if (!result) result = Buffer.alloc(data.length);
+ // process input data
+ for (let c of data) {
+ const k = keys.next(); // save key byte
+ result[pos++] = c ^ k; // save val
+            keys.updateKeys(c); // update keys with the plaintext byte
+ }
+ return result;
+ };
+}
+
+function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) {
+ if (!data || !Buffer.isBuffer(data) || data.length < 12) {
+ return Buffer.alloc(0);
+ }
+
+ // 1. We Initialize and generate decrypting function
+ const decrypter = make_decrypter(pwd);
+
+    // 2. decrypt the salt, which is always 12 bytes and is part of the file content
+ const salt = decrypter(data.slice(0, 12));
+
+ // 3. does password meet expectations
+ if (salt[11] !== header.crc >>> 24) {
+ throw "ADM-ZIP: Wrong Password";
+ }
+
+ // 4. decode content
+ return decrypter(data.slice(12));
+}
+
+// let's add a way to populate the salt; NOT RECOMMENDED for production, but maybe useful for testing general functionality
+function _salter(data) {
+ if (Buffer.isBuffer(data) && data.length >= 12) {
+        // be aware: the provided salt buffer is used directly, so it will be modified during encryption
+ config.genSalt = function () {
+ return data.slice(0, 12);
+ };
+ } else if (data === "node") {
+ // test salt generation with node random function
+ config.genSalt = genSalt.node;
+ } else {
+ // if value is not acceptable config gets reset.
+ config.genSalt = genSalt;
+ }
+}
+
+function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) {
+    // 1. check the data; if it is not a Buffer, make a buffer from it
+    if (data == null) data = Buffer.alloc(0);
+    if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString());
+
+ // 2. We Initialize and generate encrypting function
+ const encrypter = make_encrypter(pwd);
+
+ // 3. generate salt (12-bytes of random data)
+ const salt = config.genSalt();
+ salt[11] = (header.crc >>> 24) & 0xff;
+
+    // old implementations (before PKZip 2.04g) used a two-byte check
+ if (oldlike) salt[10] = (header.crc >>> 16) & 0xff;
+
+ // 4. create output
+ const result = Buffer.alloc(data.length + 12);
+ encrypter(salt, result);
+
+ // finally encode content
+ return encrypter(data, result, 12);
+}
+
+module.exports = { decrypt, encrypt, _salter };
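+
+// Illustrative round trip with the legacy ZipCrypto routines exported above. The header
+// only needs a `crc` field, since its top byte is used as the password-check byte; the
+// values below are made up for the example:
+//
+//   const header = { crc: 0xCBF43926 };
+//   const cipherText = encrypt(Buffer.from("hello"), header, "secret");   // 12-byte salt + data
+//   const plainText = decrypt(cipherText, header, "secret");              // -> Buffer("hello")
+//   // a wrong password almost always fails the salt check and decrypt() throws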
+
+
+/***/ }),
+
+/***/ 4522:
+/***/ ((module) => {
+
+module.exports = {
+ /* The local file header */
+ LOCHDR : 30, // LOC header size
+ LOCSIG : 0x04034b50, // "PK\003\004"
+ LOCVER : 4, // version needed to extract
+ LOCFLG : 6, // general purpose bit flag
+ LOCHOW : 8, // compression method
+ LOCTIM : 10, // modification time (2 bytes time, 2 bytes date)
+ LOCCRC : 14, // uncompressed file crc-32 value
+ LOCSIZ : 18, // compressed size
+ LOCLEN : 22, // uncompressed size
+ LOCNAM : 26, // filename length
+ LOCEXT : 28, // extra field length
+
+ /* The Data descriptor */
+ EXTSIG : 0x08074b50, // "PK\007\008"
+ EXTHDR : 16, // EXT header size
+ EXTCRC : 4, // uncompressed file crc-32 value
+ EXTSIZ : 8, // compressed size
+ EXTLEN : 12, // uncompressed size
+
+ /* The central directory file header */
+ CENHDR : 46, // CEN header size
+ CENSIG : 0x02014b50, // "PK\001\002"
+ CENVEM : 4, // version made by
+ CENVER : 6, // version needed to extract
+ CENFLG : 8, // encrypt, decrypt flags
+ CENHOW : 10, // compression method
+ CENTIM : 12, // modification time (2 bytes time, 2 bytes date)
+ CENCRC : 16, // uncompressed file crc-32 value
+ CENSIZ : 20, // compressed size
+ CENLEN : 24, // uncompressed size
+ CENNAM : 28, // filename length
+ CENEXT : 30, // extra field length
+ CENCOM : 32, // file comment length
+ CENDSK : 34, // volume number start
+ CENATT : 36, // internal file attributes
+ CENATX : 38, // external file attributes (host system dependent)
+ CENOFF : 42, // LOC header offset
+
+ /* The entries in the end of central directory */
+ ENDHDR : 22, // END header size
+ ENDSIG : 0x06054b50, // "PK\005\006"
+ ENDSUB : 8, // number of entries on this disk
+ ENDTOT : 10, // total number of entries
+ ENDSIZ : 12, // central directory size in bytes
+ ENDOFF : 16, // offset of first CEN header
+ ENDCOM : 20, // zip file comment length
+
+ END64HDR : 20, // zip64 END header size
+ END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007"
+ END64START : 4, // number of the disk with the start of the zip64
+ END64OFF : 8, // relative offset of the zip64 end of central directory
+ END64NUMDISKS : 16, // total number of disks
+
+ ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006"
+ ZIP64HDR : 56, // zip64 record minimum size
+ ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE
+ ZIP64SIZE : 4, // zip64 size of the central directory record
+ ZIP64VEM : 12, // zip64 version made by
+ ZIP64VER : 14, // zip64 version needed to extract
+ ZIP64DSK : 16, // zip64 number of this disk
+ ZIP64DSKDIR : 20, // number of the disk with the start of the record directory
+ ZIP64SUB : 24, // number of entries on this disk
+ ZIP64TOT : 32, // total number of entries
+ ZIP64SIZB : 40, // zip64 central directory size in bytes
+ ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number
+ ZIP64EXTRA : 56, // extensible data sector
+
+ /* Compression methods */
+ STORED : 0, // no compression
+ SHRUNK : 1, // shrunk
+ REDUCED1 : 2, // reduced with compression factor 1
+ REDUCED2 : 3, // reduced with compression factor 2
+ REDUCED3 : 4, // reduced with compression factor 3
+ REDUCED4 : 5, // reduced with compression factor 4
+ IMPLODED : 6, // imploded
+ // 7 reserved for Tokenizing compression algorithm
+ DEFLATED : 8, // deflated
+ ENHANCED_DEFLATED: 9, // enhanced deflated
+ PKWARE : 10,// PKWare DCL imploded
+ // 11 reserved by PKWARE
+ BZIP2 : 12, // compressed using BZIP2
+ // 13 reserved by PKWARE
+ LZMA : 14, // LZMA
+ // 15-17 reserved by PKWARE
+ IBM_TERSE : 18, // compressed using IBM TERSE
+ IBM_LZ77 : 19, // IBM LZ77 z
+ AES_ENCRYPT : 99, // WinZIP AES encryption method
+
+ /* General purpose bit flag */
+    // values can be obtained with the expression 2**bitnr
+ FLG_ENC : 1, // Bit 0: encrypted file
+ FLG_COMP1 : 2, // Bit 1, compression option
+ FLG_COMP2 : 4, // Bit 2, compression option
+ FLG_DESC : 8, // Bit 3, data descriptor
+ FLG_ENH : 16, // Bit 4, enhanced deflating
+ FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data.
+ FLG_STR : 64, // Bit 6, strong encryption (patented)
+ // Bits 7-10: Currently unused.
+ FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS)
+ // Bit 12: Reserved by PKWARE for enhanced compression.
+ // Bit 13: encrypted the Central Directory (patented).
+ // Bits 14-15: Reserved by PKWARE.
+ FLG_MSK : 4096, // mask header values
+
+ /* Load type */
+ FILE : 2,
+ BUFFER : 1,
+ NONE : 0,
+
+ /* 4.5 Extensible data fields */
+ EF_ID : 0,
+ EF_SIZE : 2,
+
+ /* Header IDs */
+ ID_ZIP64 : 0x0001,
+ ID_AVINFO : 0x0007,
+ ID_PFS : 0x0008,
+ ID_OS2 : 0x0009,
+ ID_NTFS : 0x000a,
+ ID_OPENVMS : 0x000c,
+ ID_UNIX : 0x000d,
+ ID_FORK : 0x000e,
+ ID_PATCH : 0x000f,
+ ID_X509_PKCS7 : 0x0014,
+ ID_X509_CERTID_F : 0x0015,
+ ID_X509_CERTID_C : 0x0016,
+ ID_STRONGENC : 0x0017,
+ ID_RECORD_MGT : 0x0018,
+ ID_X509_PKCS7_RL : 0x0019,
+ ID_IBM1 : 0x0065,
+ ID_IBM2 : 0x0066,
+ ID_POSZIP : 0x4690,
+
+ EF_ZIP64_OR_32 : 0xffffffff,
+ EF_ZIP64_OR_16 : 0xffff,
+ EF_ZIP64_SUNCOMP : 0,
+ EF_ZIP64_SCOMP : 8,
+ EF_ZIP64_RHO : 16,
+ EF_ZIP64_DSN : 24
+};
+
+
+/***/ }),
+
+/***/ 1255:
+/***/ ((module) => {
+
+module.exports = {
+ /* Header error messages */
+ INVALID_LOC: "Invalid LOC header (bad signature)",
+ INVALID_CEN: "Invalid CEN header (bad signature)",
+ INVALID_END: "Invalid END header (bad signature)",
+
+ /* ZipEntry error messages*/
+ NO_DATA: "Nothing to decompress",
+ BAD_CRC: "CRC32 checksum failed",
+ FILE_IN_THE_WAY: "There is a file in the way: %s",
+ UNKNOWN_METHOD: "Invalid/unsupported compression method",
+
+ /* Inflater error messages */
+ AVAIL_DATA: "inflate::Available inflate data did not terminate",
+ INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block",
+ TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes",
+ INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths",
+ INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length",
+ INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete",
+ INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths",
+ INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths",
+ INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement",
+ INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)",
+
+ /* ADM-ZIP error messages */
+ CANT_EXTRACT_FILE: "Could not extract the file",
+ CANT_OVERRIDE: "Target file already exists",
+ NO_ZIP: "No zip file was loaded",
+ NO_ENTRY: "Entry doesn't exist",
+ DIRECTORY_CONTENT_ERROR: "A directory cannot have content",
+ FILE_NOT_FOUND: "File not found: %s",
+ NOT_IMPLEMENTED: "Not implemented",
+ INVALID_FILENAME: "Invalid filename",
+ INVALID_FORMAT: "Invalid or unsupported zip format. No END header found"
+};
+
+
+/***/ }),
+
+/***/ 8321:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const fs = (__nccwpck_require__(2895).require)();
+const pth = __nccwpck_require__(1017);
+
+fs.existsSync = fs.existsSync || pth.existsSync;
+
+module.exports = function (/*String*/ path) {
+ var _path = path || "",
+ _obj = newAttr(),
+ _stat = null;
+
+ function newAttr() {
+ return {
+ directory: false,
+ readonly: false,
+ hidden: false,
+ executable: false,
+ mtime: 0,
+ atime: 0
+ };
+ }
+
+ if (_path && fs.existsSync(_path)) {
+ _stat = fs.statSync(_path);
+ _obj.directory = _stat.isDirectory();
+ _obj.mtime = _stat.mtime;
+ _obj.atime = _stat.atime;
+        _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable if anyone has an execute bit, not just the owner
+        _obj.readonly = (0o200 & _stat.mode) === 0; // read-only if the owner has no write permission
+ _obj.hidden = pth.basename(_path)[0] === ".";
+ } else {
+ console.warn("Invalid path: " + _path);
+ }
+
+ return {
+ get directory() {
+ return _obj.directory;
+ },
+
+ get readOnly() {
+ return _obj.readonly;
+ },
+
+ get hidden() {
+ return _obj.hidden;
+ },
+
+ get mtime() {
+ return _obj.mtime;
+ },
+
+ get atime() {
+ return _obj.atime;
+ },
+
+ get executable() {
+ return _obj.executable;
+ },
+
+ decodeAttributes: function () {},
+
+ encodeAttributes: function () {},
+
+ toJSON: function () {
+ return {
+ path: _path,
+ isDirectory: _obj.directory,
+ isReadOnly: _obj.readonly,
+ isHidden: _obj.hidden,
+ isExecutable: _obj.executable,
+ mTime: _obj.mtime,
+ aTime: _obj.atime
+ };
+ },
+
+ toString: function () {
+ return JSON.stringify(this.toJSON(), null, "\t");
+ }
+ };
+};
+
+
+/***/ }),
+
+/***/ 2895:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+exports.require = function () {
+ if (typeof process === "object" && process.versions && process.versions["electron"]) {
+ try {
+ const originalFs = __nccwpck_require__(2941);
+ if (Object.keys(originalFs).length > 0) {
+ return originalFs;
+ }
+ } catch (e) {}
+ }
+ return __nccwpck_require__(7147);
+};
+
+
+/***/ }),
+
+/***/ 5182:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = __nccwpck_require__(1291);
+module.exports.Constants = __nccwpck_require__(4522);
+module.exports.Errors = __nccwpck_require__(1255);
+module.exports.FileAttr = __nccwpck_require__(8321);
+
+
+/***/ }),
+
+/***/ 1291:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+const fsystem = (__nccwpck_require__(2895).require)();
+const pth = __nccwpck_require__(1017);
+const Constants = __nccwpck_require__(4522);
+const Errors = __nccwpck_require__(1255); // error messages, used by makeDir below
+const isWin = typeof process === "object" && "win32" === process.platform;
+
+const is_Obj = (obj) => obj && typeof obj === "object";
+
+// generate CRC32 lookup table
+const crcTable = new Uint32Array(256).map((t, c) => {
+ for (let k = 0; k < 8; k++) {
+ if ((c & 1) !== 0) {
+ c = 0xedb88320 ^ (c >>> 1);
+ } else {
+ c >>>= 1;
+ }
+ }
+ return c >>> 0;
+});
+
+// UTILS functions
+
+function Utils(opts) {
+ this.sep = pth.sep;
+ this.fs = fsystem;
+
+ if (is_Obj(opts)) {
+ // custom filesystem
+ if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") {
+ this.fs = opts.fs;
+ }
+ }
+}
+
+module.exports = Utils;
+
+// INSTANCED functions
+
+Utils.prototype.makeDir = function (/*String*/ folder) {
+ const self = this;
+
+ // Sync - make directories tree
+ function mkdirSync(/*String*/ fpath) {
+ let resolvedPath = fpath.split(self.sep)[0];
+ fpath.split(self.sep).forEach(function (name) {
+ if (!name || name.substr(-1, 1) === ":") return;
+ resolvedPath += self.sep + name;
+ var stat;
+ try {
+ stat = self.fs.statSync(resolvedPath);
+ } catch (e) {
+ self.fs.mkdirSync(resolvedPath);
+ }
+ if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath);
+ });
+ }
+
+ mkdirSync(folder);
+};
+
+Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) {
+ const self = this;
+ if (self.fs.existsSync(path)) {
+ if (!overwrite) return false; // cannot overwrite
+
+ var stat = self.fs.statSync(path);
+ if (stat.isDirectory()) {
+ return false;
+ }
+ }
+ var folder = pth.dirname(path);
+ if (!self.fs.existsSync(folder)) {
+ self.makeDir(folder);
+ }
+
+ var fd;
+ try {
+ fd = self.fs.openSync(path, "w", 438); // 0666
+ } catch (e) {
+ self.fs.chmodSync(path, 438);
+ fd = self.fs.openSync(path, "w", 438);
+ }
+ if (fd) {
+ try {
+ self.fs.writeSync(fd, content, 0, content.length, 0);
+ } finally {
+ self.fs.closeSync(fd);
+ }
+ }
+ self.fs.chmodSync(path, attr || 438);
+ return true;
+};
+
+Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) {
+ if (typeof attr === "function") {
+ callback = attr;
+ attr = undefined;
+ }
+
+ const self = this;
+
+ self.fs.exists(path, function (exist) {
+ if (exist && !overwrite) return callback(false);
+
+ self.fs.stat(path, function (err, stat) {
+ if (exist && stat.isDirectory()) {
+ return callback(false);
+ }
+
+ var folder = pth.dirname(path);
+ self.fs.exists(folder, function (exists) {
+ if (!exists) self.makeDir(folder);
+
+ self.fs.open(path, "w", 438, function (err, fd) {
+ if (err) {
+ self.fs.chmod(path, 438, function () {
+ self.fs.open(path, "w", 438, function (err, fd) {
+ self.fs.write(fd, content, 0, content.length, 0, function () {
+ self.fs.close(fd, function () {
+ self.fs.chmod(path, attr || 438, function () {
+ callback(true);
+ });
+ });
+ });
+ });
+ });
+ } else if (fd) {
+ self.fs.write(fd, content, 0, content.length, 0, function () {
+ self.fs.close(fd, function () {
+ self.fs.chmod(path, attr || 438, function () {
+ callback(true);
+ });
+ });
+ });
+ } else {
+ self.fs.chmod(path, attr || 438, function () {
+ callback(true);
+ });
+ }
+ });
+ });
+ });
+ });
+};
+
+Utils.prototype.findFiles = function (/*String*/ path) {
+ const self = this;
+
+ function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) {
+ if (typeof pattern === "boolean") {
+ recursive = pattern;
+ pattern = undefined;
+ }
+ let files = [];
+ self.fs.readdirSync(dir).forEach(function (file) {
+ var path = pth.join(dir, file);
+
+ if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive));
+
+ if (!pattern || pattern.test(path)) {
+ files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? self.sep : ""));
+ }
+ });
+ return files;
+ }
+
+ return findSync(path, undefined, true);
+};
+
+Utils.prototype.getAttributes = function () {};
+
+Utils.prototype.setAttributes = function () {};
+
+// STATIC functions
+
+// crc32 single update (it is part of crc32)
+Utils.crc32update = function (crc, byte) {
+ return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8);
+};
+
+Utils.crc32 = function (buf) {
+ if (typeof buf === "string") {
+ buf = Buffer.from(buf, "utf8");
+ }
+    // crcTable is precomputed above at module load time
+
+ let len = buf.length;
+ let crc = ~0;
+ for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]);
+ // xor and cast as uint32 number
+ return ~crc >>> 0;
+};
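+
+// Sanity check for the CRC-32 implementation above: with this (reflected 0xEDB88320)
+// polynomial the standard check value holds, i.e.
+//
+//   Utils.crc32("123456789") === 0xcbf43926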
+
+Utils.methodToString = function (/*Number*/ method) {
+ switch (method) {
+ case Constants.STORED:
+ return "STORED (" + method + ")";
+ case Constants.DEFLATED:
+ return "DEFLATED (" + method + ")";
+ default:
+ return "UNSUPPORTED (" + method + ")";
+ }
+};
+
+// removes ".." style path elements
+Utils.canonical = function (/*string*/ path) {
+ if (!path) return "";
+    // trick: make normalize think the path is absolute
+ var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/"));
+ return pth.join(".", safeSuffix);
+};
+
+// make absolute paths, taking prefix as the root folder
+Utils.sanitize = function (/*string*/ prefix, /*string*/ name) {
+ prefix = pth.resolve(pth.normalize(prefix));
+ var parts = name.split("/");
+ for (var i = 0, l = parts.length; i < l; i++) {
+ var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep)));
+ if (path.indexOf(prefix) === 0) {
+ return path;
+ }
+ }
+ return pth.normalize(pth.join(prefix, pth.basename(name)));
+};
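+
+// Example of how the two helpers above defuse "zip slip" style entry names
+// (paths are illustrative; results shown for a POSIX path separator):
+//
+//   Utils.canonical("..\\..\\etc/passwd");          // -> "etc/passwd"  (".." segments dropped)
+//   Utils.sanitize("/tmp/out", "../../etc/passwd"); // -> "/tmp/out/etc/passwd"  (kept under the prefix)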
+
+// converts buffer, Uint8Array, string types to buffer
+Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) {
+ if (Buffer.isBuffer(input)) {
+ return input;
+ } else if (input instanceof Uint8Array) {
+ return Buffer.from(input);
+ } else {
+        // expect a string; all other values are invalid and yield an empty buffer
+ return typeof input === "string" ? Buffer.from(input, "utf8") : Buffer.alloc(0);
+ }
+};
+
+Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) {
+ var slice = Buffer.from(buffer.slice(index, index + 8));
+ slice.swap64();
+
+ return parseInt(`0x${slice.toString("hex")}`);
+};
+
+Utils.isWin = isWin; // are we on a Windows system
+Utils.crcTable = crcTable;
+
+
+/***/ }),
+
+/***/ 4057:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var Utils = __nccwpck_require__(5182),
+ Headers = __nccwpck_require__(4958),
+ Constants = Utils.Constants,
+ Methods = __nccwpck_require__(3928);
+
+module.exports = function (/*Buffer*/ input) {
+ var _entryHeader = new Headers.EntryHeader(),
+ _entryName = Buffer.alloc(0),
+ _comment = Buffer.alloc(0),
+ _isDirectory = false,
+ uncompressedData = null,
+ _extra = Buffer.alloc(0);
+
+ function getCompressedDataFromZip() {
+ if (!input || !Buffer.isBuffer(input)) {
+ return Buffer.alloc(0);
+ }
+ _entryHeader.loadDataHeaderFromBinary(input);
+ return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize);
+ }
+
+ function crc32OK(data) {
+ // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written
+ if ((_entryHeader.flags & 0x8) !== 0x8) {
+ if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) {
+ return false;
+ }
+ } else {
+ // @TODO: load and check data descriptor header
+ // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure
+ // (optionally preceded by a 4-byte signature) immediately after the compressed data:
+ }
+ return true;
+ }
+
+ function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) {
+ if (typeof callback === "undefined" && typeof async === "string") {
+ pass = async;
+ async = void 0;
+ }
+ if (_isDirectory) {
+ if (async && callback) {
+ callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error.
+ }
+ return Buffer.alloc(0);
+ }
+
+ var compressedData = getCompressedDataFromZip();
+
+ if (compressedData.length === 0) {
+ // File is empty, nothing to decompress.
+ if (async && callback) callback(compressedData);
+ return compressedData;
+ }
+
+ if (_entryHeader.encripted) {
+ if ("string" !== typeof pass && !Buffer.isBuffer(pass)) {
+ throw new Error("ADM-ZIP: Incompatible password parameter");
+ }
+ compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass);
+ }
+
+ var data = Buffer.alloc(_entryHeader.size);
+
+ switch (_entryHeader.method) {
+ case Utils.Constants.STORED:
+ compressedData.copy(data);
+ if (!crc32OK(data)) {
+ if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error
+ throw new Error(Utils.Errors.BAD_CRC);
+ } else {
+ //si added otherwise did not seem to return data.
+ if (async && callback) callback(data);
+ return data;
+ }
+ case Utils.Constants.DEFLATED:
+ var inflater = new Methods.Inflater(compressedData);
+ if (!async) {
+ const result = inflater.inflate(data);
+ result.copy(data, 0);
+ if (!crc32OK(data)) {
+ throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString());
+ }
+ return data;
+ } else {
+ inflater.inflateAsync(function (result) {
+ result.copy(result, 0);
+ if (callback) {
+ if (!crc32OK(result)) {
+ callback(result, Utils.Errors.BAD_CRC); //si added error
+ } else {
+ callback(result);
+ }
+ }
+ });
+ }
+ break;
+ default:
+ if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD);
+ throw new Error(Utils.Errors.UNKNOWN_METHOD);
+ }
+ }
+
+ function compress(/*Boolean*/ async, /*Function*/ callback) {
+ if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) {
+ // no data set or the data wasn't changed to require recompression
+ if (async && callback) callback(getCompressedDataFromZip());
+ return getCompressedDataFromZip();
+ }
+
+ if (uncompressedData.length && !_isDirectory) {
+ var compressedData;
+ // Local file header
+ switch (_entryHeader.method) {
+ case Utils.Constants.STORED:
+ _entryHeader.compressedSize = _entryHeader.size;
+
+ compressedData = Buffer.alloc(uncompressedData.length);
+ uncompressedData.copy(compressedData);
+
+ if (async && callback) callback(compressedData);
+ return compressedData;
+ default:
+ case Utils.Constants.DEFLATED:
+ var deflater = new Methods.Deflater(uncompressedData);
+ if (!async) {
+ var deflated = deflater.deflate();
+ _entryHeader.compressedSize = deflated.length;
+ return deflated;
+ } else {
+ deflater.deflateAsync(function (data) {
+ compressedData = Buffer.alloc(data.length);
+ _entryHeader.compressedSize = data.length;
+ data.copy(compressedData);
+ callback && callback(compressedData);
+ });
+ }
+ deflater = null;
+ break;
+ }
+ } else if (async && callback) {
+ callback(Buffer.alloc(0));
+ } else {
+ return Buffer.alloc(0);
+ }
+ }
+
+ function readUInt64LE(buffer, offset) {
+        // combine the high and low 32-bit halves; the high word must be scaled by 2^32
+        return buffer.readUInt32LE(offset + 4) * 0x100000000 + buffer.readUInt32LE(offset);
+ }
+
+ function parseExtra(data) {
+ var offset = 0;
+ var signature, size, part;
+ while (offset < data.length) {
+ signature = data.readUInt16LE(offset);
+ offset += 2;
+ size = data.readUInt16LE(offset);
+ offset += 2;
+ part = data.slice(offset, offset + size);
+ offset += size;
+ if (Constants.ID_ZIP64 === signature) {
+ parseZip64ExtendedInformation(part);
+ }
+ }
+ }
+
+ //Override header field values with values from the ZIP64 extra field
+ function parseZip64ExtendedInformation(data) {
+ var size, compressedSize, offset, diskNumStart;
+
+ if (data.length >= Constants.EF_ZIP64_SCOMP) {
+ size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP);
+ if (_entryHeader.size === Constants.EF_ZIP64_OR_32) {
+ _entryHeader.size = size;
+ }
+ }
+ if (data.length >= Constants.EF_ZIP64_RHO) {
+ compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP);
+ if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) {
+ _entryHeader.compressedSize = compressedSize;
+ }
+ }
+ if (data.length >= Constants.EF_ZIP64_DSN) {
+ offset = readUInt64LE(data, Constants.EF_ZIP64_RHO);
+ if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) {
+ _entryHeader.offset = offset;
+ }
+ }
+ if (data.length >= Constants.EF_ZIP64_DSN + 4) {
+ diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN);
+ if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) {
+ _entryHeader.diskNumStart = diskNumStart;
+ }
+ }
+ }
+
+ return {
+ get entryName() {
+ return _entryName.toString();
+ },
+ get rawEntryName() {
+ return _entryName;
+ },
+ set entryName(val) {
+ _entryName = Utils.toBuffer(val);
+ var lastChar = _entryName[_entryName.length - 1];
+ _isDirectory = lastChar === 47 || lastChar === 92;
+ _entryHeader.fileNameLength = _entryName.length;
+ },
+
+ get extra() {
+ return _extra;
+ },
+ set extra(val) {
+ _extra = val;
+ _entryHeader.extraLength = val.length;
+ parseExtra(val);
+ },
+
+ get comment() {
+ return _comment.toString();
+ },
+ set comment(val) {
+ _comment = Utils.toBuffer(val);
+ _entryHeader.commentLength = _comment.length;
+ },
+
+ get name() {
+ var n = _entryName.toString();
+            return _isDirectory
+                ? n
+                      .substr(0, n.length - 1)
+                      .split("/")
+                      .pop()
+                : n.split("/").pop();
+ },
+ get isDirectory() {
+ return _isDirectory;
+ },
+
+ getCompressedData: function () {
+ return compress(false, null);
+ },
+
+ getCompressedDataAsync: function (/*Function*/ callback) {
+ compress(true, callback);
+ },
+
+ setData: function (value) {
+ uncompressedData = Utils.toBuffer(value);
+ if (!_isDirectory && uncompressedData.length) {
+ _entryHeader.size = uncompressedData.length;
+ _entryHeader.method = Utils.Constants.DEFLATED;
+ _entryHeader.crc = Utils.crc32(value);
+ _entryHeader.changed = true;
+ } else {
+ // folders and blank files should be stored
+ _entryHeader.method = Utils.Constants.STORED;
+ }
+ },
+
+ getData: function (pass) {
+ if (_entryHeader.changed) {
+ return uncompressedData;
+ } else {
+ return decompress(false, null, pass);
+ }
+ },
+
+ getDataAsync: function (/*Function*/ callback, pass) {
+ if (_entryHeader.changed) {
+ callback(uncompressedData);
+ } else {
+ decompress(true, callback, pass);
+ }
+ },
+
+ set attr(attr) {
+ _entryHeader.attr = attr;
+ },
+ get attr() {
+ return _entryHeader.attr;
+ },
+
+ set header(/*Buffer*/ data) {
+ _entryHeader.loadFromBinary(data);
+ },
+
+ get header() {
+ return _entryHeader;
+ },
+
+ packHeader: function () {
+ // 1. create header (buffer)
+ var header = _entryHeader.entryHeaderToBinary();
+ var addpos = Utils.Constants.CENHDR;
+ // 2. add file name
+ _entryName.copy(header, addpos);
+ addpos += _entryName.length;
+ // 3. add extra data
+ if (_entryHeader.extraLength) {
+ _extra.copy(header, addpos);
+ addpos += _entryHeader.extraLength;
+ }
+ // 4. add file comment
+ if (_entryHeader.commentLength) {
+ _comment.copy(header, addpos);
+ }
+ return header;
+ },
+
+ toJSON: function () {
+ const bytes = function (nr) {
+ return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">";
+ };
-"use strict";
+ return {
+ entryName: this.entryName,
+ name: this.name,
+ comment: this.comment,
+ isDirectory: this.isDirectory,
+ header: _entryHeader.toJSON(),
+ compressedData: bytes(input),
+ data: bytes(uncompressedData)
+ };
+ },
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.isSpaceScopedArgs = void 0;
-function isSpaceScopedArgs(args) {
- return "spaceName" in args;
-}
-exports.isSpaceScopedArgs = isSpaceScopedArgs;
+ toString: function () {
+ return JSON.stringify(this.toJSON(), null, "\t");
+ }
+ };
+};
/***/ }),
-/***/ 3667:
-/***/ ((__unused_webpack_module, exports) => {
+/***/ 7744:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
-"use strict";
+const ZipEntry = __nccwpck_require__(4057);
+const Headers = __nccwpck_require__(4958);
+const Utils = __nccwpck_require__(5182);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.isSpaceScopedOperation = void 0;
-function isSpaceScopedOperation(command) {
- return "spaceName" in command;
-}
-exports.isSpaceScopedOperation = isSpaceScopedOperation;
+module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) {
+ var entryList = [],
+ entryTable = {},
+ _comment = Buffer.alloc(0),
+ mainHeader = new Headers.MainHeader(),
+ loadedEntries = false;
+ // assign options
+ const opts = Object.assign(Object.create(null), options);
-/***/ }),
+ const { noSort } = opts;
-/***/ 3295:
-/***/ ((__unused_webpack_module, exports) => {
+ if (inBuffer) {
+ // is a memory buffer
+ readMainHeader(opts.readEntries);
+ } else {
+        // no buffer provided - this is a new, empty zip file
+ loadedEntries = true;
+ }
-"use strict";
+ function iterateEntries(callback) {
+ const totalEntries = mainHeader.diskEntries; // total number of entries
+ let index = mainHeader.offset; // offset of first CEN header
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.isSpaceScopedRequest = void 0;
-function isSpaceScopedRequest(command) {
- return "spaceName" in command;
-}
-exports.isSpaceScopedRequest = isSpaceScopedRequest;
+ for (let i = 0; i < totalEntries; i++) {
+ let tmp = index;
+ const entry = new ZipEntry(inBuffer);
+ entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
+ entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
-/***/ }),
+ index += entry.header.entryHeaderSize;
-/***/ 492:
-/***/ ((__unused_webpack_module, exports) => {
+ callback(entry);
+ }
+ }
-"use strict";
+ function readEntries() {
+ loadedEntries = true;
+ entryTable = {};
+ entryList = new Array(mainHeader.diskEntries); // total number of entries
+ var index = mainHeader.offset; // offset of first CEN header
+ for (var i = 0; i < entryList.length; i++) {
+ var tmp = index,
+ entry = new ZipEntry(inBuffer);
+ entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR));
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.isSpaceScopedResource = void 0;
-function isSpaceScopedResource(resource) {
- return "SpaceId" in resource;
-}
-exports.isSpaceScopedResource = isSpaceScopedResource;
+ entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength));
+ if (entry.header.extraLength) {
+ entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength));
+ }
-/***/ }),
+ if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength);
-/***/ 7218:
-/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+ index += entry.header.entryHeaderSize;
-"use strict";
+ entryList[i] = entry;
+ entryTable[entry.entryName] = entry;
+ }
+ }
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.spaceScopedRoutePrefix = void 0;
-var apiLocation_1 = __nccwpck_require__(7083);
-exports.spaceScopedRoutePrefix = "".concat(apiLocation_1.apiLocation, "/{spaceId}");
+ function readMainHeader(/*Boolean*/ readNow) {
+ var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size
+ max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length
+ n = max,
+ endStart = inBuffer.length,
+ endOffset = -1, // Start offset of the END header
+ commentEnd = 0;
+ for (i; i >= n; i--) {
+ if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P'
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) {
+ // "PK\005\006"
+ endOffset = i;
+ commentEnd = i;
+ endStart = i + Utils.Constants.ENDHDR;
+ // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature
+ n = i - Utils.Constants.END64HDR;
+ continue;
+ }
-/***/ }),
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) {
+ // Found a zip64 signature, let's continue reading the whole zip64 record
+ n = max;
+ continue;
+ }
-/***/ 1547:
-/***/ ((__unused_webpack_module, exports) => {
+ if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) {
+                // Found the zip64 record, let's determine its size
+ endOffset = i;
+ endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD;
+ break;
+ }
+ }
-"use strict";
+ if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT);
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.SubscriptionRecord = void 0;
-var SubscriptionRecord = /** @class */ (function () {
- function SubscriptionRecord() {
- this.subscriptions = {};
+ mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart));
+ if (mainHeader.commentLength) {
+ _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR);
+ }
+ if (readNow) readEntries();
}
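+
+    // Minimal sketch of the END record the scan above looks for, assuming an archive
+    // without a comment (offsets match Utils.Constants):
+    //
+    //   [ signature 0x06054b50 ][ disk numbers ][ ENDSUB ][ ENDTOT ][ ENDSIZ ][ ENDOFF ][ ENDCOM = 0 ]
+    //          4 bytes              2 + 2           2         2         4         4          2        = 22 bytes
+    //
+    // Because the trailing comment may be up to 0xFFFF bytes, only the last
+    // 0xFFFF + 22 bytes of the buffer ever need to be scanned.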
- SubscriptionRecord.prototype.subscribe = function (registrationName, callback) {
- var _this = this;
- this.subscriptions[registrationName] = callback;
- return function () { return _this.unsubscribe(registrationName); };
- };
- SubscriptionRecord.prototype.unsubscribe = function (registrationName) {
- delete this.subscriptions[registrationName];
- };
- SubscriptionRecord.prototype.notify = function (predicate, data) {
- var _this = this;
- Object.keys(this.subscriptions)
- .filter(predicate)
- .forEach(function (key) { return _this.subscriptions[key](data); });
- };
- SubscriptionRecord.prototype.notifyAll = function (data) {
- this.notify(function () { return true; }, data);
- };
- SubscriptionRecord.prototype.notifySingle = function (registrationName, data) {
- if (registrationName in this.subscriptions) {
- this.subscriptions[registrationName](data);
+
+ function sortEntries() {
+ if (entryList.length > 1 && !noSort) {
+ entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase()));
}
- };
- return SubscriptionRecord;
-}());
-exports.SubscriptionRecord = SubscriptionRecord;
+ }
+ return {
+ /**
+         * Returns an array of ZipEntry objects contained in the currently opened archive
+ * @return Array
+ */
+ get entries() {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ return entryList;
+ },
-/***/ }),
+ /**
+ * Archive comment
+ * @return {String}
+ */
+ get comment() {
+ return _comment.toString();
+ },
+ set comment(val) {
+ _comment = Utils.toBuffer(val);
+ mainHeader.commentLength = _comment.length;
+ },
-/***/ 7132:
-/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
+ getEntryCount: function () {
+ if (!loadedEntries) {
+ return mainHeader.diskEntries;
+ }
-"use strict";
+ return entryList.length;
+ },
-var __importDefault = (this && this.__importDefault) || function (mod) {
- return (mod && mod.__esModule) ? mod : { "default": mod };
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.isPropertyDefinedAndNotNull = exports.typeSafeHasOwnProperty = exports.ensureSuffix = exports.ensurePrefix = exports.determineServerEndpoint = exports.getResolver = exports.getServerEndpoint = exports.getQueryValue = void 0;
-var lodash_1 = __nccwpck_require__(250);
-var urijs_1 = __importDefault(__nccwpck_require__(4190));
-var resolver_1 = __nccwpck_require__(8043);
-var getQueryValue = function (key, location) {
- var result;
- (0, urijs_1.default)(location).hasQuery(key, function (value) {
- result = value;
- });
- return result;
-};
-exports.getQueryValue = getQueryValue;
-var getServerEndpoint = function (location) {
- if (location === void 0) { location = window.location; }
- return (0, exports.getQueryValue)("octopus.server", location.href) || (0, exports.determineServerEndpoint)(location);
-};
-exports.getServerEndpoint = getServerEndpoint;
-var getResolver = function (base) {
- var resolver = new resolver_1.Resolver(base);
- return resolver.resolve.bind(resolver);
-};
-exports.getResolver = getResolver;
-var determineServerEndpoint = function (location) {
- var endpoint = (0, exports.ensureSuffix)("//", "" + location.protocol) + location.host;
- var path = (0, exports.ensurePrefix)("/", location.pathname);
- if (path.length >= 1) {
- var lastSegmentIndex = path.lastIndexOf("/");
- if (lastSegmentIndex >= 0) {
- path = path.substring(0, lastSegmentIndex + 1);
+ forEach: function (callback) {
+ if (!loadedEntries) {
+ iterateEntries(callback);
+ return;
+ }
+
+ entryList.forEach(callback);
+ },
+
+ /**
+         * Returns a reference to the entry with the given name, or null if the entry does not exist
+ *
+ * @param entryName
+ * @return ZipEntry
+ */
+ getEntry: function (/*String*/ entryName) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ return entryTable[entryName] || null;
+ },
+
+ /**
+ * Adds the given entry to the entry list
+ *
+ * @param entry
+ */
+ setEntry: function (/*ZipEntry*/ entry) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ entryList.push(entry);
+ entryTable[entry.entryName] = entry;
+ mainHeader.totalEntries = entryList.length;
+ },
+
+ /**
+ * Removes the entry with the given name from the entry list.
+ *
+ * If the entry is a directory, then all nested files and directories will be removed
+ * @param entryName
+ */
+ deleteEntry: function (/*String*/ entryName) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ var entry = entryTable[entryName];
+ if (entry && entry.isDirectory) {
+ var _self = this;
+ this.getEntryChildren(entry).forEach(function (child) {
+ if (child.entryName !== entryName) {
+ _self.deleteEntry(child.entryName);
+ }
+ });
+ }
+ entryList.splice(entryList.indexOf(entry), 1);
+ delete entryTable[entryName];
+ mainHeader.totalEntries = entryList.length;
+ },
+
+ /**
+ * Iterates and returns all nested files and directories of the given entry
+ *
+ * @param entry
+ * @return Array
+ */
+ getEntryChildren: function (/*ZipEntry*/ entry) {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ if (entry && entry.isDirectory) {
+ const list = [];
+ const name = entry.entryName;
+ const len = name.length;
+
+ entryList.forEach(function (zipEntry) {
+ if (zipEntry.entryName.substr(0, len) === name) {
+ list.push(zipEntry);
+ }
+ });
+ return list;
+ }
+ return [];
+ },
+
+ /**
+         * Returns the complete zip file as a Buffer
+ *
+ * @return Buffer
+ */
+ compressToBuffer: function () {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ sortEntries();
+
+ const dataBlock = [];
+ const entryHeaders = [];
+ let totalSize = 0;
+ let dindex = 0;
+
+ mainHeader.size = 0;
+ mainHeader.offset = 0;
+
+ for (const entry of entryList) {
+                // compress the data and set the local and entry headers accordingly; this is why it is called first
+ const compressedData = entry.getCompressedData();
+ // 1. construct data header
+ entry.header.offset = dindex;
+ const dataHeader = entry.header.dataHeaderToBinary();
+ const entryNameLen = entry.rawEntryName.length;
+ // 1.2. postheader - data after data header
+ const postHeader = Buffer.alloc(entryNameLen + entry.extra.length);
+ entry.rawEntryName.copy(postHeader, 0);
+                entry.extra.copy(postHeader, entryNameLen);
+
+ // 2. offsets
+ const dataLength = dataHeader.length + postHeader.length + compressedData.length;
+ dindex += dataLength;
+
+ // 3. store values in sequence
+ dataBlock.push(dataHeader);
+ dataBlock.push(postHeader);
+ dataBlock.push(compressedData);
+
+ // 4. construct entry header
+ const entryHeader = entry.packHeader();
+ entryHeaders.push(entryHeader);
+ // 5. update main header
+ mainHeader.size += entryHeader.length;
+ totalSize += dataLength + entryHeader.length;
+ }
+
+ totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
+ // point to end of data and beginning of central directory first record
+ mainHeader.offset = dindex;
+
+ dindex = 0;
+ const outBuffer = Buffer.alloc(totalSize);
+ // write data blocks
+ for (const content of dataBlock) {
+ content.copy(outBuffer, dindex);
+ dindex += content.length;
+ }
+
+ // write central directory entries
+ for (const content of entryHeaders) {
+ content.copy(outBuffer, dindex);
+ dindex += content.length;
+ }
+
+ // write main header
+ const mh = mainHeader.toBinary();
+ if (_comment) {
+ _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
+ }
+ mh.copy(outBuffer, dindex);
+
+ return outBuffer;
+ },
+
+ toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) {
+ try {
+ if (!loadedEntries) {
+ readEntries();
+ }
+ sortEntries();
+
+ const dataBlock = [];
+ const entryHeaders = [];
+ let totalSize = 0;
+ let dindex = 0;
+
+ mainHeader.size = 0;
+ mainHeader.offset = 0;
+
+ const compress2Buffer = function (entryLists) {
+ if (entryLists.length) {
+ const entry = entryLists.pop();
+ const name = entry.entryName + entry.extra.toString();
+ if (onItemStart) onItemStart(name);
+ entry.getCompressedDataAsync(function (compressedData) {
+ if (onItemEnd) onItemEnd(name);
+
+ entry.header.offset = dindex;
+ // data header
+ const dataHeader = entry.header.dataHeaderToBinary();
+ const postHeader = Buffer.alloc(name.length, name);
+ const dataLength = dataHeader.length + postHeader.length + compressedData.length;
+
+ dindex += dataLength;
+
+ dataBlock.push(dataHeader);
+ dataBlock.push(postHeader);
+ dataBlock.push(compressedData);
+
+ const entryHeader = entry.packHeader();
+ entryHeaders.push(entryHeader);
+ mainHeader.size += entryHeader.length;
+ totalSize += dataLength + entryHeader.length;
+
+ compress2Buffer(entryLists);
+ });
+ } else {
+ totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length
+ // point to end of data and beginning of central directory first record
+ mainHeader.offset = dindex;
+
+ dindex = 0;
+ const outBuffer = Buffer.alloc(totalSize);
+ dataBlock.forEach(function (content) {
+ content.copy(outBuffer, dindex); // write data blocks
+ dindex += content.length;
+ });
+ entryHeaders.forEach(function (content) {
+ content.copy(outBuffer, dindex); // write central directory entries
+ dindex += content.length;
+ });
+
+ const mh = mainHeader.toBinary();
+ if (_comment) {
+ _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment
+ }
+
+ mh.copy(outBuffer, dindex); // write main header
+
+ onSuccess(outBuffer);
+ }
+ };
+
+ compress2Buffer(entryList);
+ } catch (e) {
+ onFail(e);
+ }
}
- }
- endpoint = endpoint + path;
- return endpoint;
-};
-exports.determineServerEndpoint = determineServerEndpoint;
-exports.ensurePrefix = (0, lodash_1.curry)(function (prefix, value) { return (!value.startsWith(prefix) ? "".concat(prefix).concat(value) : value); });
-exports.ensureSuffix = (0, lodash_1.curry)(function (suffix, value) { return (!value.endsWith(suffix) ? "".concat(value).concat(suffix) : value); });
-var typeSafeHasOwnProperty = function (target, key) {
- return target.hasOwnProperty(key);
-};
-exports.typeSafeHasOwnProperty = typeSafeHasOwnProperty;
-var isPropertyDefinedAndNotNull = function (target, key) {
- return (0, exports.typeSafeHasOwnProperty)(target, key) && target[key] !== null && target[key] !== undefined;
+ };
};
-exports.isPropertyDefinedAndNotNull = isPropertyDefinedAndNotNull;
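+// The methods above appear to match the bundled adm-zip ZipFile internals. A minimal usage
+// sketch of the public wrapper that drives them, kept as a comment so the bundle stays
+// inert; 'archive.zip' and 'docs/readme.txt' are placeholder names used only for illustration:
+//
+//   const AdmZip = require('adm-zip');
+//   const zip = new AdmZip('archive.zip');          // entries are read lazily via readEntries()
+//   const entry = zip.getEntry('docs/readme.txt');  // getEntry above; null when the name is unknown
+//   if (entry) zip.deleteFile(entry.entryName);     // deleteEntry above; directories also remove their children
+//   const rebuilt = zip.toBuffer();                 // compressToBuffer above rebuilds the archive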
/***/ }),
@@ -7941,7 +13868,7 @@ axios.Axios = Axios;
// Expose Cancel & CancelToken
axios.CanceledError = __nccwpck_require__(4098);
axios.CancelToken = __nccwpck_require__(1587);
-axios.isCancel = __nccwpck_require__(4057);
+axios.isCancel = __nccwpck_require__(6719);
axios.VERSION = (__nccwpck_require__(4322).version);
axios.toFormData = __nccwpck_require__(470);
@@ -8125,7 +14052,7 @@ module.exports = CanceledError;
/***/ }),
-/***/ 4057:
+/***/ 6719:
/***/ ((module) => {
"use strict";
@@ -8498,7 +14425,7 @@ module.exports = function buildFullPath(baseURL, requestedURL) {
var utils = __nccwpck_require__(328);
var transformData = __nccwpck_require__(9812);
-var isCancel = __nccwpck_require__(4057);
+var isCancel = __nccwpck_require__(6719);
var defaults = __nccwpck_require__(1626);
var CanceledError = __nccwpck_require__(4098);
@@ -10022,6 +15949,76 @@ module.exports = {
};
+/***/ }),
+
+/***/ 9417:
+/***/ ((module) => {
+
+"use strict";
+
+module.exports = balanced;
+function balanced(a, b, str) {
+ if (a instanceof RegExp) a = maybeMatch(a, str);
+ if (b instanceof RegExp) b = maybeMatch(b, str);
+
+ var r = range(a, b, str);
+
+ return r && {
+ start: r[0],
+ end: r[1],
+ pre: str.slice(0, r[0]),
+ body: str.slice(r[0] + a.length, r[1]),
+ post: str.slice(r[1] + b.length)
+ };
+}
+
+function maybeMatch(reg, str) {
+ var m = str.match(reg);
+ return m ? m[0] : null;
+}
+
+balanced.range = range;
+function range(a, b, str) {
+ var begs, beg, left, right, result;
+ var ai = str.indexOf(a);
+ var bi = str.indexOf(b, ai + 1);
+ var i = ai;
+
+ if (ai >= 0 && bi > 0) {
+ if (a === b) {
+ return [ai, bi];
+ }
+ begs = [];
+ left = str.length;
+
+ while (i >= 0 && !result) {
+ if (i == ai) {
+ begs.push(i);
+ ai = str.indexOf(a, i + 1);
+ } else if (begs.length == 1) {
+ result = [ begs.pop(), bi ];
+ } else {
+ beg = begs.pop();
+ if (beg < left) {
+ left = beg;
+ right = bi;
+ }
+
+ bi = str.indexOf(b, i + 1);
+ }
+
+ i = ai < bi && ai >= 0 ? ai : bi;
+ }
+
+ if (begs.length) {
+ result = [ left, right ];
+ }
+ }
+
+ return result;
+}
+
+
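+// A quick worked example of balanced() above, kept as a comment so the bundled module is
+// unchanged; the input string is an arbitrary placeholder:
+//
+//   balanced('{', '}', 'pre{in{nest}}post');
+//   //=> { start: 3, end: 12, pre: 'pre', body: 'in{nest}', post: 'post' }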
/***/ }),
/***/ 5443:
@@ -12370,6 +18367,389 @@ module.exports = function(dst, src) {
};
+/***/ }),
+
+/***/ 6863:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+module.exports = realpath
+realpath.realpath = realpath
+realpath.sync = realpathSync
+realpath.realpathSync = realpathSync
+realpath.monkeypatch = monkeypatch
+realpath.unmonkeypatch = unmonkeypatch
+
+var fs = __nccwpck_require__(7147)
+var origRealpath = fs.realpath
+var origRealpathSync = fs.realpathSync
+
+var version = process.version
+var ok = /^v[0-5]\./.test(version)
+var old = __nccwpck_require__(1734)
+
+function newError (er) {
+ return er && er.syscall === 'realpath' && (
+ er.code === 'ELOOP' ||
+ er.code === 'ENOMEM' ||
+ er.code === 'ENAMETOOLONG'
+ )
+}
+
+function realpath (p, cache, cb) {
+ if (ok) {
+ return origRealpath(p, cache, cb)
+ }
+
+ if (typeof cache === 'function') {
+ cb = cache
+ cache = null
+ }
+ origRealpath(p, cache, function (er, result) {
+ if (newError(er)) {
+ old.realpath(p, cache, cb)
+ } else {
+ cb(er, result)
+ }
+ })
+}
+
+function realpathSync (p, cache) {
+ if (ok) {
+ return origRealpathSync(p, cache)
+ }
+
+ try {
+ return origRealpathSync(p, cache)
+ } catch (er) {
+ if (newError(er)) {
+ return old.realpathSync(p, cache)
+ } else {
+ throw er
+ }
+ }
+}
+
+function monkeypatch () {
+ fs.realpath = realpath
+ fs.realpathSync = realpathSync
+}
+
+function unmonkeypatch () {
+ fs.realpath = origRealpath
+ fs.realpathSync = origRealpathSync
+}
+
+
+/***/ }),
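+// A minimal sketch of how the polyfill above is typically consumed; '/some/path' is a
+// placeholder and 'fs.realpath' is assumed to be the unbundled package name:
+//
+//   const rp = require('fs.realpath');
+//   rp.realpath('/some/path', (err, resolved) => {}); // falls back to the JS walk on ELOOP/ENOMEM/ENAMETOOLONG
+//   rp.monkeypatch();                                  // swaps fs.realpath / fs.realpathSync for the wrappers
+//   rp.unmonkeypatch();                                // restores the original fs functions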
+
+/***/ 1734:
+/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
+
+// Copyright Joyent, Inc. and other Node contributors.
+//
+// Permission is hereby granted, free of charge, to any person obtaining a
+// copy of this software and associated documentation files (the
+// "Software"), to deal in the Software without restriction, including
+// without limitation the rights to use, copy, modify, merge, publish,
+// distribute, sublicense, and/or sell copies of the Software, and to permit
+// persons to whom the Software is furnished to do so, subject to the
+// following conditions:
+//
+// The above copyright notice and this permission notice shall be included
+// in all copies or substantial portions of the Software.
+//
+// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
+// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN
+// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM,
+// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR
+// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE
+// USE OR OTHER DEALINGS IN THE SOFTWARE.
+
+var pathModule = __nccwpck_require__(1017);
+var isWindows = process.platform === 'win32';
+var fs = __nccwpck_require__(7147);
+
+// JavaScript implementation of realpath, ported from node pre-v6
+
+var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG);
+
+function rethrow() {
+ // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and
+ // is fairly slow to generate.
+ var callback;
+ if (DEBUG) {
+ var backtrace = new Error;
+ callback = debugCallback;
+ } else
+ callback = missingCallback;
+
+ return callback;
+
+ function debugCallback(err) {
+ if (err) {
+ backtrace.message = err.message;
+ err = backtrace;
+ missingCallback(err);
+ }
+ }
+
+ function missingCallback(err) {
+ if (err) {
+ if (process.throwDeprecation)
+ throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs
+ else if (!process.noDeprecation) {
+ var msg = 'fs: missing callback ' + (err.stack || err.message);
+ if (process.traceDeprecation)
+ console.trace(msg);
+ else
+ console.error(msg);
+ }
+ }
+ }
+}
+
+function maybeCallback(cb) {
+ return typeof cb === 'function' ? cb : rethrow();
+}
+
+var normalize = pathModule.normalize;
+
+// Regexp that finds the next portion of a (partial) path
+// result is [base_with_slash, base], e.g. ['somedir/', 'somedir']
+if (isWindows) {
+ var nextPartRe = /(.*?)(?:[\/\\]+|$)/g;
+} else {
+ var nextPartRe = /(.*?)(?:[\/]+|$)/g;
+}
+
+// Regex to find the device root, including trailing slash. E.g. 'c:\\'.
+if (isWindows) {
+ var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/;
+} else {
+ var splitRootRe = /^[\/]*/;
+}
+
+exports.realpathSync = function realpathSync(p, cache) {
+ // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return cache[p];
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstatSync(base);
+ knownHard[base] = true;
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ // NB: p.length changes.
+ while (pos < p.length) {
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ continue;
+ }
+
+ var resolvedLink;
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // some known symbolic link. no need to stat again.
+ resolvedLink = cache[base];
+ } else {
+ var stat = fs.lstatSync(base);
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ continue;
+ }
+
+ // read the link if it wasn't read before
+ // dev/ino always return 0 on windows, so skip the check.
+ var linkTarget = null;
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ linkTarget = seenLinks[id];
+ }
+ }
+ if (linkTarget === null) {
+ fs.statSync(base);
+ linkTarget = fs.readlinkSync(base);
+ }
+ resolvedLink = pathModule.resolve(previous, linkTarget);
+ // track this, if given a cache.
+ if (cache) cache[base] = resolvedLink;
+ if (!isWindows) seenLinks[id] = linkTarget;
+ }
+
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+
+ if (cache) cache[original] = p;
+
+ return p;
+};
+
+
+exports.realpath = function realpath(p, cache, cb) {
+ if (typeof cb !== 'function') {
+ cb = maybeCallback(cache);
+ cache = null;
+ }
+
+ // make p absolute
+ p = pathModule.resolve(p);
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, p)) {
+ return process.nextTick(cb.bind(null, null, cache[p]));
+ }
+
+ var original = p,
+ seenLinks = {},
+ knownHard = {};
+
+ // current character position in p
+ var pos;
+ // the partial path so far, including a trailing slash if any
+ var current;
+ // the partial path without a trailing slash (except when pointing at a root)
+ var base;
+ // the partial path scanned in the previous round, with slash
+ var previous;
+
+ start();
+
+ function start() {
+ // Skip over roots
+ var m = splitRootRe.exec(p);
+ pos = m[0].length;
+ current = m[0];
+ base = m[0];
+ previous = '';
+
+ // On windows, check that the root exists. On unix there is no need.
+ if (isWindows && !knownHard[base]) {
+ fs.lstat(base, function(err) {
+ if (err) return cb(err);
+ knownHard[base] = true;
+ LOOP();
+ });
+ } else {
+ process.nextTick(LOOP);
+ }
+ }
+
+ // walk down the path, swapping out linked pathparts for their real
+ // values
+ function LOOP() {
+ // stop if scanned past end of path
+ if (pos >= p.length) {
+ if (cache) cache[original] = p;
+ return cb(null, p);
+ }
+
+ // find the next part
+ nextPartRe.lastIndex = pos;
+ var result = nextPartRe.exec(p);
+ previous = current;
+ current += result[0];
+ base = previous + result[1];
+ pos = nextPartRe.lastIndex;
+
+ // continue if not a symlink
+ if (knownHard[base] || (cache && cache[base] === base)) {
+ return process.nextTick(LOOP);
+ }
+
+ if (cache && Object.prototype.hasOwnProperty.call(cache, base)) {
+ // known symbolic link. no need to stat again.
+ return gotResolvedLink(cache[base]);
+ }
+
+ return fs.lstat(base, gotStat);
+ }
+
+ function gotStat(err, stat) {
+ if (err) return cb(err);
+
+ // if not a symlink, skip to the next path part
+ if (!stat.isSymbolicLink()) {
+ knownHard[base] = true;
+ if (cache) cache[base] = base;
+ return process.nextTick(LOOP);
+ }
+
+ // stat & read the link if not read before
+ // call gotTarget as soon as the link target is known
+ // dev/ino always return 0 on windows, so skip the check.
+ if (!isWindows) {
+ var id = stat.dev.toString(32) + ':' + stat.ino.toString(32);
+ if (seenLinks.hasOwnProperty(id)) {
+ return gotTarget(null, seenLinks[id], base);
+ }
+ }
+ fs.stat(base, function(err) {
+ if (err) return cb(err);
+
+ fs.readlink(base, function(err, target) {
+ if (!isWindows) seenLinks[id] = target;
+ gotTarget(err, target);
+ });
+ });
+ }
+
+ function gotTarget(err, target, base) {
+ if (err) return cb(err);
+
+ var resolvedLink = pathModule.resolve(previous, target);
+ if (cache) cache[base] = resolvedLink;
+ gotResolvedLink(resolvedLink);
+ }
+
+ function gotResolvedLink(resolvedLink) {
+ // resolve the link, then start over
+ p = pathModule.resolve(resolvedLink, p.slice(pos));
+ start();
+ }
+};
+
+
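+// A worked sketch of the legacy implementation above; the optional cache maps already
+// resolved paths so repeated lookups skip lstat/readlink. '/tmp/link' is a placeholder:
+//
+//   const cache = {};
+//   const real = exports.realpathSync('/tmp/link', cache); // resolves symlinks segment by segment
+//   exports.realpath('/tmp/link', cache, (err, p) => {});   // async variant, same walk via LOOP()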
/***/ }),
/***/ 1621:
@@ -12386,6 +18766,117 @@ module.exports = (flag, argv = process.argv) => {
};
+/***/ }),
+
+/***/ 2492:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var wrappy = __nccwpck_require__(2940)
+var reqs = Object.create(null)
+var once = __nccwpck_require__(1223)
+
+module.exports = wrappy(inflight)
+
+function inflight (key, cb) {
+ if (reqs[key]) {
+ reqs[key].push(cb)
+ return null
+ } else {
+ reqs[key] = [cb]
+ return makeres(key)
+ }
+}
+
+function makeres (key) {
+ return once(function RES () {
+ var cbs = reqs[key]
+ var len = cbs.length
+ var args = slice(arguments)
+
+ // XXX It's somewhat ambiguous whether a new callback added in this
+ // pass should be queued for later execution if something in the
+ // list of callbacks throws, or if it should just be discarded.
+ // However, it's such an edge case that it hardly matters, and either
+ // choice is likely as surprising as the other.
+ // As it happens, we do go ahead and schedule it for later execution.
+ try {
+ for (var i = 0; i < len; i++) {
+ cbs[i].apply(null, args)
+ }
+ } finally {
+ if (cbs.length > len) {
+ // added more in the interim.
+ // de-zalgo, just in case, but don't call again.
+ cbs.splice(0, len)
+ process.nextTick(function () {
+ RES.apply(null, args)
+ })
+ } else {
+ delete reqs[key]
+ }
+ }
+ })
+}
+
+function slice (args) {
+ var length = args.length
+ var array = []
+
+ for (var i = 0; i < length; i++) array[i] = args[i]
+ return array
+}
+
+
+/***/ }),
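+// A minimal sketch of inflight() above: callers that share a key are queued, and only the
+// first caller gets a callback to actually run; the 'stat:' + path key scheme is a placeholder:
+//
+//   const fs = require('fs');
+//   function statOnce (path, cb) {
+//     cb = inflight('stat:' + path, cb);
+//     if (!cb) return;                 // an identical request is already in flight
+//     fs.stat(path, cb);               // completing it fires every queued callback once
+//   }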
+
+/***/ 4124:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+try {
+ var util = __nccwpck_require__(3837);
+ /* istanbul ignore next */
+ if (typeof util.inherits !== 'function') throw '';
+ module.exports = util.inherits;
+} catch (e) {
+ /* istanbul ignore next */
+ module.exports = __nccwpck_require__(8544);
+}
+
+
+/***/ }),
+
+/***/ 8544:
+/***/ ((module) => {
+
+if (typeof Object.create === 'function') {
+ // implementation from standard node.js 'util' module
+ module.exports = function inherits(ctor, superCtor) {
+ if (superCtor) {
+ ctor.super_ = superCtor
+ ctor.prototype = Object.create(superCtor.prototype, {
+ constructor: {
+ value: ctor,
+ enumerable: false,
+ writable: true,
+ configurable: true
+ }
+ })
+ }
+ };
+} else {
+ // old school shim for old browsers
+ module.exports = function inherits(ctor, superCtor) {
+ if (superCtor) {
+ ctor.super_ = superCtor
+ var TempCtor = function () {}
+ TempCtor.prototype = superCtor.prototype
+ ctor.prototype = new TempCtor()
+ ctor.prototype.constructor = ctor
+ }
+ }
+}
+
+
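+// A minimal sketch of inherits() above (util.inherits with the old-browser fallback);
+// EventEmitter is only used here as a convenient superclass for illustration:
+//
+//   const EventEmitter = require('events');
+//   function Stream () { EventEmitter.call(this); }
+//   inherits(Stream, EventEmitter);                       // chains Stream.prototype to EventEmitter.prototype
+//   console.log(new Stream() instanceof EventEmitter);    // true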
/***/ }),
/***/ 250:
@@ -29987,6 +36478,55 @@ function plural(ms, msAbs, n, name) {
}
+/***/ }),
+
+/***/ 1223:
+/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => {
+
+var wrappy = __nccwpck_require__(2940)
+module.exports = wrappy(once)
+module.exports.strict = wrappy(onceStrict)
+
+once.proto = once(function () {
+ Object.defineProperty(Function.prototype, 'once', {
+ value: function () {
+ return once(this)
+ },
+ configurable: true
+ })
+
+ Object.defineProperty(Function.prototype, 'onceStrict', {
+ value: function () {
+ return onceStrict(this)
+ },
+ configurable: true
+ })
+})
+
+function once (fn) {
+ var f = function () {
+ if (f.called) return f.value
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ f.called = false
+ return f
+}
+
+function onceStrict (fn) {
+ var f = function () {
+ if (f.called)
+ throw new Error(f.onceError)
+ f.called = true
+ return f.value = fn.apply(this, arguments)
+ }
+ var name = fn.name || 'Function wrapped with `once`'
+ f.onceError = name + " shouldn't be called more than once"
+ f.called = false
+ return f
+}
+
+
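+// A minimal sketch of the module above, assuming it is required as `once`: the wrapped
+// function runs at most one time and caches its first return value, while the strict
+// variant throws on a second call:
+//
+//   const load = once(() => Math.random());
+//   load() === load();                 // true - the second call returns the cached value
+//   const strictCb = once.strict(() => 'done');
+//   strictCb(); strictCb();            // the second call throws "... shouldn't be called more than once"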
/***/ }),
/***/ 9318:
@@ -34935,48 +41475,88 @@ function version(uuid) {
var _default = version;
exports["default"] = _default;
+/***/ }),
+
+/***/ 2940:
+/***/ ((module) => {
+
+// Returns a wrapper function that returns a wrapped callback.
+// The wrapper function is expected to decorate the callback it is given and
+// return a (presumably different) callback function.
+// This makes sure that own properties are retained, so that
+// decorations and such are not lost along the way.
+module.exports = wrappy
+function wrappy (fn, cb) {
+ if (fn && cb) return wrappy(fn)(cb)
+
+ if (typeof fn !== 'function')
+ throw new TypeError('need wrapper function')
+
+ Object.keys(fn).forEach(function (k) {
+ wrapper[k] = fn[k]
+ })
+
+ return wrapper
+
+ function wrapper() {
+ var args = new Array(arguments.length)
+ for (var i = 0; i < args.length; i++) {
+ args[i] = arguments[i]
+ }
+ var ret = fn.apply(this, args)
+ var cb = args[args.length-1]
+ if (typeof ret === 'function' && ret !== cb) {
+ Object.keys(cb).forEach(function (k) {
+ ret[k] = cb[k]
+ })
+ }
+ return ret
+ }
+}
+
+
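+// A minimal sketch of wrappy() above: own properties of the wrapped function and of the
+// callback are copied onto the result, so decorations survive the wrapping:
+//
+//   const wrapped = wrappy(function (cb) { return function () { return cb.apply(this, arguments); }; });
+//   const cb = () => 'hi';
+//   cb.called = false;                 // decoration that must be preserved
+//   const f = wrapped(cb);
+//   console.log(f.called);             // false - copied from cb onto the returned function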
/***/ }),
/***/ 4636:
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.createReleaseFromInputs = void 0;
-const api_client_1 = __nccwpck_require__(586);
-function createReleaseFromInputs(client, parameters) {
- return __awaiter(this, void 0, void 0, function* () {
- client.info('🐙 Creating a release in Octopus Deploy...');
- const command = {
- spaceName: parameters.space,
- ProjectName: parameters.project,
- ChannelName: parameters.channel,
- ReleaseVersion: parameters.releaseNumber,
- PackageVersion: parameters.packageVersion,
- Packages: parameters.packages,
- GitRef: parameters.gitRef,
- GitCommit: parameters.gitCommit,
- ReleaseNotes: parameters.releaseNotes,
- IgnoreIfAlreadyExists: parameters.ignoreExisting,
- IgnoreChannelRules: false
- };
- const repository = new api_client_1.ReleaseRepository(client, parameters.space);
- const allocatedReleaseNumber = yield repository.create(command);
- client.info(`🎉 Release ${allocatedReleaseNumber.ReleaseVersion} created successfully!`);
- return allocatedReleaseNumber.ReleaseVersion;
- });
-}
-exports.createReleaseFromInputs = createReleaseFromInputs;
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.createReleaseFromInputs = void 0;
+const api_client_1 = __nccwpck_require__(586);
+function createReleaseFromInputs(client, parameters) {
+ return __awaiter(this, void 0, void 0, function* () {
+ client.info('🐙 Creating a release in Octopus Deploy...');
+ const command = {
+ spaceName: parameters.space,
+ ProjectName: parameters.project,
+ ChannelName: parameters.channel,
+ ReleaseVersion: parameters.releaseNumber,
+ PackageVersion: parameters.packageVersion,
+ Packages: parameters.packages,
+ GitRef: parameters.gitRef,
+ GitCommit: parameters.gitCommit,
+ ReleaseNotes: parameters.releaseNotes,
+ IgnoreIfAlreadyExists: parameters.ignoreExisting,
+ IgnoreChannelRules: false
+ };
+ const repository = new api_client_1.ReleaseRepository(client, parameters.space);
+ const allocatedReleaseNumber = yield repository.create(command);
+ client.info(`🎉 Release ${allocatedReleaseNumber.ReleaseVersion} created successfully!`);
+ return allocatedReleaseNumber.ReleaseVersion;
+ });
+}
+exports.createReleaseFromInputs = createReleaseFromInputs;
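+// A hedged usage sketch of createReleaseFromInputs above, run inside an async function.
+// The Client comes from the bundled @octopusdeploy/api-client (wired up in the entry
+// module below); every value here is a placeholder, not a real instance or project:
+//
+//   const { Client } = require('@octopusdeploy/api-client');
+//   const client = await Client.create({
+//     instanceURL: 'https://my.octopus.app',
+//     apiKey: 'API-XXXX',
+//     userAgentApp: 'example-app'
+//   });
+//   const version = await createReleaseFromInputs(client, {
+//     space: 'Default',
+//     project: 'My Project',
+//     ignoreExisting: false
+//   });
+//   console.log(version);              // the allocated ReleaseVersion, e.g. "1.2.3"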
/***/ }),
@@ -34985,67 +41565,67 @@ exports.createReleaseFromInputs = createReleaseFromInputs;
/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) {
"use strict";
-
-var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
- function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
- return new (P || (P = Promise))(function (resolve, reject) {
- function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
- function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
- function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
- step((generator = generator.apply(thisArg, _arguments || [])).next());
- });
-};
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-const input_parameters_1 = __nccwpck_require__(9519);
-const core_1 = __nccwpck_require__(2186);
-const fs_1 = __nccwpck_require__(7147);
-const api_client_1 = __nccwpck_require__(586);
-const api_wrapper_1 = __nccwpck_require__(4636);
-(() => __awaiter(void 0, void 0, void 0, function* () {
- try {
- const logger = {
- debug: message => {
- if ((0, core_1.isDebug)()) {
- (0, core_1.debug)(message);
- }
- },
- info: message => (0, core_1.info)(message),
- warn: message => (0, core_1.warning)(message),
- error: (message, err) => {
- if (err !== undefined) {
- (0, core_1.error)(err.message);
- }
- else {
- (0, core_1.error)(message);
- }
- }
- };
- const parameters = (0, input_parameters_1.getInputParameters)();
- const config = {
- userAgentApp: 'GitHubActions create-release-action',
- instanceURL: parameters.server,
- apiKey: parameters.apiKey,
- logging: logger
- };
- const client = yield api_client_1.Client.create(config);
- const allocatedReleaseNumber = yield (0, api_wrapper_1.createReleaseFromInputs)(client, parameters);
- if (allocatedReleaseNumber) {
- (0, core_1.setOutput)('release_number', allocatedReleaseNumber);
- }
- const stepSummaryFile = process.env.GITHUB_STEP_SUMMARY;
- if (stepSummaryFile && allocatedReleaseNumber) {
- (0, fs_1.writeFileSync)(stepSummaryFile, `🐙 Octopus Deploy Created Release **${allocatedReleaseNumber}** in Project **${parameters.project}**.`);
- }
- }
- catch (e) {
- if (e instanceof Error) {
- (0, core_1.setFailed)(e);
- }
- else {
- (0, core_1.setFailed)(`Unknown error: ${e}`);
- }
- }
-}))();
+
+var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) {
+ function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); }
+ return new (P || (P = Promise))(function (resolve, reject) {
+ function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } }
+ function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } }
+ function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); }
+ step((generator = generator.apply(thisArg, _arguments || [])).next());
+ });
+};
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+const input_parameters_1 = __nccwpck_require__(9519);
+const core_1 = __nccwpck_require__(2186);
+const fs_1 = __nccwpck_require__(7147);
+const api_client_1 = __nccwpck_require__(586);
+const api_wrapper_1 = __nccwpck_require__(4636);
+(() => __awaiter(void 0, void 0, void 0, function* () {
+ try {
+ const logger = {
+ debug: message => {
+ if ((0, core_1.isDebug)()) {
+ (0, core_1.debug)(message);
+ }
+ },
+ info: message => (0, core_1.info)(message),
+ warn: message => (0, core_1.warning)(message),
+ error: (message, err) => {
+ if (err !== undefined) {
+ (0, core_1.error)(err.message);
+ }
+ else {
+ (0, core_1.error)(message);
+ }
+ }
+ };
+ const parameters = (0, input_parameters_1.getInputParameters)();
+ const config = {
+ userAgentApp: 'GitHubActions create-release-action',
+ instanceURL: parameters.server,
+ apiKey: parameters.apiKey,
+ logging: logger
+ };
+ const client = yield api_client_1.Client.create(config);
+ const allocatedReleaseNumber = yield (0, api_wrapper_1.createReleaseFromInputs)(client, parameters);
+ if (allocatedReleaseNumber) {
+ (0, core_1.setOutput)('release_number', allocatedReleaseNumber);
+ }
+ const stepSummaryFile = process.env.GITHUB_STEP_SUMMARY;
+ if (stepSummaryFile && allocatedReleaseNumber) {
+ (0, fs_1.writeFileSync)(stepSummaryFile, `🐙 Octopus Deploy Created Release **${allocatedReleaseNumber}** in Project **${parameters.project}**.`);
+ }
+ }
+ catch (e) {
+ if (e instanceof Error) {
+ (0, core_1.setFailed)(e);
+ }
+ else {
+ (0, core_1.setFailed)(`Unknown error: ${e}`);
+ }
+ }
+}))();
/***/ }),
@@ -35054,46 +41634,54 @@ const api_wrapper_1 = __nccwpck_require__(4636);
/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => {
"use strict";
-
-Object.defineProperty(exports, "__esModule", ({ value: true }));
-exports.getInputParameters = void 0;
-const core_1 = __nccwpck_require__(2186);
-const EnvironmentVariables = {
- URL: 'OCTOPUS_URL',
- ApiKey: 'OCTOPUS_API_KEY',
- Space: 'OCTOPUS_SPACE'
-};
-function getInputParameters() {
- const parameters = {
- server: (0, core_1.getInput)('server') || process.env[EnvironmentVariables.URL] || '',
- apiKey: (0, core_1.getInput)('api_key') || process.env[EnvironmentVariables.ApiKey] || '',
- space: (0, core_1.getInput)('space') || process.env[EnvironmentVariables.Space] || '',
- project: (0, core_1.getInput)('project', { required: true }),
- releaseNumber: (0, core_1.getInput)('release_number') || undefined,
- channel: (0, core_1.getInput)('channel') || undefined,
- packageVersion: (0, core_1.getInput)('package_version') || undefined,
- packages: (0, core_1.getMultilineInput)('packages').map(p => p.trim()) || undefined,
- gitRef: (0, core_1.getInput)('git_ref') || undefined,
- gitCommit: (0, core_1.getInput)('git_commit') || undefined,
- ignoreExisting: (0, core_1.getBooleanInput)('ignore_existing') || undefined,
- releaseNotes: (0, core_1.getInput)('release_notes') || undefined
- };
- const errors = [];
- if (!parameters.server) {
- errors.push("The Octopus instance URL is required, please specify explictly through the 'server' input or set the OCTOPUS_URL environment variable.");
- }
- if (!parameters.apiKey) {
- errors.push("The Octopus API Key is required, please specify explictly through the 'api_key' input or set the OCTOPUS_API_KEY environment variable.");
- }
- if (!parameters.space) {
- errors.push("The Octopus space name is required, please specify explictly through the 'space' input or set the OCTOPUS_SPACE environment variable.");
- }
- if (errors.length > 0) {
- throw new Error(errors.join('\n'));
- }
- return parameters;
-}
-exports.getInputParameters = getInputParameters;
+
+Object.defineProperty(exports, "__esModule", ({ value: true }));
+exports.getInputParameters = void 0;
+const core_1 = __nccwpck_require__(2186);
+const EnvironmentVariables = {
+ URL: 'OCTOPUS_URL',
+ ApiKey: 'OCTOPUS_API_KEY',
+ Space: 'OCTOPUS_SPACE'
+};
+function getInputParameters() {
+ const parameters = {
+ server: (0, core_1.getInput)('server') || process.env[EnvironmentVariables.URL] || '',
+ apiKey: (0, core_1.getInput)('api_key') || process.env[EnvironmentVariables.ApiKey] || '',
+ space: (0, core_1.getInput)('space') || process.env[EnvironmentVariables.Space] || '',
+ project: (0, core_1.getInput)('project', { required: true }),
+ releaseNumber: (0, core_1.getInput)('release_number') || undefined,
+ channel: (0, core_1.getInput)('channel') || undefined,
+ packageVersion: (0, core_1.getInput)('package_version') || undefined,
+ packages: (0, core_1.getMultilineInput)('packages').map(p => p.trim()) || undefined,
+ gitRef: (0, core_1.getInput)('git_ref') || undefined,
+ gitCommit: (0, core_1.getInput)('git_commit') || undefined,
+ ignoreExisting: (0, core_1.getBooleanInput)('ignore_existing') || undefined,
+ releaseNotes: (0, core_1.getInput)('release_notes') || undefined
+ };
+ const errors = [];
+ if (!parameters.server) {
+ errors.push("The Octopus instance URL is required, please specify explictly through the 'server' input or set the OCTOPUS_URL environment variable.");
+ }
+ if (!parameters.apiKey) {
+ errors.push("The Octopus API Key is required, please specify explictly through the 'api_key' input or set the OCTOPUS_API_KEY environment variable.");
+ }
+ if (!parameters.space) {
+ errors.push("The Octopus space name is required, please specify explictly through the 'space' input or set the OCTOPUS_SPACE environment variable.");
+ }
+ if (errors.length > 0) {
+ throw new Error(errors.join('\n'));
+ }
+ return parameters;
+}
+exports.getInputParameters = getInputParameters;
+
+
+/***/ }),
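+// A sketch of the resolution rule implemented by getInputParameters above: each connection
+// setting prefers the explicit action input and falls back to the matching environment
+// variable, and every missing setting is reported together in a single Error.
+// The values below are placeholders (GitHub exposes the 'project' input as INPUT_PROJECT):
+//
+//   process.env.OCTOPUS_URL = 'https://my.octopus.app';
+//   process.env.OCTOPUS_API_KEY = 'API-XXXX';
+//   process.env.OCTOPUS_SPACE = 'Default';
+//   process.env.INPUT_PROJECT = 'My Project';
+//   const parameters = exports.getInputParameters();  // succeeds; omit all three env vars to see the combined Error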
+
+/***/ 2941:
+/***/ ((module) => {
+
+module.exports = eval("require")("original-fs");
/***/ }),
diff --git a/package-lock.json b/package-lock.json
index 146d3360..6d4a2483 100644
--- a/package-lock.json
+++ b/package-lock.json
@@ -1,12 +1,12 @@
{
"name": "create-release-action",
- "version": "2.1.0",
+ "version": "3.0.0",
"lockfileVersion": 2,
"requires": true,
"packages": {
"": {
"name": "create-release-action",
- "version": "2.1.0",
+ "version": "3.0.0",
"license": "Apache-2.0",
"dependencies": {
"@actions/core": "^1.10.0",
diff --git a/package.json b/package.json
index 232eb383..422d7951 100644
--- a/package.json
+++ b/package.json
@@ -84,5 +84,5 @@
"test:unit": "jest --ci --reporters=default --reporters=jest-junit --testPathPattern=__tests__/unit",
"test:integration": "jest --ci --reporters=default --reporters=jest-junit --testPathPattern=__tests__/integration"
},
- "version": "2.1.0"
+ "version": "3.0.0"
}