diff --git a/CHANGELOG.md b/CHANGELOG.md index d4cc6fb..13494ef 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -5,7 +5,7 @@ The format is based on [Keep a Changelog](https://keepachangelog.com/en/1.0.0/). ## [Unreleased] ### Changed -- Update dependencies. +- Update dependencies, which adds rate-limiting when accessing the GitHub API. ### Fixed - Allow to write any file from the Docker container. [#118](https://github.com/zaproxy/action-baseline/issues/118) diff --git a/dist/index.js b/dist/index.js index 457304f..6d2115a 100644 --- a/dist/index.js +++ b/dist/index.js @@ -5,9 +5,10 @@ /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; +var __webpack_unused_export__; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.create = void 0; +__webpack_unused_export__ = ({ value: true }); +exports.U = void 0; const artifact_client_1 = __nccwpck_require__(8802); /** * Constructs an ArtifactClient @@ -15,7 +16,7 @@ const artifact_client_1 = __nccwpck_require__(8802); function create() { return artifact_client_1.DefaultArtifactClient.create(); } -exports.create = create; +exports.U = create; //# sourceMappingURL=artifact-client.js.map /***/ }), @@ -27,7 +28,11 @@ exports.create = create; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -40,7 +45,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -78,9 +83,9 @@ class DefaultArtifactClient { return __awaiter(this, void 0, void 0, function* () { core.info(`Starting artifact upload For more detailed logs during the artifact upload process, enable step-debugging: https://docs.github.com/actions/monitoring-and-troubleshooting-workflows/enabling-debug-logging#enabling-step-debug-logging`); - path_and_artifact_name_validation_1.checkArtifactName(name); + (0, path_and_artifact_name_validation_1.checkArtifactName)(name); // Get specification for the files being uploaded - const uploadSpecification = upload_specification_1.getUploadSpecification(name, rootDirectory, files); + const uploadSpecification = (0, upload_specification_1.getUploadSpecification)(name, rootDirectory, files); const uploadResponse = { artifactName: name, artifactItems: [], @@ -139,20 +144,20 @@ Note: The size of downloaded zips can differ significantly from the reported siz } const items = yield downloadHttpClient.getContainerItems(artifactToDownload.name, artifactToDownload.fileContainerResourceUrl); if (!path) { - path = config_variables_1.getWorkSpaceDirectory(); + path = (0, config_variables_1.getWorkSpaceDirectory)(); } - path = path_1.normalize(path); - path = path_1.resolve(path); + path = (0, path_1.normalize)(path); + path = (0, path_1.resolve)(path); // During upload, empty directories are rejected by the remote server so there should be no artifacts that consist of only empty directories - const downloadSpecification = download_specification_1.getDownloadSpecification(name, items.value, path, (options === null || options === void 0 ? void 0 : options.createArtifactFolder) || false); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(name, items.value, path, (options === null || options === void 0 ? 
void 0 : options.createArtifactFolder) || false); if (downloadSpecification.filesToDownload.length === 0) { core.info(`No downloadable files were found for the artifact: ${artifactToDownload.name}`); } else { // Create all necessary directories recursively before starting any download - yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); - core.info('Directory structure has been setup for the artifact'); - yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); + core.info('Directory structure has been set up for the artifact'); + yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } return { @@ -171,10 +176,10 @@ Note: The size of downloaded zips can differ significantly from the reported siz return response; } if (!path) { - path = config_variables_1.getWorkSpaceDirectory(); + path = (0, config_variables_1.getWorkSpaceDirectory)(); } - path = path_1.normalize(path); - path = path_1.resolve(path); + path = (0, path_1.normalize)(path); + path = (0, path_1.resolve)(path); let downloadedArtifacts = 0; while (downloadedArtifacts < artifacts.count) { const currentArtifactToDownload = artifacts.value[downloadedArtifacts]; @@ -182,13 +187,13 @@ Note: The size of downloaded zips can differ significantly from the reported siz core.info(`starting download of artifact ${currentArtifactToDownload.name} : ${downloadedArtifacts}/${artifacts.count}`); // Get container entries for the specific artifact const items = yield downloadHttpClient.getContainerItems(currentArtifactToDownload.name, currentArtifactToDownload.fileContainerResourceUrl); - const downloadSpecification = download_specification_1.getDownloadSpecification(currentArtifactToDownload.name, items.value, path, true); + const downloadSpecification = (0, download_specification_1.getDownloadSpecification)(currentArtifactToDownload.name, items.value, path, true); if (downloadSpecification.filesToDownload.length === 0) { core.info(`No downloadable files were found for any artifact ${currentArtifactToDownload.name}`); } else { - yield utils_1.createDirectoriesForArtifact(downloadSpecification.directoryStructure); - yield utils_1.createEmptyFilesForArtifact(downloadSpecification.emptyFilesToCreate); + yield (0, utils_1.createDirectoriesForArtifact)(downloadSpecification.directoryStructure); + yield (0, utils_1.createEmptyFilesForArtifact)(downloadSpecification.emptyFilesToCreate); yield downloadHttpClient.downloadSingleArtifact(downloadSpecification.filesToDownload); } response.push({ @@ -211,7 +216,7 @@ exports.DefaultArtifactClient = DefaultArtifactClient; "use strict"; Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0; +exports.isGhes = exports.getRetentionDays = exports.getWorkSpaceDirectory = exports.getWorkFlowRunId = exports.getRuntimeUrl = exports.getRuntimeToken = exports.getDownloadFileConcurrency = exports.getInitialRetryIntervalInMilliseconds = exports.getRetryMultiplier = exports.getRetryLimit = 
exports.getUploadChunkSize = exports.getUploadFileConcurrency = void 0; // The number of concurrent uploads that happens at the same time function getUploadFileConcurrency() { return 2; @@ -280,6 +285,11 @@ function getRetentionDays() { return process.env['GITHUB_RETENTION_DAYS']; } exports.getRetentionDays = getRetentionDays; +function isGhes() { + const ghUrl = new URL(process.env['GITHUB_SERVER_URL'] || 'https://github.com'); + return ghUrl.hostname.toUpperCase() !== 'GITHUB.COM'; +} +exports.isGhes = isGhes; //# sourceMappingURL=config-variables.js.map /***/ }), @@ -601,7 +611,11 @@ exports["default"] = CRC64; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -614,7 +628,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -641,7 +655,7 @@ const config_variables_1 = __nccwpck_require__(2222); const requestUtils_1 = __nccwpck_require__(755); class DownloadHttpClient { constructor() { - this.downloadHttpManager = new http_manager_1.HttpManager(config_variables_1.getDownloadFileConcurrency(), '@actions/artifact-download'); + this.downloadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getDownloadFileConcurrency)(), '@actions/artifact-download'); // downloads are usually significantly faster than uploads so display status information every second this.statusReporter = new status_reporter_1.StatusReporter(1000); } @@ -650,11 +664,11 @@ class DownloadHttpClient { */ listArtifacts() { return __awaiter(this, void 0, void 0, function* () { - const artifactUrl = utils_1.getArtifactUrl(); + const artifactUrl = (0, utils_1.getArtifactUrl)(); // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.downloadHttpManager.getClient(0); - const headers = utils_1.getDownloadHeaders('application/json'); - const response = yield requestUtils_1.retryHttpClientRequest('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); })); + const headers = (0, utils_1.getDownloadHeaders)('application/json'); + const response = yield (0, requestUtils_1.retryHttpClientRequest)('List Artifacts', () => __awaiter(this, void 0, void 0, function* () { return client.get(artifactUrl, headers); })); const body = yield response.readBody(); return JSON.parse(body); }); @@ -671,8 +685,8 @@ class DownloadHttpClient { resourceUrl.searchParams.append('itemPath', artifactName); // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = 
this.downloadHttpManager.getClient(0); - const headers = utils_1.getDownloadHeaders('application/json'); - const response = yield requestUtils_1.retryHttpClientRequest('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); })); + const headers = (0, utils_1.getDownloadHeaders)('application/json'); + const response = yield (0, requestUtils_1.retryHttpClientRequest)('Get Container Items', () => __awaiter(this, void 0, void 0, function* () { return client.get(resourceUrl.toString(), headers); })); const body = yield response.readBody(); return JSON.parse(body); }); @@ -683,7 +697,7 @@ class DownloadHttpClient { */ downloadSingleArtifact(downloadItems) { return __awaiter(this, void 0, void 0, function* () { - const DOWNLOAD_CONCURRENCY = config_variables_1.getDownloadFileConcurrency(); + const DOWNLOAD_CONCURRENCY = (0, config_variables_1.getDownloadFileConcurrency)(); // limit the number of files downloaded at a single time core.debug(`Download file concurrency is set to ${DOWNLOAD_CONCURRENCY}`); const parallelDownloads = [...new Array(DOWNLOAD_CONCURRENCY).keys()]; @@ -723,9 +737,9 @@ class DownloadHttpClient { downloadIndividualFile(httpClientIndex, artifactLocation, downloadPath) { return __awaiter(this, void 0, void 0, function* () { let retryCount = 0; - const retryLimit = config_variables_1.getRetryLimit(); + const retryLimit = (0, config_variables_1.getRetryLimit)(); let destinationStream = fs.createWriteStream(downloadPath); - const headers = utils_1.getDownloadHeaders('application/json', true, true); + const headers = (0, utils_1.getDownloadHeaders)('application/json', true, true); // a single GET request is used to download a file const makeDownloadRequest = () => __awaiter(this, void 0, void 0, function* () { const client = this.downloadHttpManager.getClient(httpClientIndex); @@ -749,13 +763,13 @@ class DownloadHttpClient { if (retryAfterValue) { // Back off by waiting the specified time denoted by the retry-after header core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the download`); - yield utils_1.sleep(retryAfterValue); + yield (0, utils_1.sleep)(retryAfterValue); } else { // Back off using an exponential value that depends on the retry count - const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); core.info(`Exponential backoff for retry #${retryCount}. 
Waiting for ${backoffTime} milliseconds before continuing the download`); - yield utils_1.sleep(backoffTime); + yield (0, utils_1.sleep)(backoffTime); } core.info(`Finished backoff for retry #${retryCount}, continuing with download`); } @@ -779,7 +793,7 @@ class DownloadHttpClient { resolve(); } }); - yield utils_1.rmFile(fileDownloadPath); + yield (0, utils_1.rmFile)(fileDownloadPath); destinationStream = fs.createWriteStream(fileDownloadPath); }); // keep trying to download a file until a retry limit has been reached @@ -798,7 +812,7 @@ class DownloadHttpClient { continue; } let forceRetry = false; - if (utils_1.isSuccessStatusCode(response.message.statusCode)) { + if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { // The body contains the contents of the file however calling response.readBody() causes all the content to be converted to a string // which can cause some gzip encoded data to be lost // Instead of using response.readBody(), response.message is a readableStream that can be directly used to get the raw body contents @@ -806,7 +820,7 @@ class DownloadHttpClient { const isGzipped = isGzip(response.message.headers); yield this.pipeResponseToFile(response, destinationStream, isGzipped); if (isGzipped || - isAllBytesReceived(response.message.headers['content-length'], yield utils_1.getFileSize(downloadPath))) { + isAllBytesReceived(response.message.headers['content-length'], yield (0, utils_1.getFileSize)(downloadPath))) { return; } else { @@ -818,17 +832,17 @@ class DownloadHttpClient { forceRetry = true; } } - if (forceRetry || utils_1.isRetryableStatusCode(response.message.statusCode)) { + if (forceRetry || (0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { core.info(`A ${response.message.statusCode} response code has been received while attempting to download an artifact`); resetDestinationStream(downloadPath); // if a throttled status code is received, try to get the retryAfter header value, else differ to standard exponential backoff - utils_1.isThrottledStatusCode(response.message.statusCode) - ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + (0, utils_1.isThrottledStatusCode)(response.message.statusCode) + ? 
yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { // Some unexpected response code, fail immediately and stop the download - utils_1.displayHttpDiagnostics(response); + (0, utils_1.displayHttpDiagnostics)(response); return Promise.reject(new Error(`Unexpected http ${response.message.statusCode} during download for ${artifactLocation}`)); } } @@ -847,14 +861,14 @@ class DownloadHttpClient { const gunzip = zlib.createGunzip(); response.message .on('error', error => { - core.error(`An error occurred while attempting to read the response stream`); + core.info(`An error occurred while attempting to read the response stream`); gunzip.close(); destinationStream.close(); reject(error); }) .pipe(gunzip) .on('error', error => { - core.error(`An error occurred while attempting to decompress the response stream`); + core.info(`An error occurred while attempting to decompress the response stream`); destinationStream.close(); reject(error); }) @@ -863,14 +877,14 @@ class DownloadHttpClient { resolve(); }) .on('error', error => { - core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error); }); } else { response.message .on('error', error => { - core.error(`An error occurred while attempting to read the response stream`); + core.info(`An error occurred while attempting to read the response stream`); destinationStream.close(); reject(error); }) @@ -879,7 +893,7 @@ class DownloadHttpClient { resolve(); }) .on('error', error => { - core.error(`An error occurred while writing a downloaded file to ${destinationStream.path}`); + core.info(`An error occurred while writing a downloaded file to ${destinationStream.path}`); reject(error); }); } @@ -900,7 +914,11 @@ exports.DownloadHttpClient = DownloadHttpClient; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -913,7 +931,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -991,7 +1009,7 @@ class HttpManager { throw new Error('There must be at least one client'); } this.userAgent = userAgent; - this.clients = new Array(clientCount).fill(utils_1.createHttpClient(userAgent)); + this.clients = new Array(clientCount).fill((0, utils_1.createHttpClient)(userAgent)); } getClient(index) { return this.clients[index]; @@ -1000,7 +1018,7 @@ class HttpManager { // for more information see: https://github.com/actions/http-client/blob/04e5ad73cd3fd1f5610a32116b0759eddf6570d2/index.ts#L292 disposeAndReplaceClient(index) { this.clients[index].dispose(); - this.clients[index] = utils_1.createHttpClient(this.userAgent); + this.clients[index] = (0, utils_1.createHttpClient)(this.userAgent); } disposeAndReplaceAllClients() { for (const [index] of this.clients.entries()) { @@ -1061,7 +1079,7 @@ Invalid characters include: ${Array.from(invalidArtifactNameCharacters.values()) These characters are not allowed in the artifact name due to limitations with certain file systems such as NTFS. To maintain file system agnostic behavior, these characters are intentionally not allowed to prevent potential problems with downloads on different file systems.`); } } - core_1.info(`Artifact name is valid!`); + (0, core_1.info)(`Artifact name is valid!`); } exports.checkArtifactName = checkArtifactName; /** @@ -1094,7 +1112,11 @@ exports.checkArtifactFilePath = checkArtifactFilePath; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1107,7 +1129,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1137,14 +1159,14 @@ function retry(name, operation, customErrorMessages, maxAttempts) { try { response = yield operation(); statusCode = response.message.statusCode; - if (utils_1.isSuccessStatusCode(statusCode)) { + if ((0, utils_1.isSuccessStatusCode)(statusCode)) { return response; } // Extra error information that we want to display if a particular response code is hit if (statusCode) { customErrorInformation = customErrorMessages.get(statusCode); } - isRetryable = utils_1.isRetryableStatusCode(statusCode); + isRetryable = (0, utils_1.isRetryableStatusCode)(statusCode); errorMessage = `Artifact service responded with ${statusCode}`; } catch (error) { @@ -1154,16 +1176,16 @@ function retry(name, operation, customErrorMessages, maxAttempts) { if (!isRetryable) { core.info(`${name} - Error is not retryable`); if (response) { - utils_1.displayHttpDiagnostics(response); + (0, utils_1.displayHttpDiagnostics)(response); } break; } core.info(`${name} - Attempt ${attempt} of ${maxAttempts} failed with error: ${errorMessage}`); - yield utils_1.sleep(utils_1.getExponentialRetryTimeInMilliseconds(attempt)); + yield (0, utils_1.sleep)((0, utils_1.getExponentialRetryTimeInMilliseconds)(attempt)); attempt++; } if (response) { - utils_1.displayHttpDiagnostics(response); + (0, utils_1.displayHttpDiagnostics)(response); } if (customErrorInformation) { throw Error(`${name} failed: ${customErrorInformation}`); @@ -1172,7 +1194,7 @@ function retry(name, operation, customErrorMessages, maxAttempts) { }); } exports.retry = retry; -function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = config_variables_1.getRetryLimit()) { +function retryHttpClientRequest(name, method, customErrorMessages = new Map(), maxAttempts = (0, config_variables_1.getRetryLimit)()) { return __awaiter(this, void 0, void 0, function* () { return yield retry(name, method, customErrorMessages, maxAttempts); }); @@ -1214,14 +1236,14 @@ class StatusReporter { this.totalFileStatus = setInterval(() => { // display 1 decimal place without any rounding const percentage = this.formatPercentage(this.processedCount, this.totalNumberOfFilesToProcess); - core_1.info(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); + (0, core_1.info)(`Total file count: ${this.totalNumberOfFilesToProcess} ---- Processed file #${this.processedCount} (${percentage.slice(0, percentage.indexOf('.') + 2)}%)`); }, this.displayFrequencyInMilliseconds); } // if there is a large file that is being uploaded in chunks, this is used to display extra information about the status of the upload updateLargeFileStatus(fileName, chunkStartIndex, chunkEndIndex, totalUploadFileSize) { // display 1 decimal place without any rounding const percentage = this.formatPercentage(chunkEndIndex, totalUploadFileSize); - core_1.info(`Uploaded ${fileName} (${percentage.slice(0, percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); + (0, core_1.info)(`Uploaded ${fileName} (${percentage.slice(0, 
percentage.indexOf('.') + 2)}%) bytes ${chunkStartIndex}:${chunkEndIndex}`); } stop() { if (this.totalFileStatus) { @@ -1248,7 +1270,11 @@ exports.StatusReporter = StatusReporter; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1261,7 +1287,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1286,19 +1312,34 @@ exports.createGZipFileInBuffer = exports.createGZipFileOnDisk = void 0; const fs = __importStar(__nccwpck_require__(7147)); const zlib = __importStar(__nccwpck_require__(9796)); const util_1 = __nccwpck_require__(3837); -const stat = util_1.promisify(fs.stat); +const stat = (0, util_1.promisify)(fs.stat); /** * GZipping certain files that are already compressed will likely not yield further size reductions. Creating large temporary gzip * files then will just waste a lot of time before ultimately being discarded (especially for very large files). 
* If any of these types of files are encountered then on-disk gzip creation will be skipped and the original file will be uploaded as-is */ const gzipExemptFileExtensions = [ + '.gz', '.gzip', + '.tgz', + '.taz', + '.Z', + '.taZ', + '.bz2', + '.tbz', + '.tbz2', + '.tz2', + '.lz', + '.lzma', + '.tlz', + '.lzo', + '.xz', + '.txz', + '.zst', + '.zstd', + '.tzst', '.zip', - '.tar.lz', - '.tar.gz', - '.tar.bz2', - '.7z' + '.7z' // 7ZIP ]; /** * Creates a Gzip compressed file of an original file at the provided temporary filepath location @@ -1327,7 +1368,7 @@ function createGZipFileOnDisk(originalFilePath, tempFilePath) { outputStream.on('error', error => { // eslint-disable-next-line no-console console.log(error); - reject; + reject(error); }); }); }); @@ -1341,22 +1382,29 @@ exports.createGZipFileOnDisk = createGZipFileOnDisk; function createGZipFileInBuffer(originalFilePath) { return __awaiter(this, void 0, void 0, function* () { return new Promise((resolve) => __awaiter(this, void 0, void 0, function* () { - var e_1, _a; + var _a, e_1, _b, _c; const inputStream = fs.createReadStream(originalFilePath); const gzip = zlib.createGzip(); inputStream.pipe(gzip); // read stream into buffer, using experimental async iterators see https://github.com/nodejs/readable-stream/issues/403#issuecomment-479069043 const chunks = []; try { - for (var gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), !gzip_1_1.done;) { - const chunk = gzip_1_1.value; - chunks.push(chunk); + for (var _d = true, gzip_1 = __asyncValues(gzip), gzip_1_1; gzip_1_1 = yield gzip_1.next(), _a = gzip_1_1.done, !_a;) { + _c = gzip_1_1.value; + _d = false; + try { + const chunk = _c; + chunks.push(chunk); + } + finally { + _d = true; + } } } catch (e_1_1) { e_1 = { error: e_1_1 }; } finally { try { - if (gzip_1_1 && !gzip_1_1.done && (_a = gzip_1.return)) yield _a.call(gzip_1); + if (!_d && !_a && (_b = gzip_1.return)) yield _b.call(gzip_1); } finally { if (e_1) throw e_1.error; } } @@ -1376,7 +1424,11 @@ exports.createGZipFileInBuffer = createGZipFileInBuffer; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1389,7 +1441,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1418,10 +1470,10 @@ const http_client_1 = __nccwpck_require__(6255); const http_manager_1 = __nccwpck_require__(6527); const upload_gzip_1 = __nccwpck_require__(606); const requestUtils_1 = __nccwpck_require__(755); -const stat = util_1.promisify(fs.stat); +const stat = (0, util_1.promisify)(fs.stat); class UploadHttpClient { constructor() { - this.uploadHttpManager = new http_manager_1.HttpManager(config_variables_1.getUploadFileConcurrency(), '@actions/artifact-upload'); + this.uploadHttpManager = new http_manager_1.HttpManager((0, config_variables_1.getUploadFileConcurrency)(), '@actions/artifact-upload'); this.statusReporter = new status_reporter_1.StatusReporter(10000); } /** @@ -1437,28 +1489,30 @@ class UploadHttpClient { }; // calculate retention period if (options && options.retentionDays) { - const maxRetentionStr = config_variables_1.getRetentionDays(); - parameters.RetentionDays = utils_1.getProperRetention(options.retentionDays, maxRetentionStr); + const maxRetentionStr = (0, config_variables_1.getRetentionDays)(); + parameters.RetentionDays = (0, utils_1.getProperRetention)(options.retentionDays, maxRetentionStr); } const data = JSON.stringify(parameters, null, 2); - const artifactUrl = utils_1.getArtifactUrl(); + const artifactUrl = (0, utils_1.getArtifactUrl)(); // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.uploadHttpManager.getClient(0); - const headers = utils_1.getUploadHeaders('application/json', false); + const headers = (0, utils_1.getUploadHeaders)('application/json', false); // Extra information to display when a particular HTTP code is returned // If a 403 is returned when trying to create a file container, the customer has exceeded // their storage quota so no new artifact containers can be created const customErrorMessages = new Map([ [ http_client_1.HttpCodes.Forbidden, - 'Artifact storage quota has been hit. Unable to upload any new artifacts' + (0, config_variables_1.isGhes)() + ? 'Please reference [Enabling GitHub Actions for GitHub Enterprise Server](https://docs.github.com/en/enterprise-server@3.8/admin/github-actions/enabling-github-actions-for-github-enterprise-server) to ensure Actions storage is configured correctly.' + : 'Artifact storage quota has been hit. Unable to upload any new artifacts' ], [ http_client_1.HttpCodes.BadRequest, `The artifact name ${artifactName} is not valid. 
Request URL ${artifactUrl}` ] ]); - const response = yield requestUtils_1.retryHttpClientRequest('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages); + const response = yield (0, requestUtils_1.retryHttpClientRequest)('Create Artifact Container', () => __awaiter(this, void 0, void 0, function* () { return client.post(artifactUrl, data, headers); }), customErrorMessages); const body = yield response.readBody(); return JSON.parse(body); }); @@ -1471,8 +1525,8 @@ class UploadHttpClient { */ uploadArtifactToFileContainer(uploadUrl, filesToUpload, options) { return __awaiter(this, void 0, void 0, function* () { - const FILE_CONCURRENCY = config_variables_1.getUploadFileConcurrency(); - const MAX_CHUNK_SIZE = config_variables_1.getUploadChunkSize(); + const FILE_CONCURRENCY = (0, config_variables_1.getUploadFileConcurrency)(); + const MAX_CHUNK_SIZE = (0, config_variables_1.getUploadChunkSize)(); core.debug(`File Concurrency: ${FILE_CONCURRENCY}, and Chunk Size: ${MAX_CHUNK_SIZE}`); const parameters = []; // by default, file uploads will continue if there is an error unless specified differently in the options @@ -1562,7 +1616,7 @@ class UploadHttpClient { // with named pipes the file size is reported as zero in that case don't read the file in memory if (!isFIFO && totalFileSize < 65536) { core.debug(`${parameters.file} is less than 64k in size. Creating a gzip file in-memory to potentially reduce the upload size`); - const buffer = yield upload_gzip_1.createGZipFileInBuffer(parameters.file); + const buffer = yield (0, upload_gzip_1.createGZipFileInBuffer)(parameters.file); // An open stream is needed in the event of a failure and we need to retry. If a NodeJS.ReadableStream is directly passed in, // it will not properly get reset to the start of the stream if a chunk upload needs to be retried let openUploadStream; @@ -1602,7 +1656,7 @@ class UploadHttpClient { const tempFile = yield tmp.file(); core.debug(`${parameters.file} is greater than 64k in size. 
Creating a gzip file on-disk ${tempFile.path} to potentially reduce the upload size`); // create a GZip file of the original file being uploaded, the original file should not be modified in any way - uploadFileSize = yield upload_gzip_1.createGZipFileOnDisk(parameters.file, tempFile.path); + uploadFileSize = yield (0, upload_gzip_1.createGZipFileOnDisk)(parameters.file, tempFile.path); let uploadFilePath = tempFile.path; // compression did not help with size reduction, use the original file for upload and delete the temp GZip file // for named pipes totalFileSize is zero, this assumes compression did help @@ -1675,22 +1729,22 @@ class UploadHttpClient { uploadChunk(httpClientIndex, resourceUrl, openStream, start, end, uploadFileSize, isGzip, totalFileSize) { return __awaiter(this, void 0, void 0, function* () { // open a new stream and read it to compute the digest - const digest = yield utils_1.digestForStream(openStream()); + const digest = yield (0, utils_1.digestForStream)(openStream()); // prepare all the necessary headers before making any http call - const headers = utils_1.getUploadHeaders('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, utils_1.getContentRange(start, end, uploadFileSize), digest); + const headers = (0, utils_1.getUploadHeaders)('application/octet-stream', true, isGzip, totalFileSize, end - start + 1, (0, utils_1.getContentRange)(start, end, uploadFileSize), digest); const uploadChunkRequest = () => __awaiter(this, void 0, void 0, function* () { const client = this.uploadHttpManager.getClient(httpClientIndex); return yield client.sendStream('PUT', resourceUrl, openStream(), headers); }); let retryCount = 0; - const retryLimit = config_variables_1.getRetryLimit(); + const retryLimit = (0, config_variables_1.getRetryLimit)(); // Increments the current retry count and then checks if the retry limit has been reached // If there have been too many retries, fail so the download stops const incrementAndCheckRetryLimit = (response) => { retryCount++; if (retryCount > retryLimit) { if (response) { - utils_1.displayHttpDiagnostics(response); + (0, utils_1.displayHttpDiagnostics)(response); } core.info(`Retry limit has been reached for chunk at offset ${start} to ${resourceUrl}`); return true; @@ -1701,12 +1755,12 @@ class UploadHttpClient { this.uploadHttpManager.disposeAndReplaceClient(httpClientIndex); if (retryAfterValue) { core.info(`Backoff due to too many requests, retry #${retryCount}. Waiting for ${retryAfterValue} milliseconds before continuing the upload`); - yield utils_1.sleep(retryAfterValue); + yield (0, utils_1.sleep)(retryAfterValue); } else { - const backoffTime = utils_1.getExponentialRetryTimeInMilliseconds(retryCount); + const backoffTime = (0, utils_1.getExponentialRetryTimeInMilliseconds)(retryCount); core.info(`Exponential backoff for retry #${retryCount}. Waiting for ${backoffTime} milliseconds before continuing the upload at offset ${start}`); - yield utils_1.sleep(backoffTime); + yield (0, utils_1.sleep)(backoffTime); } core.info(`Finished backoff for retry #${retryCount}, continuing with upload`); return; @@ -1731,21 +1785,21 @@ class UploadHttpClient { // Always read the body of the response. 
There is potential for a resource leak if the body is not read which will // result in the connection remaining open along with unintended consequences when trying to dispose of the client yield response.readBody(); - if (utils_1.isSuccessStatusCode(response.message.statusCode)) { + if ((0, utils_1.isSuccessStatusCode)(response.message.statusCode)) { return true; } - else if (utils_1.isRetryableStatusCode(response.message.statusCode)) { + else if ((0, utils_1.isRetryableStatusCode)(response.message.statusCode)) { core.info(`A ${response.message.statusCode} status code has been received, will attempt to retry the upload`); if (incrementAndCheckRetryLimit(response)) { return false; } - utils_1.isThrottledStatusCode(response.message.statusCode) - ? yield backOff(utils_1.tryGetRetryAfterValueTimeInMilliseconds(response.message.headers)) + (0, utils_1.isThrottledStatusCode)(response.message.statusCode) + ? yield backOff((0, utils_1.tryGetRetryAfterValueTimeInMilliseconds)(response.message.headers)) : yield backOff(); } else { core.error(`Unexpected response. Unable to upload chunk to ${resourceUrl}`); - utils_1.displayHttpDiagnostics(response); + (0, utils_1.displayHttpDiagnostics)(response); return false; } } @@ -1758,14 +1812,14 @@ class UploadHttpClient { */ patchArtifactSize(size, artifactName) { return __awaiter(this, void 0, void 0, function* () { - const resourceUrl = new url_1.URL(utils_1.getArtifactUrl()); + const resourceUrl = new url_1.URL((0, utils_1.getArtifactUrl)()); resourceUrl.searchParams.append('artifactName', artifactName); const parameters = { Size: size }; const data = JSON.stringify(parameters, null, 2); core.debug(`URL is ${resourceUrl.toString()}`); // use the first client from the httpManager, `keep-alive` is not used so the connection will close immediately const client = this.uploadHttpManager.getClient(0); - const headers = utils_1.getUploadHeaders('application/json', false); + const headers = (0, utils_1.getUploadHeaders)('application/json', false); // Extra information to display when a particular HTTP code is returned const customErrorMessages = new Map([ [ @@ -1774,7 +1828,7 @@ class UploadHttpClient { ] ]); // TODO retry for all possible response codes, the artifact upload is pretty much complete so it at all costs we should try to finish this - const response = yield requestUtils_1.retryHttpClientRequest('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); + const response = yield (0, requestUtils_1.retryHttpClientRequest)('Finalize artifact upload', () => __awaiter(this, void 0, void 0, function* () { return client.patch(resourceUrl.toString(), data, headers); }), customErrorMessages); yield response.readBody(); core.debug(`Artifact ${artifactName} has been successfully uploaded, total size in bytes: ${size}`); }); @@ -1792,7 +1846,11 @@ exports.UploadHttpClient = UploadHttpClient; var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); }) : (function(o, m, k, k2) { if (k2 === undefined) k2 = k; o[k2] = m[k]; @@ -1805,7 +1863,7 @@ var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? ( var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); __setModuleDefault(result, mod); return result; }; @@ -1827,12 +1885,12 @@ function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { if (!fs.existsSync(rootDirectory)) { throw new Error(`Provided rootDirectory ${rootDirectory} does not exist`); } - if (!fs.lstatSync(rootDirectory).isDirectory()) { + if (!fs.statSync(rootDirectory).isDirectory()) { throw new Error(`Provided rootDirectory ${rootDirectory} is not a valid directory`); } // Normalize and resolve, this allows for either absolute or relative paths to be used - rootDirectory = path_1.normalize(rootDirectory); - rootDirectory = path_1.resolve(rootDirectory); + rootDirectory = (0, path_1.normalize)(rootDirectory); + rootDirectory = (0, path_1.resolve)(rootDirectory); /* Example to demonstrate behavior @@ -1856,16 +1914,16 @@ function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { if (!fs.existsSync(file)) { throw new Error(`File ${file} does not exist`); } - if (!fs.lstatSync(file).isDirectory()) { + if (!fs.statSync(file).isDirectory()) { // Normalize and resolve, this allows for either absolute or relative paths to be used - file = path_1.normalize(file); - file = path_1.resolve(file); + file = (0, path_1.normalize)(file); + file = (0, path_1.resolve)(file); if (!file.startsWith(rootDirectory)) { throw new Error(`The rootDirectory: ${rootDirectory} is not a parent directory of the file: ${file}`); } // Check for forbidden characters in file paths that will be rejected during upload const uploadPath = file.replace(rootDirectory, ''); - path_and_artifact_name_validation_1.checkArtifactFilePath(uploadPath); + (0, path_and_artifact_name_validation_1.checkArtifactFilePath)(uploadPath); /* uploadFilePath denotes where the file will be uploaded in the file container on the server. During a run, if multiple artifacts are uploaded, they will all be saved in the same container. 
The artifact name is used as the root directory in the container to separate and distinguish uploaded artifacts @@ -1878,12 +1936,12 @@ function getUploadSpecification(artifactName, rootDirectory, artifactFiles) { */ specifications.push({ absoluteFilePath: file, - uploadFilePath: path_1.join(artifactName, uploadPath) + uploadFilePath: (0, path_1.join)(artifactName, uploadPath) }); } else { // Directories are rejected by the server during upload - core_1.debug(`Removing ${file} from rawSearchResults because it is a directory`); + (0, core_1.debug)(`Removing ${file} from rawSearchResults because it is a directory`); } } return specifications; @@ -1928,10 +1986,10 @@ function getExponentialRetryTimeInMilliseconds(retryCount) { throw new Error('RetryCount should not be negative'); } else if (retryCount === 0) { - return config_variables_1.getInitialRetryIntervalInMilliseconds(); + return (0, config_variables_1.getInitialRetryIntervalInMilliseconds)(); } - const minTime = config_variables_1.getInitialRetryIntervalInMilliseconds() * config_variables_1.getRetryMultiplier() * retryCount; - const maxTime = minTime * config_variables_1.getRetryMultiplier(); + const minTime = (0, config_variables_1.getInitialRetryIntervalInMilliseconds)() * (0, config_variables_1.getRetryMultiplier)() * retryCount; + const maxTime = minTime * (0, config_variables_1.getRetryMultiplier)(); // returns a random number between the minTime (inclusive) and the maxTime (exclusive) return Math.trunc(Math.random() * (maxTime - minTime) + minTime); } @@ -1999,13 +2057,13 @@ function tryGetRetryAfterValueTimeInMilliseconds(headers) { if (headers['retry-after']) { const retryTime = Number(headers['retry-after']); if (!isNaN(retryTime)) { - core_1.info(`Retry-After header is present with a value of ${retryTime}`); + (0, core_1.info)(`Retry-After header is present with a value of ${retryTime}`); return retryTime * 1000; } - core_1.info(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`); + (0, core_1.info)(`Returned retry-after header value: ${retryTime} is non-numeric and cannot be used`); return undefined; } - core_1.info(`No retry-after header was found. Dumping all headers for diagnostic purposes`); + (0, core_1.info)(`No retry-after header was found. 
Dumping all headers for diagnostic purposes`); // eslint-disable-next-line no-console console.log(headers); return undefined; @@ -2089,13 +2147,13 @@ function getUploadHeaders(contentType, isKeepAlive, isGzip, uncompressedLength, exports.getUploadHeaders = getUploadHeaders; function createHttpClient(userAgent) { return new http_client_1.HttpClient(userAgent, [ - new auth_1.BearerCredentialHandler(config_variables_1.getRuntimeToken()) + new auth_1.BearerCredentialHandler((0, config_variables_1.getRuntimeToken)()) ]); } exports.createHttpClient = createHttpClient; function getArtifactUrl() { - const artifactUrl = `${config_variables_1.getRuntimeUrl()}_apis/pipelines/workflows/${config_variables_1.getWorkFlowRunId()}/artifacts?api-version=${getApiVersion()}`; - core_1.debug(`Artifact Url: ${artifactUrl}`); + const artifactUrl = `${(0, config_variables_1.getRuntimeUrl)()}_apis/pipelines/workflows/${(0, config_variables_1.getWorkFlowRunId)()}/artifacts?api-version=${getApiVersion()}`; + (0, core_1.debug)(`Artifact Url: ${artifactUrl}`); return artifactUrl; } exports.getArtifactUrl = getArtifactUrl; @@ -2109,7 +2167,7 @@ exports.getArtifactUrl = getArtifactUrl; * Other information such as the headers, the response code and message might be useful, so this is displayed. */ function displayHttpDiagnostics(response) { - core_1.info(`##### Begin Diagnostic HTTP information ##### + (0, core_1.info)(`##### Begin Diagnostic HTTP information ##### Status Code: ${response.message.statusCode} Status Message: ${response.message.statusMessage} Header Information: ${JSON.stringify(response.message.headers, undefined, 2)} @@ -2137,7 +2195,7 @@ exports.createEmptyFilesForArtifact = createEmptyFilesForArtifact; function getFileSize(filePath) { return __awaiter(this, void 0, void 0, function* () { const stats = yield fs_1.promises.stat(filePath); - core_1.debug(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`); + (0, core_1.debug)(`${filePath} size:(${stats.size}) blksize:(${stats.blksize}) blocks:(${stats.blocks})`); return stats.size; }); } @@ -2156,7 +2214,7 @@ function getProperRetention(retentionInput, retentionSetting) { if (retentionSetting) { const maxRetention = parseInt(retentionSetting); if (!isNaN(maxRetention) && maxRetention < retention) { - core_1.warning(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`); + (0, core_1.warning)(`Retention days is greater than the max value allowed by the repository setting, reduce retention to ${maxRetention} days`); retention = maxRetention; } } @@ -2749,7 +2807,7 @@ class OidcClient { .catch(error => { throw new Error(`Failed to get ID Token. \n Error Code : ${error.statusCode}\n - Error Message: ${error.result.message}`); + Error Message: ${error.message}`); }); const id_token = (_a = res.result) === null || _a === void 0 ? 
void 0 : _a.value; if (!id_token) { @@ -3920,259 +3978,6 @@ class ExecState extends events.EventEmitter { /***/ }), -/***/ 4087: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.Context = void 0; -const fs_1 = __nccwpck_require__(7147); -const os_1 = __nccwpck_require__(2037); -class Context { - /** - * Hydrate the context from the environment - */ - constructor() { - var _a, _b, _c; - this.payload = {}; - if (process.env.GITHUB_EVENT_PATH) { - if ((0, fs_1.existsSync)(process.env.GITHUB_EVENT_PATH)) { - this.payload = JSON.parse((0, fs_1.readFileSync)(process.env.GITHUB_EVENT_PATH, { encoding: 'utf8' })); - } - else { - const path = process.env.GITHUB_EVENT_PATH; - process.stdout.write(`GITHUB_EVENT_PATH ${path} does not exist${os_1.EOL}`); - } - } - this.eventName = process.env.GITHUB_EVENT_NAME; - this.sha = process.env.GITHUB_SHA; - this.ref = process.env.GITHUB_REF; - this.workflow = process.env.GITHUB_WORKFLOW; - this.action = process.env.GITHUB_ACTION; - this.actor = process.env.GITHUB_ACTOR; - this.job = process.env.GITHUB_JOB; - this.runNumber = parseInt(process.env.GITHUB_RUN_NUMBER, 10); - this.runId = parseInt(process.env.GITHUB_RUN_ID, 10); - this.apiUrl = (_a = process.env.GITHUB_API_URL) !== null && _a !== void 0 ? _a : `https://api.github.com`; - this.serverUrl = (_b = process.env.GITHUB_SERVER_URL) !== null && _b !== void 0 ? _b : `https://github.com`; - this.graphqlUrl = - (_c = process.env.GITHUB_GRAPHQL_URL) !== null && _c !== void 0 ? _c : `https://api.github.com/graphql`; - } - get issue() { - const payload = this.payload; - return Object.assign(Object.assign({}, this.repo), { number: (payload.issue || payload.pull_request || payload).number }); - } - get repo() { - if (process.env.GITHUB_REPOSITORY) { - const [owner, repo] = process.env.GITHUB_REPOSITORY.split('/'); - return { owner, repo }; - } - if (this.payload.repository) { - return { - owner: this.payload.repository.owner.login, - repo: this.payload.repository.name - }; - } - throw new Error("context.repo requires a GITHUB_REPOSITORY environment variable like 'owner/repo'"); - } -} -exports.Context = Context; -//# sourceMappingURL=context.js.map - -/***/ }), - -/***/ 5438: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokit = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const utils_1 = __nccwpck_require__(3030); -exports.context = new Context.Context(); -/** - * Returns a hydrated octokit ready to use for GitHub Actions - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokit(token, options, ...additionalPlugins) { - const GitHubWithPlugins = utils_1.GitHub.plugin(...additionalPlugins); - return new GitHubWithPlugins((0, utils_1.getOctokitOptions)(token, options)); -} -exports.getOctokit = getOctokit; -//# sourceMappingURL=github.js.map - -/***/ }), - -/***/ 7914: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? 
resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getApiBaseUrl = exports.getProxyFetch = exports.getProxyAgentDispatcher = exports.getProxyAgent = exports.getAuthString = void 0; -const httpClient = __importStar(__nccwpck_require__(6255)); -const undici_1 = __nccwpck_require__(1773); -function getAuthString(token, options) { - if (!token && !options.auth) { - throw new Error('Parameter token or opts.auth is required'); - } - else if (token && options.auth) { - throw new Error('Parameters token and opts.auth may not both be specified'); - } - return typeof options.auth === 'string' ? options.auth : `token ${token}`; -} -exports.getAuthString = getAuthString; -function getProxyAgent(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgent(destinationUrl); -} -exports.getProxyAgent = getProxyAgent; -function getProxyAgentDispatcher(destinationUrl) { - const hc = new httpClient.HttpClient(); - return hc.getAgentDispatcher(destinationUrl); -} -exports.getProxyAgentDispatcher = getProxyAgentDispatcher; -function getProxyFetch(destinationUrl) { - const httpDispatcher = getProxyAgentDispatcher(destinationUrl); - const proxyFetch = (url, opts) => __awaiter(this, void 0, void 0, function* () { - return (0, undici_1.fetch)(url, Object.assign(Object.assign({}, opts), { dispatcher: httpDispatcher })); - }); - return proxyFetch; -} -exports.getProxyFetch = getProxyFetch; -function getApiBaseUrl() { - return process.env['GITHUB_API_URL'] || 'https://api.github.com'; -} -exports.getApiBaseUrl = getApiBaseUrl; -//# sourceMappingURL=utils.js.map - -/***/ }), - -/***/ 3030: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - var desc = Object.getOwnPropertyDescriptor(m, k); - if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { - desc = { enumerable: true, get: function() { return m[k]; } }; - } - Object.defineProperty(o, k2, desc); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getOctokitOptions = exports.GitHub = exports.defaults = exports.context = void 0; -const Context = __importStar(__nccwpck_require__(4087)); -const Utils = __importStar(__nccwpck_require__(7914)); -// octokit + plugins -const core_1 = __nccwpck_require__(6762); -const plugin_rest_endpoint_methods_1 = __nccwpck_require__(3044); -const plugin_paginate_rest_1 = __nccwpck_require__(4193); -exports.context = new Context.Context(); -const baseUrl = Utils.getApiBaseUrl(); -exports.defaults = { - baseUrl, - request: { - agent: Utils.getProxyAgent(baseUrl), - fetch: Utils.getProxyFetch(baseUrl) - } -}; -exports.GitHub = core_1.Octokit.plugin(plugin_rest_endpoint_methods_1.restEndpointMethods, plugin_paginate_rest_1.paginateRest).defaults(exports.defaults); -/** - * Convience function to correctly format Octokit Options to pass into the constructor. - * - * @param token the repo PAT or GITHUB_TOKEN - * @param options other options to set - */ -function getOctokitOptions(token, options) { - const opts = Object.assign({}, options || {}); // Shallow clone - don't mutate the object provided by the caller - // Auth - const auth = Utils.getAuthString(token, opts); - if (auth) { - opts.auth = auth; - } - return opts; -} -exports.getOctokitOptions = getOctokitOptions; -//# sourceMappingURL=utils.js.map - -/***/ }), - /***/ 5526: /***/ (function(__unused_webpack_module, exports) { @@ -4828,7 +4633,7 @@ class HttpClient { } const usingSsl = parsedUrl.protocol === 'https:'; proxyAgent = new undici_1.ProxyAgent(Object.assign({ uri: proxyUrl.href, pipelining: !this._keepAlive ? 0 : 1 }, ((proxyUrl.username || proxyUrl.password) && { - token: `${proxyUrl.username}:${proxyUrl.password}` + token: `Basic ${Buffer.from(`${proxyUrl.username}:${proxyUrl.password}`).toString('base64')}` }))); this._proxyAgentDispatcher = proxyAgent; if (usingSsl && this._ignoreSslError) { @@ -4942,11 +4747,11 @@ function getProxyUrl(reqUrl) { })(); if (proxyVar) { try { - return new URL(proxyVar); + return new DecodedURL(proxyVar); } catch (_a) { if (!proxyVar.startsWith('http://') && !proxyVar.startsWith('https://')) - return new URL(`http://${proxyVar}`); + return new DecodedURL(`http://${proxyVar}`); } } else { @@ -5005,201 +4810,214 @@ function isLoopbackAddress(host) { hostLower.startsWith('[::1]') || hostLower.startsWith('[0:0:0:0:0:0:0:1]')); } -//# sourceMappingURL=proxy.js.map - -/***/ }), - -/***/ 1962: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); -}) : (function(o, m, k, k2) { - if (k2 === undefined) k2 = k; - o[k2] = m[k]; -})); -var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { - Object.defineProperty(o, "default", { enumerable: true, value: v }); -}) : function(o, v) { - o["default"] = v; -}); -var __importStar = (this && this.__importStar) || function (mod) { - if (mod && mod.__esModule) return mod; - var result = {}; - if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); - __setModuleDefault(result, mod); - return result; -}; -var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { - function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } - return new (P || (P = Promise))(function (resolve, reject) { - function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } - function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } - function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } - step((generator = generator.apply(thisArg, _arguments || [])).next()); - }); -}; -var _a; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; -const fs = __importStar(__nccwpck_require__(7147)); -const path = __importStar(__nccwpck_require__(1017)); -_a = fs.promises -// export const {open} = 'fs' -, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; -// export const {open} = 'fs' -exports.IS_WINDOWS = process.platform === 'win32'; -// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 -exports.UV_FS_O_EXLOCK = 0x10000000; -exports.READONLY = fs.constants.O_RDONLY; -function exists(fsPath) { - return __awaiter(this, void 0, void 0, function* () { - try { - yield exports.stat(fsPath); - } - catch (err) { - if (err.code === 'ENOENT') { - return false; - } - throw err; - } - return true; - }); -} -exports.exists = exists; -function isDirectory(fsPath, useStat = false) { - return __awaiter(this, void 0, void 0, function* () { - const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); - return stats.isDirectory(); - }); -} -exports.isDirectory = isDirectory; -/** - * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: - * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). - */ -function isRooted(p) { - p = normalizeSeparators(p); - if (!p) { - throw new Error('isRooted() parameter "p" cannot be empty'); +class DecodedURL extends URL { + constructor(url, base) { + super(url, base); + this._decodedUsername = decodeURIComponent(super.username); + this._decodedPassword = decodeURIComponent(super.password); } - if (exports.IS_WINDOWS) { - return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello - ); // e.g. 
C: or C:\hello + get username() { + return this._decodedUsername; } - return p.startsWith('/'); -} -exports.isRooted = isRooted; -/** - * Best effort attempt to determine whether a file exists and is executable. - * @param filePath file path to check - * @param extensions additional file extensions to try - * @return if file exists and is executable, returns the file path. otherwise empty string. - */ -function tryGetExecutablePath(filePath, extensions) { - return __awaiter(this, void 0, void 0, function* () { - let stats = undefined; - try { - // test file exists - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // on Windows, test for valid extension - const upperExt = path.extname(filePath).toUpperCase(); - if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { - return filePath; - } - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - // try each extension - const originalFilePath = filePath; - for (const extension of extensions) { - filePath = originalFilePath + extension; - stats = undefined; - try { - stats = yield exports.stat(filePath); - } - catch (err) { - if (err.code !== 'ENOENT') { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); - } - } - if (stats && stats.isFile()) { - if (exports.IS_WINDOWS) { - // preserve the case of the actual file (since an extension was appended) - try { - const directory = path.dirname(filePath); - const upperName = path.basename(filePath).toUpperCase(); - for (const actualName of yield exports.readdir(directory)) { - if (upperName === actualName.toUpperCase()) { - filePath = path.join(directory, actualName); - break; - } - } - } - catch (err) { - // eslint-disable-next-line no-console - console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); - } - return filePath; - } - else { - if (isUnixExecutable(stats)) { - return filePath; - } - } - } - } - return ''; - }); -} -exports.tryGetExecutablePath = tryGetExecutablePath; -function normalizeSeparators(p) { - p = p || ''; - if (exports.IS_WINDOWS) { - // convert slashes on Windows - p = p.replace(/\//g, '\\'); - // remove redundant slashes - return p.replace(/\\\\+/g, '\\'); + get password() { + return this._decodedPassword; } - // remove redundant slashes - return p.replace(/\/\/+/g, '/'); -} -// on Mac/Linux, test the execute bit -// R W X R W X R W X -// 256 128 64 32 16 8 4 2 1 -function isUnixExecutable(stats) { - return ((stats.mode & 1) > 0 || - ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || - ((stats.mode & 64) > 0 && stats.uid === process.getuid())); -} -// Get the path of cmd.exe in windows -function getCmdPath() { - var _a; - return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? _a : `cmd.exe`; } -exports.getCmdPath = getCmdPath; -//# sourceMappingURL=io-util.js.map +//# sourceMappingURL=proxy.js.map /***/ }), -/***/ 7436: +/***/ 1962: +/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { + +"use strict"; + +var __createBinding = (this && this.__createBinding) || (Object.create ? 
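// --- Illustrative aside (not part of the bundled diff above) ---
// The DecodedURL subclass added above appears to exist because WHATWG URL keeps proxy
// credentials percent-encoded, which would corrupt the Basic Proxy-Authorization value.
// A rough sketch of the difference (proxy.local is a hypothetical host):
const raw = new URL('http://user%40corp:p%40ss@proxy.local:8080');
console.log(raw.username, raw.password); // 'user%40corp' 'p%40ss'  (still percent-encoded)

// DecodedURL decodes once in its constructor, so the header is built from the real
// credentials, matching the `Basic ${Buffer.from(...).toString('base64')}` change
// earlier in this diff:
const basic = Buffer.from(
  `${decodeURIComponent(raw.username)}:${decodeURIComponent(raw.password)}`
).toString('base64'); // base64 of 'user@corp:p@ss'
// --- end aside ---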
(function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + Object.defineProperty(o, k2, { enumerable: true, get: function() { return m[k]; } }); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { + function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } + return new (P || (P = Promise))(function (resolve, reject) { + function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } + function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } + function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } + step((generator = generator.apply(thisArg, _arguments || [])).next()); + }); +}; +var _a; +Object.defineProperty(exports, "__esModule", ({ value: true })); +exports.getCmdPath = exports.tryGetExecutablePath = exports.isRooted = exports.isDirectory = exports.exists = exports.READONLY = exports.UV_FS_O_EXLOCK = exports.IS_WINDOWS = exports.unlink = exports.symlink = exports.stat = exports.rmdir = exports.rm = exports.rename = exports.readlink = exports.readdir = exports.open = exports.mkdir = exports.lstat = exports.copyFile = exports.chmod = void 0; +const fs = __importStar(__nccwpck_require__(7147)); +const path = __importStar(__nccwpck_require__(1017)); +_a = fs.promises +// export const {open} = 'fs' +, exports.chmod = _a.chmod, exports.copyFile = _a.copyFile, exports.lstat = _a.lstat, exports.mkdir = _a.mkdir, exports.open = _a.open, exports.readdir = _a.readdir, exports.readlink = _a.readlink, exports.rename = _a.rename, exports.rm = _a.rm, exports.rmdir = _a.rmdir, exports.stat = _a.stat, exports.symlink = _a.symlink, exports.unlink = _a.unlink; +// export const {open} = 'fs' +exports.IS_WINDOWS = process.platform === 'win32'; +// See https://github.com/nodejs/node/blob/d0153aee367422d0858105abec186da4dff0a0c5/deps/uv/include/uv/win.h#L691 +exports.UV_FS_O_EXLOCK = 0x10000000; +exports.READONLY = fs.constants.O_RDONLY; +function exists(fsPath) { + return __awaiter(this, void 0, void 0, function* () { + try { + yield exports.stat(fsPath); + } + catch (err) { + if (err.code === 'ENOENT') { + return false; + } + throw err; + } + return true; + }); +} +exports.exists = exists; +function isDirectory(fsPath, useStat = false) { + return __awaiter(this, void 0, void 0, function* () { + const stats = useStat ? yield exports.stat(fsPath) : yield exports.lstat(fsPath); + return stats.isDirectory(); + }); +} +exports.isDirectory = isDirectory; +/** + * On OSX/Linux, true if path starts with '/'. On Windows, true for paths like: + * \, \hello, \\hello\share, C:, and C:\hello (and corresponding alternate separator cases). 
+ */ +function isRooted(p) { + p = normalizeSeparators(p); + if (!p) { + throw new Error('isRooted() parameter "p" cannot be empty'); + } + if (exports.IS_WINDOWS) { + return (p.startsWith('\\') || /^[A-Z]:/i.test(p) // e.g. \ or \hello or \\hello + ); // e.g. C: or C:\hello + } + return p.startsWith('/'); +} +exports.isRooted = isRooted; +/** + * Best effort attempt to determine whether a file exists and is executable. + * @param filePath file path to check + * @param extensions additional file extensions to try + * @return if file exists and is executable, returns the file path. otherwise empty string. + */ +function tryGetExecutablePath(filePath, extensions) { + return __awaiter(this, void 0, void 0, function* () { + let stats = undefined; + try { + // test file exists + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // on Windows, test for valid extension + const upperExt = path.extname(filePath).toUpperCase(); + if (extensions.some(validExt => validExt.toUpperCase() === upperExt)) { + return filePath; + } + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + // try each extension + const originalFilePath = filePath; + for (const extension of extensions) { + filePath = originalFilePath + extension; + stats = undefined; + try { + stats = yield exports.stat(filePath); + } + catch (err) { + if (err.code !== 'ENOENT') { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine if executable file exists '${filePath}': ${err}`); + } + } + if (stats && stats.isFile()) { + if (exports.IS_WINDOWS) { + // preserve the case of the actual file (since an extension was appended) + try { + const directory = path.dirname(filePath); + const upperName = path.basename(filePath).toUpperCase(); + for (const actualName of yield exports.readdir(directory)) { + if (upperName === actualName.toUpperCase()) { + filePath = path.join(directory, actualName); + break; + } + } + } + catch (err) { + // eslint-disable-next-line no-console + console.log(`Unexpected error attempting to determine the actual case of the file '${filePath}': ${err}`); + } + return filePath; + } + else { + if (isUnixExecutable(stats)) { + return filePath; + } + } + } + } + return ''; + }); +} +exports.tryGetExecutablePath = tryGetExecutablePath; +function normalizeSeparators(p) { + p = p || ''; + if (exports.IS_WINDOWS) { + // convert slashes on Windows + p = p.replace(/\//g, '\\'); + // remove redundant slashes + return p.replace(/\\\\+/g, '\\'); + } + // remove redundant slashes + return p.replace(/\/\/+/g, '/'); +} +// on Mac/Linux, test the execute bit +// R W X R W X R W X +// 256 128 64 32 16 8 4 2 1 +function isUnixExecutable(stats) { + return ((stats.mode & 1) > 0 || + ((stats.mode & 8) > 0 && stats.gid === process.getgid()) || + ((stats.mode & 64) > 0 && stats.uid === process.getuid())); +} +// Get the path of cmd.exe in windows +function getCmdPath() { + var _a; + return (_a = process.env['COMSPEC']) !== null && _a !== void 0 ? 
_a : `cmd.exe`; +} +exports.getCmdPath = getCmdPath; +//# sourceMappingURL=io-util.js.map + +/***/ }), + +/***/ 7436: /***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { "use strict"; @@ -5505,9540 +5323,4823 @@ function copyFile(srcFile, destFile, force) { /***/ }), -/***/ 334: -/***/ ((module) => { +/***/ 6761: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; +const Utils = __nccwpck_require__(5182); +const pth = __nccwpck_require__(1017); +const ZipEntry = __nccwpck_require__(4057); +const ZipFile = __nccwpck_require__(7744); -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); +const get_Bool = (...val) => Utils.findLast(val, (c) => typeof c === "boolean"); +const get_Str = (...val) => Utils.findLast(val, (c) => typeof c === "string"); +const get_Fun = (...val) => Utils.findLast(val, (c) => typeof c === "function"); -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - createTokenAuth: () => createTokenAuth -}); -module.exports = __toCommonJS(dist_src_exports); +const defaultOptions = { + // option "noSort" : if true it disables files sorting + noSort: false, + // read entries during load (initial loading may be slower) + readEntries: false, + // default method is none + method: Utils.Constants.NONE, + // file system + fs: null +}; -// pkg/dist-src/auth.js -var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; -var REGEX_IS_INSTALLATION = /^ghs_/; -var REGEX_IS_USER_TO_SERVER = /^ghu_/; -async function auth(token) { - const isApp = token.split(/\./).length === 3; - const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); - const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); - const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? "user-to-server" : "oauth"; - return { - type: "token", - token, - tokenType - }; -} +module.exports = function (/**String*/ input, /** object */ options) { + let inBuffer = null; -// pkg/dist-src/with-authorization-prefix.js -function withAuthorizationPrefix(token) { - if (token.split(/\./).length === 3) { - return `bearer ${token}`; - } - return `token ${token}`; -} + // create object based default options, allowing them to be overwritten + const opts = Object.assign(Object.create(null), defaultOptions); -// pkg/dist-src/hook.js -async function hook(token, request, route, parameters) { - const endpoint = request.endpoint.merge( - route, - parameters - ); - endpoint.headers.authorization = withAuthorizationPrefix(token); - return request(endpoint); -} + // test input variable + if (input && "object" === typeof input) { + // if value is not buffer we accept it to be object with options + if (!(input instanceof Uint8Array)) { + Object.assign(opts, input); + input = opts.input ? 
opts.input : undefined; + if (opts.input) delete opts.input; + } -// pkg/dist-src/index.js -var createTokenAuth = function createTokenAuth2(token) { - if (!token) { - throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); - } - if (typeof token !== "string") { - throw new Error( - "[@octokit/auth-token] Token passed to createTokenAuth is not a string" - ); - } - token = token.replace(/^(token|bearer) +/i, ""); - return Object.assign(auth.bind(null, token), { - hook: hook.bind(null, token) - }); -}; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); + // if input is buffer + if (Buffer.isBuffer(input)) { + inBuffer = input; + opts.method = Utils.Constants.BUFFER; + input = undefined; + } + } + // assign options + Object.assign(opts, options); -/***/ }), + // instanciate utils filesystem + const filetools = new Utils(opts); -/***/ 6762: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (typeof opts.decoder !== "object" || typeof opts.decoder.encode !== "function" || typeof opts.decoder.decode !== "function") { + opts.decoder = Utils.decoder; + } -"use strict"; + // if input is file name we retrieve its content + if (input && "string" === typeof input) { + // load zip file + if (filetools.fs.existsSync(input)) { + opts.method = Utils.Constants.FILE; + opts.filename = input; + inBuffer = filetools.fs.readFileSync(input); + } else { + throw Utils.Errors.INVALID_FILENAME(); + } + } -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + // create variable + const _zip = new ZipFile(inBuffer, opts); -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - Octokit: () => Octokit -}); -module.exports = __toCommonJS(dist_src_exports); -var import_universal_user_agent = __nccwpck_require__(5030); -var import_before_after_hook = __nccwpck_require__(3682); -var import_request = __nccwpck_require__(6234); -var import_graphql = __nccwpck_require__(8467); -var import_auth_token = __nccwpck_require__(334); + const { canonical, sanitize, zipnamefix } = Utils; -// pkg/dist-src/version.js -var VERSION = "5.1.0"; + function getEntry(/**Object*/ entry) { + if (entry && _zip) { + var item; + // If entry was given as a file name + if (typeof entry === "string") item = _zip.getEntry(pth.posix.normalize(entry)); + // if entry was given as a ZipEntry object + if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName); -// pkg/dist-src/index.js -var noop = () => { -}; -var consoleWarn = console.warn.bind(console); -var consoleError = console.error.bind(console); -var userAgentTrail = `octokit-core.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; -var Octokit = class { - static { - this.VERSION = 
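// --- Illustrative aside (not part of the bundled diff above) ---
// Module 334 above is the bundled @octokit/auth-token. Used standalone it classifies the
// token and produces the matching authorization prefix; a minimal sketch (describeToken
// is a hypothetical helper):
const { createTokenAuth } = require('@octokit/auth-token');

async function describeToken(token) {
  const auth = createTokenAuth(token);   // throws if token is missing or not a string
  const { tokenType } = await auth();    // 'app' | 'installation' | 'user-to-server' | 'oauth'
  return tokenType;
}
// e.g. a 'ghs_...' token resolves to 'installation', per REGEX_IS_INSTALLATION above.
// --- end aside ---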
VERSION; - } - static defaults(defaults) { - const OctokitWithDefaults = class extends this { - constructor(...args) { - const options = args[0] || {}; - if (typeof defaults === "function") { - super(defaults(options)); - return; + if (item) { + return item; + } } - super( - Object.assign( - {}, - defaults, - options, - options.userAgent && defaults.userAgent ? { - userAgent: `${options.userAgent} ${defaults.userAgent}` - } : null - ) - ); - } - }; - return OctokitWithDefaults; - } - static { - this.plugins = []; - } - /** - * Attach a plugin (or many) to your Octokit instance. - * - * @example - * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) - */ - static plugin(...newPlugins) { - const currentPlugins = this.plugins; - const NewOctokit = class extends this { - static { - this.plugins = currentPlugins.concat( - newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) - ); - } - }; - return NewOctokit; - } - constructor(options = {}) { - const hook = new import_before_after_hook.Collection(); - const requestDefaults = { - baseUrl: import_request.request.endpoint.DEFAULTS.baseUrl, - headers: {}, - request: Object.assign({}, options.request, { - // @ts-ignore internal usage only, no need to type - hook: hook.bind(null, "request") - }), - mediaType: { - previews: [], - format: "" - } - }; - requestDefaults.headers["user-agent"] = options.userAgent ? `${options.userAgent} ${userAgentTrail}` : userAgentTrail; - if (options.baseUrl) { - requestDefaults.baseUrl = options.baseUrl; - } - if (options.previews) { - requestDefaults.mediaType.previews = options.previews; - } - if (options.timeZone) { - requestDefaults.headers["time-zone"] = options.timeZone; + return null; } - this.request = import_request.request.defaults(requestDefaults); - this.graphql = (0, import_graphql.withCustomRequest)(this.request).defaults(requestDefaults); - this.log = Object.assign( - { - debug: noop, - info: noop, - warn: consoleWarn, - error: consoleError - }, - options.log - ); - this.hook = hook; - if (!options.authStrategy) { - if (!options.auth) { - this.auth = async () => ({ - type: "unauthenticated" - }); - } else { - const auth = (0, import_auth_token.createTokenAuth)(options.auth); - hook.wrap("request", auth.hook); - this.auth = auth; - } - } else { - const { authStrategy, ...otherOptions } = options; - const auth = authStrategy( - Object.assign( - { - request: this.request, - log: this.log, - // we pass the current octokit instance as well as its constructor options - // to allow for authentication strategies that return a new octokit instance - // that shares the same internal state as the current one. The original - // requirement for this was the "event-octokit" authentication strategy - // of https://github.com/probot/octokit-auth-probot. 
- octokit: this, - octokitOptions: otherOptions - }, - options.auth - ) - ); - hook.wrap("request", auth.hook); - this.auth = auth; + + function fixPath(zipPath) { + const { join, normalize, sep } = pth.posix; + // convert windows file separators and normalize + return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep)); } - const classConstructor = this.constructor; - for (let i = 0; i < classConstructor.plugins.length; ++i) { - Object.assign(this, classConstructor.plugins[i](this, options)); + + function filenameFilter(filterfn) { + if (filterfn instanceof RegExp) { + // if filter is RegExp wrap it + return (function (rx) { + return function (filename) { + return rx.test(filename); + }; + })(filterfn); + } else if ("function" !== typeof filterfn) { + // if filter is not function we will replace it + return () => true; + } + return filterfn; } - } -}; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); + // keep last character on folders + const relativePath = (local, entry) => { + let lastChar = entry.slice(-1); + lastChar = lastChar === filetools.sep ? filetools.sep : ""; + return pth.relative(local, entry) + lastChar; + }; -/***/ }), + return { + /** + * Extracts the given entry from the archive and returns the content as a Buffer object + * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry + * @param {Buffer|string} [pass] - password + * @return Buffer or Null in case of error + */ + readFile: function (entry, pass) { + var item = getEntry(entry); + return (item && item.getData(pass)) || null; + }, -/***/ 9440: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Returns how many child elements has on entry (directories) on files it is always 0 + * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry + * @returns {integer} + */ + childCount: function (entry) { + const item = getEntry(entry); + if (item) { + return _zip.getChildCount(item); + } + }, -"use strict"; + /** + * Asynchronous readFile + * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry + * @param {callback} callback + * + * @return Buffer or Null in case of error + */ + readFileAsync: function (entry, callback) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(callback); + } else { + callback(null, "getEntry failed for:" + entry); + } + }, -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); + /** + * Extracts the given entry from the archive and returns the content as plain text in the given encoding + * @param {ZipEntry|string} entry - ZipEntry object or String with the full path of the entry + * @param {string} encoding - Optional. 
If no encoding is specified utf8 is used + * + * @return String + */ + readAsText: function (entry, encoding) { + var item = getEntry(entry); + if (item) { + var data = item.getData(); + if (data && data.length) { + return data.toString(encoding || "utf8"); + } + } + return ""; + }, -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - endpoint: () => endpoint -}); -module.exports = __toCommonJS(dist_src_exports); + /** + * Asynchronous readAsText + * @param {ZipEntry|string} entry ZipEntry object or String with the full path of the entry + * @param {callback} callback + * @param {string} [encoding] - Optional. If no encoding is specified utf8 is used + * + * @return String + */ + readAsTextAsync: function (entry, callback, encoding) { + var item = getEntry(entry); + if (item) { + item.getDataAsync(function (data, err) { + if (err) { + callback(data, err); + return; + } -// pkg/dist-src/defaults.js -var import_universal_user_agent = __nccwpck_require__(5030); + if (data && data.length) { + callback(data.toString(encoding || "utf8")); + } else { + callback(""); + } + }); + } else { + callback(""); + } + }, -// pkg/dist-src/version.js -var VERSION = "9.0.4"; - -// pkg/dist-src/defaults.js -var userAgent = `octokit-endpoint.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}`; -var DEFAULTS = { - method: "GET", - baseUrl: "https://api.github.com", - headers: { - accept: "application/vnd.github.v3+json", - "user-agent": userAgent - }, - mediaType: { - format: "" - } -}; - -// pkg/dist-src/util/lowercase-keys.js -function lowercaseKeys(object) { - if (!object) { - return {}; - } - return Object.keys(object).reduce((newObj, key) => { - newObj[key.toLowerCase()] = object[key]; - return newObj; - }, {}); -} - -// pkg/dist-src/util/is-plain-object.js -function isPlainObject(value) { - if (typeof value !== "object" || value === null) - return false; - if (Object.prototype.toString.call(value) !== "[object Object]") - return false; - const proto = Object.getPrototypeOf(value); - if (proto === null) - return true; - const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; - return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); -} - -// pkg/dist-src/util/merge-deep.js -function mergeDeep(defaults, options) { - const result = Object.assign({}, defaults); - Object.keys(options).forEach((key) => { - if (isPlainObject(options[key])) { - if (!(key in defaults)) - Object.assign(result, { [key]: options[key] }); - else - result[key] = mergeDeep(defaults[key], options[key]); - } else { - Object.assign(result, { [key]: options[key] }); - } - }); - return result; -} - -// pkg/dist-src/util/remove-undefined-properties.js -function removeUndefinedProperties(obj) { - for (const key in obj) { - if (obj[key] === void 0) { - delete obj[key]; - } - } - return obj; -} - -// pkg/dist-src/merge.js -function merge(defaults, route, options) { - if (typeof route === "string") { - let [method, url] = route.split(" "); - options = Object.assign(url ? 
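// --- Illustrative aside (not part of the bundled diff above) ---
// Module 6761 is the bundled adm-zip entry point. Outside the bundle, reading a report out
// of a downloaded scan artifact would look roughly like this (both file names are
// hypothetical, not taken from the action):
const AdmZip = require('adm-zip');

const zip = new AdmZip('zap_scan.zip');            // string input -> Constants.FILE branch above
const text = zip.readAsText('report_json.json');   // utf8 by default, '' when the entry is absent
const buf = zip.readFile('report_html.html');      // Buffer, or null on error
// --- end aside ---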
{ method, url } : { url: method }, options); - } else { - options = Object.assign({}, route); - } - options.headers = lowercaseKeys(options.headers); - removeUndefinedProperties(options); - removeUndefinedProperties(options.headers); - const mergedOptions = mergeDeep(defaults || {}, options); - if (options.url === "/graphql") { - if (defaults && defaults.mediaType.previews?.length) { - mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( - (preview) => !mergedOptions.mediaType.previews.includes(preview) - ).concat(mergedOptions.mediaType.previews); - } - mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); - } - return mergedOptions; -} - -// pkg/dist-src/util/add-query-parameters.js -function addQueryParameters(url, parameters) { - const separator = /\?/.test(url) ? "&" : "?"; - const names = Object.keys(parameters); - if (names.length === 0) { - return url; - } - return url + separator + names.map((name) => { - if (name === "q") { - return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); - } - return `${name}=${encodeURIComponent(parameters[name])}`; - }).join("&"); -} - -// pkg/dist-src/util/extract-url-variable-names.js -var urlVariableRegex = /\{[^}]+\}/g; -function removeNonChars(variableName) { - return variableName.replace(/^\W+|\W+$/g, "").split(/,/); -} -function extractUrlVariableNames(url) { - const matches = url.match(urlVariableRegex); - if (!matches) { - return []; - } - return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); -} - -// pkg/dist-src/util/omit.js -function omit(object, keysToOmit) { - const result = { __proto__: null }; - for (const key of Object.keys(object)) { - if (keysToOmit.indexOf(key) === -1) { - result[key] = object[key]; - } - } - return result; -} - -// pkg/dist-src/util/url-template.js -function encodeReserved(str) { - return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { - if (!/%[0-9A-Fa-f]/.test(part)) { - part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); - } - return part; - }).join(""); -} -function encodeUnreserved(str) { - return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { - return "%" + c.charCodeAt(0).toString(16).toUpperCase(); - }); -} -function encodeValue(operator, value, key) { - value = operator === "+" || operator === "#" ? encodeReserved(value) : encodeUnreserved(value); - if (key) { - return encodeUnreserved(key) + "=" + value; - } else { - return value; - } -} -function isDefined(value) { - return value !== void 0 && value !== null; -} -function isKeyOperator(operator) { - return operator === ";" || operator === "&" || operator === "?"; -} -function getValues(context, operator, key, modifier) { - var value = context[key], result = []; - if (isDefined(value) && value !== "") { - if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { - value = value.toString(); - if (modifier && modifier !== "*") { - value = value.substring(0, parseInt(modifier, 10)); - } - result.push( - encodeValue(operator, value, isKeyOperator(operator) ? key : "") - ); - } else { - if (modifier === "*") { - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - result.push( - encodeValue(operator, value2, isKeyOperator(operator) ? 
key : "") - ); - }); - } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - result.push(encodeValue(operator, value[k], k)); - } - }); - } - } else { - const tmp = []; - if (Array.isArray(value)) { - value.filter(isDefined).forEach(function(value2) { - tmp.push(encodeValue(operator, value2)); - }); - } else { - Object.keys(value).forEach(function(k) { - if (isDefined(value[k])) { - tmp.push(encodeUnreserved(k)); - tmp.push(encodeValue(operator, value[k].toString())); - } - }); - } - if (isKeyOperator(operator)) { - result.push(encodeUnreserved(key) + "=" + tmp.join(",")); - } else if (tmp.length !== 0) { - result.push(tmp.join(",")); - } - } - } - } else { - if (operator === ";") { - if (isDefined(value)) { - result.push(encodeUnreserved(key)); - } - } else if (value === "" && (operator === "&" || operator === "?")) { - result.push(encodeUnreserved(key) + "="); - } else if (value === "") { - result.push(""); - } - } - return result; -} -function parseUrl(template) { - return { - expand: expand.bind(null, template) - }; -} -function expand(template, context) { - var operators = ["+", "#", ".", "/", ";", "?", "&"]; - template = template.replace( - /\{([^\{\}]+)\}|([^\{\}]+)/g, - function(_, expression, literal) { - if (expression) { - let operator = ""; - const values = []; - if (operators.indexOf(expression.charAt(0)) !== -1) { - operator = expression.charAt(0); - expression = expression.substr(1); - } - expression.split(/,/g).forEach(function(variable) { - var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); - values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); - }); - if (operator && operator !== "+") { - var separator = ","; - if (operator === "?") { - separator = "&"; - } else if (operator !== "#") { - separator = operator; - } - return (values.length !== 0 ? operator : "") + values.join(separator); - } else { - return values.join(","); - } - } else { - return encodeReserved(literal); - } - } - ); - if (template === "/") { - return template; - } else { - return template.replace(/\/$/, ""); - } -} - -// pkg/dist-src/parse.js -function parse(options) { - let method = options.method.toUpperCase(); - let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); - let headers = Object.assign({}, options.headers); - let body; - let parameters = omit(options, [ - "method", - "baseUrl", - "url", - "headers", - "request", - "mediaType" - ]); - const urlVariableNames = extractUrlVariableNames(url); - url = parseUrl(url).expand(parameters); - if (!/^http/.test(url)) { - url = options.baseUrl + url; - } - const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); - const remainingParameters = omit(parameters, omittedParameters); - const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); - if (!isBinaryRequest) { - if (options.mediaType.format) { - headers.accept = headers.accept.split(/,/).map( - (format) => format.replace( - /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, - `application/vnd$1$2.${options.mediaType.format}` - ) - ).join(","); - } - if (url.endsWith("/graphql")) { - if (options.mediaType.previews?.length) { - const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; - headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { - const format = options.mediaType.format ? 
`.${options.mediaType.format}` : "+json"; - return `application/vnd.github.${preview}-preview${format}`; - }).join(","); - } - } - } - if (["GET", "HEAD"].includes(method)) { - url = addQueryParameters(url, remainingParameters); - } else { - if ("data" in remainingParameters) { - body = remainingParameters.data; - } else { - if (Object.keys(remainingParameters).length) { - body = remainingParameters; - } - } - } - if (!headers["content-type"] && typeof body !== "undefined") { - headers["content-type"] = "application/json; charset=utf-8"; - } - if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { - body = ""; - } - return Object.assign( - { method, url, headers }, - typeof body !== "undefined" ? { body } : null, - options.request ? { request: options.request } : null - ); -} - -// pkg/dist-src/endpoint-with-defaults.js -function endpointWithDefaults(defaults, route, options) { - return parse(merge(defaults, route, options)); -} - -// pkg/dist-src/with-defaults.js -function withDefaults(oldDefaults, newDefaults) { - const DEFAULTS2 = merge(oldDefaults, newDefaults); - const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); - return Object.assign(endpoint2, { - DEFAULTS: DEFAULTS2, - defaults: withDefaults.bind(null, DEFAULTS2), - merge: merge.bind(null, DEFAULTS2), - parse - }); -} - -// pkg/dist-src/index.js -var endpoint = withDefaults(null, DEFAULTS); -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 8467: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - GraphqlResponseError: () => GraphqlResponseError, - graphql: () => graphql2, - withCustomRequest: () => withCustomRequest -}); -module.exports = __toCommonJS(dist_src_exports); -var import_request3 = __nccwpck_require__(6234); -var import_universal_user_agent = __nccwpck_require__(5030); - -// pkg/dist-src/version.js -var VERSION = "7.0.2"; - -// pkg/dist-src/with-defaults.js -var import_request2 = __nccwpck_require__(6234); - -// pkg/dist-src/graphql.js -var import_request = __nccwpck_require__(6234); - -// pkg/dist-src/error.js -function _buildMessageForResponseErrors(data) { - return `Request failed due to following response errors: -` + data.errors.map((e) => ` - ${e.message}`).join("\n"); -} -var GraphqlResponseError = class extends Error { - constructor(request2, headers, response) { - super(_buildMessageForResponseErrors(response)); - this.request = request2; - this.headers = headers; - this.response = response; - this.name = "GraphqlResponseError"; - this.errors = response.errors; - this.data = response.data; - if (Error.captureStackTrace) { - 
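// --- Illustrative aside (not part of the bundled diff above) ---
// Module 9440 is the bundled @octokit/endpoint: merge() plus parse() turn a route string
// and parameters into a plain request description. A rough sketch of that behaviour:
const { endpoint } = require('@octokit/endpoint');

const req = endpoint('GET /repos/{owner}/{repo}/issues', {
  owner: 'zaproxy',
  repo: 'action-baseline',
  state: 'open', // not a URL template variable, so addQueryParameters appends ?state=open
});
// req.method === 'GET'
// req.url === 'https://api.github.com/repos/zaproxy/action-baseline/issues?state=open'
// --- end aside ---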
Error.captureStackTrace(this, this.constructor); - } - } -}; - -// pkg/dist-src/graphql.js -var NON_VARIABLE_OPTIONS = [ - "method", - "baseUrl", - "url", - "headers", - "request", - "query", - "mediaType" -]; -var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; -var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; -function graphql(request2, query, options) { - if (options) { - if (typeof query === "string" && "query" in options) { - return Promise.reject( - new Error(`[@octokit/graphql] "query" cannot be used as variable name`) - ); - } - for (const key in options) { - if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) - continue; - return Promise.reject( - new Error( - `[@octokit/graphql] "${key}" cannot be used as variable name` - ) - ); - } - } - const parsedOptions = typeof query === "string" ? Object.assign({ query }, options) : query; - const requestOptions = Object.keys( - parsedOptions - ).reduce((result, key) => { - if (NON_VARIABLE_OPTIONS.includes(key)) { - result[key] = parsedOptions[key]; - return result; - } - if (!result.variables) { - result.variables = {}; - } - result.variables[key] = parsedOptions[key]; - return result; - }, {}); - const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; - if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { - requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); - } - return request2(requestOptions).then((response) => { - if (response.data.errors) { - const headers = {}; - for (const key of Object.keys(response.headers)) { - headers[key] = response.headers[key]; - } - throw new GraphqlResponseError( - requestOptions, - headers, - response.data - ); - } - return response.data.data; - }); -} - -// pkg/dist-src/with-defaults.js -function withDefaults(request2, newDefaults) { - const newRequest = request2.defaults(newDefaults); - const newApi = (query, options) => { - return graphql(newRequest, query, options); - }; - return Object.assign(newApi, { - defaults: withDefaults.bind(null, newRequest), - endpoint: newRequest.endpoint - }); -} - -// pkg/dist-src/index.js -var graphql2 = withDefaults(import_request3.request, { - headers: { - "user-agent": `octokit-graphql.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` - }, - method: "POST", - url: "/graphql" -}); -function withCustomRequest(customRequest) { - return withDefaults(customRequest, { - method: "POST", - url: "/graphql" - }); -} -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 4193: -/***/ ((module) => { - -"use strict"; - -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - composePaginateRest: () => composePaginateRest, - isPaginatingEndpoint: () => isPaginatingEndpoint, - paginateRest: () 
=> paginateRest, - paginatingEndpoints: () => paginatingEndpoints -}); -module.exports = __toCommonJS(dist_src_exports); - -// pkg/dist-src/version.js -var VERSION = "9.2.1"; - -// pkg/dist-src/normalize-paginated-list-response.js -function normalizePaginatedListResponse(response) { - if (!response.data) { - return { - ...response, - data: [] - }; - } - const responseNeedsNormalization = "total_count" in response.data && !("url" in response.data); - if (!responseNeedsNormalization) - return response; - const incompleteResults = response.data.incomplete_results; - const repositorySelection = response.data.repository_selection; - const totalCount = response.data.total_count; - delete response.data.incomplete_results; - delete response.data.repository_selection; - delete response.data.total_count; - const namespaceKey = Object.keys(response.data)[0]; - const data = response.data[namespaceKey]; - response.data = data; - if (typeof incompleteResults !== "undefined") { - response.data.incomplete_results = incompleteResults; - } - if (typeof repositorySelection !== "undefined") { - response.data.repository_selection = repositorySelection; - } - response.data.total_count = totalCount; - return response; -} - -// pkg/dist-src/iterator.js -function iterator(octokit, route, parameters) { - const options = typeof route === "function" ? route.endpoint(parameters) : octokit.request.endpoint(route, parameters); - const requestMethod = typeof route === "function" ? route : octokit.request; - const method = options.method; - const headers = options.headers; - let url = options.url; - return { - [Symbol.asyncIterator]: () => ({ - async next() { - if (!url) - return { done: true }; - try { - const response = await requestMethod({ method, url, headers }); - const normalizedResponse = normalizePaginatedListResponse(response); - url = ((normalizedResponse.headers.link || "").match( - /<([^>]+)>;\s*rel="next"/ - ) || [])[1]; - return { value: normalizedResponse }; - } catch (error) { - if (error.status !== 409) - throw error; - url = ""; - return { - value: { - status: 200, - headers: {}, - data: [] - } - }; - } - } - }) - }; -} - -// pkg/dist-src/paginate.js -function paginate(octokit, route, parameters, mapFn) { - if (typeof parameters === "function") { - mapFn = parameters; - parameters = void 0; - } - return gather( - octokit, - [], - iterator(octokit, route, parameters)[Symbol.asyncIterator](), - mapFn - ); -} -function gather(octokit, results, iterator2, mapFn) { - return iterator2.next().then((result) => { - if (result.done) { - return results; - } - let earlyExit = false; - function done() { - earlyExit = true; - } - results = results.concat( - mapFn ? 
mapFn(result.value, done) : result.value.data - ); - if (earlyExit) { - return results; - } - return gather(octokit, results, iterator2, mapFn); - }); -} - -// pkg/dist-src/compose-paginate.js -var composePaginateRest = Object.assign(paginate, { - iterator -}); - -// pkg/dist-src/generated/paginating-endpoints.js -var paginatingEndpoints = [ - "GET /advisories", - "GET /app/hook/deliveries", - "GET /app/installation-requests", - "GET /app/installations", - "GET /assignments/{assignment_id}/accepted_assignments", - "GET /classrooms", - "GET /classrooms/{classroom_id}/assignments", - "GET /enterprises/{enterprise}/dependabot/alerts", - "GET /enterprises/{enterprise}/secret-scanning/alerts", - "GET /events", - "GET /gists", - "GET /gists/public", - "GET /gists/starred", - "GET /gists/{gist_id}/comments", - "GET /gists/{gist_id}/commits", - "GET /gists/{gist_id}/forks", - "GET /installation/repositories", - "GET /issues", - "GET /licenses", - "GET /marketplace_listing/plans", - "GET /marketplace_listing/plans/{plan_id}/accounts", - "GET /marketplace_listing/stubbed/plans", - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts", - "GET /networks/{owner}/{repo}/events", - "GET /notifications", - "GET /organizations", - "GET /orgs/{org}/actions/cache/usage-by-repository", - "GET /orgs/{org}/actions/permissions/repositories", - "GET /orgs/{org}/actions/runners", - "GET /orgs/{org}/actions/secrets", - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories", - "GET /orgs/{org}/actions/variables", - "GET /orgs/{org}/actions/variables/{name}/repositories", - "GET /orgs/{org}/blocks", - "GET /orgs/{org}/code-scanning/alerts", - "GET /orgs/{org}/codespaces", - "GET /orgs/{org}/codespaces/secrets", - "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories", - "GET /orgs/{org}/copilot/billing/seats", - "GET /orgs/{org}/dependabot/alerts", - "GET /orgs/{org}/dependabot/secrets", - "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories", - "GET /orgs/{org}/events", - "GET /orgs/{org}/failed_invitations", - "GET /orgs/{org}/hooks", - "GET /orgs/{org}/hooks/{hook_id}/deliveries", - "GET /orgs/{org}/installations", - "GET /orgs/{org}/invitations", - "GET /orgs/{org}/invitations/{invitation_id}/teams", - "GET /orgs/{org}/issues", - "GET /orgs/{org}/members", - "GET /orgs/{org}/members/{username}/codespaces", - "GET /orgs/{org}/migrations", - "GET /orgs/{org}/migrations/{migration_id}/repositories", - "GET /orgs/{org}/organization-roles/{role_id}/teams", - "GET /orgs/{org}/organization-roles/{role_id}/users", - "GET /orgs/{org}/outside_collaborators", - "GET /orgs/{org}/packages", - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - "GET /orgs/{org}/personal-access-token-requests", - "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories", - "GET /orgs/{org}/personal-access-tokens", - "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories", - "GET /orgs/{org}/projects", - "GET /orgs/{org}/properties/values", - "GET /orgs/{org}/public_members", - "GET /orgs/{org}/repos", - "GET /orgs/{org}/rulesets", - "GET /orgs/{org}/rulesets/rule-suites", - "GET /orgs/{org}/secret-scanning/alerts", - "GET /orgs/{org}/security-advisories", - "GET /orgs/{org}/teams", - "GET /orgs/{org}/teams/{team_slug}/discussions", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions", - "GET 
/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions", - "GET /orgs/{org}/teams/{team_slug}/invitations", - "GET /orgs/{org}/teams/{team_slug}/members", - "GET /orgs/{org}/teams/{team_slug}/projects", - "GET /orgs/{org}/teams/{team_slug}/repos", - "GET /orgs/{org}/teams/{team_slug}/teams", - "GET /projects/columns/{column_id}/cards", - "GET /projects/{project_id}/collaborators", - "GET /projects/{project_id}/columns", - "GET /repos/{owner}/{repo}/actions/artifacts", - "GET /repos/{owner}/{repo}/actions/caches", - "GET /repos/{owner}/{repo}/actions/organization-secrets", - "GET /repos/{owner}/{repo}/actions/organization-variables", - "GET /repos/{owner}/{repo}/actions/runners", - "GET /repos/{owner}/{repo}/actions/runs", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs", - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs", - "GET /repos/{owner}/{repo}/actions/secrets", - "GET /repos/{owner}/{repo}/actions/variables", - "GET /repos/{owner}/{repo}/actions/workflows", - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs", - "GET /repos/{owner}/{repo}/activity", - "GET /repos/{owner}/{repo}/assignees", - "GET /repos/{owner}/{repo}/branches", - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations", - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs", - "GET /repos/{owner}/{repo}/code-scanning/alerts", - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - "GET /repos/{owner}/{repo}/code-scanning/analyses", - "GET /repos/{owner}/{repo}/codespaces", - "GET /repos/{owner}/{repo}/codespaces/devcontainers", - "GET /repos/{owner}/{repo}/codespaces/secrets", - "GET /repos/{owner}/{repo}/collaborators", - "GET /repos/{owner}/{repo}/comments", - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/commits", - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments", - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls", - "GET /repos/{owner}/{repo}/commits/{ref}/check-runs", - "GET /repos/{owner}/{repo}/commits/{ref}/check-suites", - "GET /repos/{owner}/{repo}/commits/{ref}/status", - "GET /repos/{owner}/{repo}/commits/{ref}/statuses", - "GET /repos/{owner}/{repo}/contributors", - "GET /repos/{owner}/{repo}/dependabot/alerts", - "GET /repos/{owner}/{repo}/dependabot/secrets", - "GET /repos/{owner}/{repo}/deployments", - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses", - "GET /repos/{owner}/{repo}/environments", - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies", - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps", - "GET /repos/{owner}/{repo}/events", - "GET /repos/{owner}/{repo}/forks", - "GET /repos/{owner}/{repo}/hooks", - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries", - "GET /repos/{owner}/{repo}/invitations", - "GET /repos/{owner}/{repo}/issues", - "GET /repos/{owner}/{repo}/issues/comments", - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/issues/events", - "GET /repos/{owner}/{repo}/issues/{issue_number}/comments", - "GET /repos/{owner}/{repo}/issues/{issue_number}/events", - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels", - "GET /repos/{owner}/{repo}/issues/{issue_number}/reactions", - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline", - "GET /repos/{owner}/{repo}/keys", - "GET 
/repos/{owner}/{repo}/labels", - "GET /repos/{owner}/{repo}/milestones", - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels", - "GET /repos/{owner}/{repo}/notifications", - "GET /repos/{owner}/{repo}/pages/builds", - "GET /repos/{owner}/{repo}/projects", - "GET /repos/{owner}/{repo}/pulls", - "GET /repos/{owner}/{repo}/pulls/comments", - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/commits", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/files", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews", - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments", - "GET /repos/{owner}/{repo}/releases", - "GET /repos/{owner}/{repo}/releases/{release_id}/assets", - "GET /repos/{owner}/{repo}/releases/{release_id}/reactions", - "GET /repos/{owner}/{repo}/rules/branches/{branch}", - "GET /repos/{owner}/{repo}/rulesets", - "GET /repos/{owner}/{repo}/rulesets/rule-suites", - "GET /repos/{owner}/{repo}/secret-scanning/alerts", - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations", - "GET /repos/{owner}/{repo}/security-advisories", - "GET /repos/{owner}/{repo}/stargazers", - "GET /repos/{owner}/{repo}/subscribers", - "GET /repos/{owner}/{repo}/tags", - "GET /repos/{owner}/{repo}/teams", - "GET /repos/{owner}/{repo}/topics", - "GET /repositories", - "GET /repositories/{repository_id}/environments/{environment_name}/secrets", - "GET /repositories/{repository_id}/environments/{environment_name}/variables", - "GET /search/code", - "GET /search/commits", - "GET /search/issues", - "GET /search/labels", - "GET /search/repositories", - "GET /search/topics", - "GET /search/users", - "GET /teams/{team_id}/discussions", - "GET /teams/{team_id}/discussions/{discussion_number}/comments", - "GET /teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions", - "GET /teams/{team_id}/discussions/{discussion_number}/reactions", - "GET /teams/{team_id}/invitations", - "GET /teams/{team_id}/members", - "GET /teams/{team_id}/projects", - "GET /teams/{team_id}/repos", - "GET /teams/{team_id}/teams", - "GET /user/blocks", - "GET /user/codespaces", - "GET /user/codespaces/secrets", - "GET /user/emails", - "GET /user/followers", - "GET /user/following", - "GET /user/gpg_keys", - "GET /user/installations", - "GET /user/installations/{installation_id}/repositories", - "GET /user/issues", - "GET /user/keys", - "GET /user/marketplace_purchases", - "GET /user/marketplace_purchases/stubbed", - "GET /user/memberships/orgs", - "GET /user/migrations", - "GET /user/migrations/{migration_id}/repositories", - "GET /user/orgs", - "GET /user/packages", - "GET /user/packages/{package_type}/{package_name}/versions", - "GET /user/public_emails", - "GET /user/repos", - "GET /user/repository_invitations", - "GET /user/social_accounts", - "GET /user/ssh_signing_keys", - "GET /user/starred", - "GET /user/subscriptions", - "GET /user/teams", - "GET /users", - "GET /users/{username}/events", - "GET /users/{username}/events/orgs/{org}", - "GET /users/{username}/events/public", - "GET /users/{username}/followers", - "GET /users/{username}/following", - "GET /users/{username}/gists", - "GET /users/{username}/gpg_keys", - "GET /users/{username}/keys", - "GET /users/{username}/orgs", - "GET /users/{username}/packages", - "GET /users/{username}/projects", - "GET /users/{username}/received_events", - "GET 
/users/{username}/received_events/public", - "GET /users/{username}/repos", - "GET /users/{username}/social_accounts", - "GET /users/{username}/ssh_signing_keys", - "GET /users/{username}/starred", - "GET /users/{username}/subscriptions" -]; - -// pkg/dist-src/paginating-endpoints.js -function isPaginatingEndpoint(arg) { - if (typeof arg === "string") { - return paginatingEndpoints.includes(arg); - } else { - return false; - } -} - -// pkg/dist-src/index.js -function paginateRest(octokit) { - return { - paginate: Object.assign(paginate.bind(null, octokit), { - iterator: iterator.bind(null, octokit) - }) - }; -} -paginateRest.VERSION = VERSION; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 3044: -/***/ ((module) => { - -"use strict"; - -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - legacyRestEndpointMethods: () => legacyRestEndpointMethods, - restEndpointMethods: () => restEndpointMethods -}); -module.exports = __toCommonJS(dist_src_exports); - -// pkg/dist-src/version.js -var VERSION = "10.4.1"; - -// pkg/dist-src/generated/endpoints.js -var Endpoints = { - actions: { - addCustomLabelsToSelfHostedRunnerForOrg: [ - "POST /orgs/{org}/actions/runners/{runner_id}/labels" - ], - addCustomLabelsToSelfHostedRunnerForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - approveWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/approve" - ], - cancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/cancel" - ], - createEnvironmentVariable: [ - "POST /repositories/{repository_id}/environments/{environment_name}/variables" - ], - createOrUpdateEnvironmentSecret: [ - "PUT /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - createOrUpdateOrgSecret: ["PUT /orgs/{org}/actions/secrets/{secret_name}"], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - createOrgVariable: ["POST /orgs/{org}/actions/variables"], - createRegistrationTokenForOrg: [ - "POST /orgs/{org}/actions/runners/registration-token" - ], - createRegistrationTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/registration-token" - ], - createRemoveTokenForOrg: ["POST /orgs/{org}/actions/runners/remove-token"], - createRemoveTokenForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/remove-token" - ], - createRepoVariable: ["POST /repos/{owner}/{repo}/actions/variables"], - createWorkflowDispatch: [ - 
"POST /repos/{owner}/{repo}/actions/workflows/{workflow_id}/dispatches" - ], - deleteActionsCacheById: [ - "DELETE /repos/{owner}/{repo}/actions/caches/{cache_id}" - ], - deleteActionsCacheByKey: [ - "DELETE /repos/{owner}/{repo}/actions/caches{?key,ref}" - ], - deleteArtifact: [ - "DELETE /repos/{owner}/{repo}/actions/artifacts/{artifact_id}" - ], - deleteEnvironmentSecret: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - deleteEnvironmentVariable: [ - "DELETE /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/actions/secrets/{secret_name}"], - deleteOrgVariable: ["DELETE /orgs/{org}/actions/variables/{name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/actions/secrets/{secret_name}" - ], - deleteRepoVariable: [ - "DELETE /repos/{owner}/{repo}/actions/variables/{name}" - ], - deleteSelfHostedRunnerFromOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}" - ], - deleteSelfHostedRunnerFromRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - deleteWorkflowRun: ["DELETE /repos/{owner}/{repo}/actions/runs/{run_id}"], - deleteWorkflowRunLogs: [ - "DELETE /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - disableSelectedRepositoryGithubActionsOrganization: [ - "DELETE /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - disableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/disable" - ], - downloadArtifact: [ - "GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/{archive_format}" - ], - downloadJobLogsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/jobs/{job_id}/logs" - ], - downloadWorkflowRunAttemptLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/logs" - ], - downloadWorkflowRunLogs: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/logs" - ], - enableSelectedRepositoryGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories/{repository_id}" - ], - enableWorkflow: [ - "PUT /repos/{owner}/{repo}/actions/workflows/{workflow_id}/enable" - ], - forceCancelWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/force-cancel" - ], - generateRunnerJitconfigForOrg: [ - "POST /orgs/{org}/actions/runners/generate-jitconfig" - ], - generateRunnerJitconfigForRepo: [ - "POST /repos/{owner}/{repo}/actions/runners/generate-jitconfig" - ], - getActionsCacheList: ["GET /repos/{owner}/{repo}/actions/caches"], - getActionsCacheUsage: ["GET /repos/{owner}/{repo}/actions/cache/usage"], - getActionsCacheUsageByRepoForOrg: [ - "GET /orgs/{org}/actions/cache/usage-by-repository" - ], - getActionsCacheUsageForOrg: ["GET /orgs/{org}/actions/cache/usage"], - getAllowedActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/selected-actions" - ], - getAllowedActionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - getArtifact: ["GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}"], - getCustomOidcSubClaimForRepo: [ - "GET /repos/{owner}/{repo}/actions/oidc/customization/sub" - ], - getEnvironmentPublicKey: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/public-key" - ], - getEnvironmentSecret: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets/{secret_name}" - ], - getEnvironmentVariable: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - 
getGithubActionsDefaultWorkflowPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions/workflow" - ], - getGithubActionsDefaultWorkflowPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/workflow" - ], - getGithubActionsPermissionsOrganization: [ - "GET /orgs/{org}/actions/permissions" - ], - getGithubActionsPermissionsRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions" - ], - getJobForWorkflowRun: ["GET /repos/{owner}/{repo}/actions/jobs/{job_id}"], - getOrgPublicKey: ["GET /orgs/{org}/actions/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/actions/secrets/{secret_name}"], - getOrgVariable: ["GET /orgs/{org}/actions/variables/{name}"], - getPendingDeploymentsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - getRepoPermissions: [ - "GET /repos/{owner}/{repo}/actions/permissions", - {}, - { renamed: ["actions", "getGithubActionsPermissionsRepository"] } - ], - getRepoPublicKey: ["GET /repos/{owner}/{repo}/actions/secrets/public-key"], - getRepoSecret: ["GET /repos/{owner}/{repo}/actions/secrets/{secret_name}"], - getRepoVariable: ["GET /repos/{owner}/{repo}/actions/variables/{name}"], - getReviewsForRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/approvals" - ], - getSelfHostedRunnerForOrg: ["GET /orgs/{org}/actions/runners/{runner_id}"], - getSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}" - ], - getWorkflow: ["GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}"], - getWorkflowAccessToRepository: [ - "GET /repos/{owner}/{repo}/actions/permissions/access" - ], - getWorkflowRun: ["GET /repos/{owner}/{repo}/actions/runs/{run_id}"], - getWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}" - ], - getWorkflowRunUsage: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/timing" - ], - getWorkflowUsage: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/timing" - ], - listArtifactsForRepo: ["GET /repos/{owner}/{repo}/actions/artifacts"], - listEnvironmentSecrets: [ - "GET /repositories/{repository_id}/environments/{environment_name}/secrets" - ], - listEnvironmentVariables: [ - "GET /repositories/{repository_id}/environments/{environment_name}/variables" - ], - listJobsForWorkflowRun: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/jobs" - ], - listJobsForWorkflowRunAttempt: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/attempts/{attempt_number}/jobs" - ], - listLabelsForSelfHostedRunnerForOrg: [ - "GET /orgs/{org}/actions/runners/{runner_id}/labels" - ], - listLabelsForSelfHostedRunnerForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - listOrgSecrets: ["GET /orgs/{org}/actions/secrets"], - listOrgVariables: ["GET /orgs/{org}/actions/variables"], - listRepoOrganizationSecrets: [ - "GET /repos/{owner}/{repo}/actions/organization-secrets" - ], - listRepoOrganizationVariables: [ - "GET /repos/{owner}/{repo}/actions/organization-variables" - ], - listRepoSecrets: ["GET /repos/{owner}/{repo}/actions/secrets"], - listRepoVariables: ["GET /repos/{owner}/{repo}/actions/variables"], - listRepoWorkflows: ["GET /repos/{owner}/{repo}/actions/workflows"], - listRunnerApplicationsForOrg: ["GET /orgs/{org}/actions/runners/downloads"], - listRunnerApplicationsForRepo: [ - "GET /repos/{owner}/{repo}/actions/runners/downloads" - ], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/actions/secrets/{secret_name}/repositories" - ], - listSelectedReposForOrgVariable: [ - 
"GET /orgs/{org}/actions/variables/{name}/repositories" - ], - listSelectedRepositoriesEnabledGithubActionsOrganization: [ - "GET /orgs/{org}/actions/permissions/repositories" - ], - listSelfHostedRunnersForOrg: ["GET /orgs/{org}/actions/runners"], - listSelfHostedRunnersForRepo: ["GET /repos/{owner}/{repo}/actions/runners"], - listWorkflowRunArtifacts: [ - "GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts" - ], - listWorkflowRuns: [ - "GET /repos/{owner}/{repo}/actions/workflows/{workflow_id}/runs" - ], - listWorkflowRunsForRepo: ["GET /repos/{owner}/{repo}/actions/runs"], - reRunJobForWorkflowRun: [ - "POST /repos/{owner}/{repo}/actions/jobs/{job_id}/rerun" - ], - reRunWorkflow: ["POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun"], - reRunWorkflowFailedJobs: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/rerun-failed-jobs" - ], - removeAllCustomLabelsFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels" - ], - removeAllCustomLabelsFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - removeCustomLabelFromSelfHostedRunnerForOrg: [ - "DELETE /orgs/{org}/actions/runners/{runner_id}/labels/{name}" - ], - removeCustomLabelFromSelfHostedRunnerForRepo: [ - "DELETE /repos/{owner}/{repo}/actions/runners/{runner_id}/labels/{name}" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/actions/secrets/{secret_name}/repositories/{repository_id}" - ], - removeSelectedRepoFromOrgVariable: [ - "DELETE /orgs/{org}/actions/variables/{name}/repositories/{repository_id}" - ], - reviewCustomGatesForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/deployment_protection_rule" - ], - reviewPendingDeploymentsForRun: [ - "POST /repos/{owner}/{repo}/actions/runs/{run_id}/pending_deployments" - ], - setAllowedActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/selected-actions" - ], - setAllowedActionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/selected-actions" - ], - setCustomLabelsForSelfHostedRunnerForOrg: [ - "PUT /orgs/{org}/actions/runners/{runner_id}/labels" - ], - setCustomLabelsForSelfHostedRunnerForRepo: [ - "PUT /repos/{owner}/{repo}/actions/runners/{runner_id}/labels" - ], - setCustomOidcSubClaimForRepo: [ - "PUT /repos/{owner}/{repo}/actions/oidc/customization/sub" - ], - setGithubActionsDefaultWorkflowPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/workflow" - ], - setGithubActionsDefaultWorkflowPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/workflow" - ], - setGithubActionsPermissionsOrganization: [ - "PUT /orgs/{org}/actions/permissions" - ], - setGithubActionsPermissionsRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/actions/secrets/{secret_name}/repositories" - ], - setSelectedReposForOrgVariable: [ - "PUT /orgs/{org}/actions/variables/{name}/repositories" - ], - setSelectedRepositoriesEnabledGithubActionsOrganization: [ - "PUT /orgs/{org}/actions/permissions/repositories" - ], - setWorkflowAccessToRepository: [ - "PUT /repos/{owner}/{repo}/actions/permissions/access" - ], - updateEnvironmentVariable: [ - "PATCH /repositories/{repository_id}/environments/{environment_name}/variables/{name}" - ], - updateOrgVariable: ["PATCH /orgs/{org}/actions/variables/{name}"], - updateRepoVariable: [ - "PATCH /repos/{owner}/{repo}/actions/variables/{name}" - ] - }, - activity: { - checkRepoIsStarredByAuthenticatedUser: ["GET 
/user/starred/{owner}/{repo}"], - deleteRepoSubscription: ["DELETE /repos/{owner}/{repo}/subscription"], - deleteThreadSubscription: [ - "DELETE /notifications/threads/{thread_id}/subscription" - ], - getFeeds: ["GET /feeds"], - getRepoSubscription: ["GET /repos/{owner}/{repo}/subscription"], - getThread: ["GET /notifications/threads/{thread_id}"], - getThreadSubscriptionForAuthenticatedUser: [ - "GET /notifications/threads/{thread_id}/subscription" - ], - listEventsForAuthenticatedUser: ["GET /users/{username}/events"], - listNotificationsForAuthenticatedUser: ["GET /notifications"], - listOrgEventsForAuthenticatedUser: [ - "GET /users/{username}/events/orgs/{org}" - ], - listPublicEvents: ["GET /events"], - listPublicEventsForRepoNetwork: ["GET /networks/{owner}/{repo}/events"], - listPublicEventsForUser: ["GET /users/{username}/events/public"], - listPublicOrgEvents: ["GET /orgs/{org}/events"], - listReceivedEventsForUser: ["GET /users/{username}/received_events"], - listReceivedPublicEventsForUser: [ - "GET /users/{username}/received_events/public" - ], - listRepoEvents: ["GET /repos/{owner}/{repo}/events"], - listRepoNotificationsForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/notifications" - ], - listReposStarredByAuthenticatedUser: ["GET /user/starred"], - listReposStarredByUser: ["GET /users/{username}/starred"], - listReposWatchedByUser: ["GET /users/{username}/subscriptions"], - listStargazersForRepo: ["GET /repos/{owner}/{repo}/stargazers"], - listWatchedReposForAuthenticatedUser: ["GET /user/subscriptions"], - listWatchersForRepo: ["GET /repos/{owner}/{repo}/subscribers"], - markNotificationsAsRead: ["PUT /notifications"], - markRepoNotificationsAsRead: ["PUT /repos/{owner}/{repo}/notifications"], - markThreadAsDone: ["DELETE /notifications/threads/{thread_id}"], - markThreadAsRead: ["PATCH /notifications/threads/{thread_id}"], - setRepoSubscription: ["PUT /repos/{owner}/{repo}/subscription"], - setThreadSubscription: [ - "PUT /notifications/threads/{thread_id}/subscription" - ], - starRepoForAuthenticatedUser: ["PUT /user/starred/{owner}/{repo}"], - unstarRepoForAuthenticatedUser: ["DELETE /user/starred/{owner}/{repo}"] - }, - apps: { - addRepoToInstallation: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}", - {}, - { renamed: ["apps", "addRepoToInstallationForAuthenticatedUser"] } - ], - addRepoToInstallationForAuthenticatedUser: [ - "PUT /user/installations/{installation_id}/repositories/{repository_id}" - ], - checkToken: ["POST /applications/{client_id}/token"], - createFromManifest: ["POST /app-manifests/{code}/conversions"], - createInstallationAccessToken: [ - "POST /app/installations/{installation_id}/access_tokens" - ], - deleteAuthorization: ["DELETE /applications/{client_id}/grant"], - deleteInstallation: ["DELETE /app/installations/{installation_id}"], - deleteToken: ["DELETE /applications/{client_id}/token"], - getAuthenticated: ["GET /app"], - getBySlug: ["GET /apps/{app_slug}"], - getInstallation: ["GET /app/installations/{installation_id}"], - getOrgInstallation: ["GET /orgs/{org}/installation"], - getRepoInstallation: ["GET /repos/{owner}/{repo}/installation"], - getSubscriptionPlanForAccount: [ - "GET /marketplace_listing/accounts/{account_id}" - ], - getSubscriptionPlanForAccountStubbed: [ - "GET /marketplace_listing/stubbed/accounts/{account_id}" - ], - getUserInstallation: ["GET /users/{username}/installation"], - getWebhookConfigForApp: ["GET /app/hook/config"], - getWebhookDelivery: ["GET 
/app/hook/deliveries/{delivery_id}"], - listAccountsForPlan: ["GET /marketplace_listing/plans/{plan_id}/accounts"], - listAccountsForPlanStubbed: [ - "GET /marketplace_listing/stubbed/plans/{plan_id}/accounts" - ], - listInstallationReposForAuthenticatedUser: [ - "GET /user/installations/{installation_id}/repositories" - ], - listInstallationRequestsForAuthenticatedApp: [ - "GET /app/installation-requests" - ], - listInstallations: ["GET /app/installations"], - listInstallationsForAuthenticatedUser: ["GET /user/installations"], - listPlans: ["GET /marketplace_listing/plans"], - listPlansStubbed: ["GET /marketplace_listing/stubbed/plans"], - listReposAccessibleToInstallation: ["GET /installation/repositories"], - listSubscriptionsForAuthenticatedUser: ["GET /user/marketplace_purchases"], - listSubscriptionsForAuthenticatedUserStubbed: [ - "GET /user/marketplace_purchases/stubbed" - ], - listWebhookDeliveries: ["GET /app/hook/deliveries"], - redeliverWebhookDelivery: [ - "POST /app/hook/deliveries/{delivery_id}/attempts" - ], - removeRepoFromInstallation: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}", - {}, - { renamed: ["apps", "removeRepoFromInstallationForAuthenticatedUser"] } - ], - removeRepoFromInstallationForAuthenticatedUser: [ - "DELETE /user/installations/{installation_id}/repositories/{repository_id}" - ], - resetToken: ["PATCH /applications/{client_id}/token"], - revokeInstallationAccessToken: ["DELETE /installation/token"], - scopeToken: ["POST /applications/{client_id}/token/scoped"], - suspendInstallation: ["PUT /app/installations/{installation_id}/suspended"], - unsuspendInstallation: [ - "DELETE /app/installations/{installation_id}/suspended" - ], - updateWebhookConfigForApp: ["PATCH /app/hook/config"] - }, - billing: { - getGithubActionsBillingOrg: ["GET /orgs/{org}/settings/billing/actions"], - getGithubActionsBillingUser: [ - "GET /users/{username}/settings/billing/actions" - ], - getGithubPackagesBillingOrg: ["GET /orgs/{org}/settings/billing/packages"], - getGithubPackagesBillingUser: [ - "GET /users/{username}/settings/billing/packages" - ], - getSharedStorageBillingOrg: [ - "GET /orgs/{org}/settings/billing/shared-storage" - ], - getSharedStorageBillingUser: [ - "GET /users/{username}/settings/billing/shared-storage" - ] - }, - checks: { - create: ["POST /repos/{owner}/{repo}/check-runs"], - createSuite: ["POST /repos/{owner}/{repo}/check-suites"], - get: ["GET /repos/{owner}/{repo}/check-runs/{check_run_id}"], - getSuite: ["GET /repos/{owner}/{repo}/check-suites/{check_suite_id}"], - listAnnotations: [ - "GET /repos/{owner}/{repo}/check-runs/{check_run_id}/annotations" - ], - listForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-runs"], - listForSuite: [ - "GET /repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs" - ], - listSuitesForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/check-suites"], - rerequestRun: [ - "POST /repos/{owner}/{repo}/check-runs/{check_run_id}/rerequest" - ], - rerequestSuite: [ - "POST /repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest" - ], - setSuitesPreferences: [ - "PATCH /repos/{owner}/{repo}/check-suites/preferences" - ], - update: ["PATCH /repos/{owner}/{repo}/check-runs/{check_run_id}"] - }, - codeScanning: { - deleteAnalysis: [ - "DELETE /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}{?confirm_delete}" - ], - getAlert: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}", - {}, - { renamedParameters: { alert_id: "alert_number" } } - ], - 
getAnalysis: [ - "GET /repos/{owner}/{repo}/code-scanning/analyses/{analysis_id}" - ], - getCodeqlDatabase: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases/{language}" - ], - getDefaultSetup: ["GET /repos/{owner}/{repo}/code-scanning/default-setup"], - getSarif: ["GET /repos/{owner}/{repo}/code-scanning/sarifs/{sarif_id}"], - listAlertInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances" - ], - listAlertsForOrg: ["GET /orgs/{org}/code-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/code-scanning/alerts"], - listAlertsInstances: [ - "GET /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}/instances", - {}, - { renamed: ["codeScanning", "listAlertInstances"] } - ], - listCodeqlDatabases: [ - "GET /repos/{owner}/{repo}/code-scanning/codeql/databases" - ], - listRecentAnalyses: ["GET /repos/{owner}/{repo}/code-scanning/analyses"], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/code-scanning/alerts/{alert_number}" - ], - updateDefaultSetup: [ - "PATCH /repos/{owner}/{repo}/code-scanning/default-setup" - ], - uploadSarif: ["POST /repos/{owner}/{repo}/code-scanning/sarifs"] - }, - codesOfConduct: { - getAllCodesOfConduct: ["GET /codes_of_conduct"], - getConductCode: ["GET /codes_of_conduct/{key}"] - }, - codespaces: { - addRepositoryForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - checkPermissionsForDevcontainer: [ - "GET /repos/{owner}/{repo}/codespaces/permissions_check" - ], - codespaceMachinesForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/machines" - ], - createForAuthenticatedUser: ["POST /user/codespaces"], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}" - ], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - createOrUpdateSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}" - ], - createWithPrForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/codespaces" - ], - createWithRepoForAuthenticatedUser: [ - "POST /repos/{owner}/{repo}/codespaces" - ], - deleteForAuthenticatedUser: ["DELETE /user/codespaces/{codespace_name}"], - deleteFromOrganization: [ - "DELETE /orgs/{org}/members/{username}/codespaces/{codespace_name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/codespaces/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - deleteSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}" - ], - exportForAuthenticatedUser: [ - "POST /user/codespaces/{codespace_name}/exports" - ], - getCodespacesForUserInOrg: [ - "GET /orgs/{org}/members/{username}/codespaces" - ], - getExportDetailsForAuthenticatedUser: [ - "GET /user/codespaces/{codespace_name}/exports/{export_id}" - ], - getForAuthenticatedUser: ["GET /user/codespaces/{codespace_name}"], - getOrgPublicKey: ["GET /orgs/{org}/codespaces/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/codespaces/secrets/{secret_name}"], - getPublicKeyForAuthenticatedUser: [ - "GET /user/codespaces/secrets/public-key" - ], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/public-key" - ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/codespaces/secrets/{secret_name}" - ], - getSecretForAuthenticatedUser: [ - "GET 
/user/codespaces/secrets/{secret_name}" - ], - listDevcontainersInRepositoryForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/devcontainers" - ], - listForAuthenticatedUser: ["GET /user/codespaces"], - listInOrganization: [ - "GET /orgs/{org}/codespaces", - {}, - { renamedParameters: { org_id: "org" } } - ], - listInRepositoryForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces" - ], - listOrgSecrets: ["GET /orgs/{org}/codespaces/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/codespaces/secrets"], - listRepositoriesForSecretForAuthenticatedUser: [ - "GET /user/codespaces/secrets/{secret_name}/repositories" - ], - listSecretsForAuthenticatedUser: ["GET /user/codespaces/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/codespaces/secrets/{secret_name}/repositories" - ], - preFlightWithRepoForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/new" - ], - publishForAuthenticatedUser: [ - "POST /user/codespaces/{codespace_name}/publish" - ], - removeRepositoryForSecretForAuthenticatedUser: [ - "DELETE /user/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/codespaces/secrets/{secret_name}/repositories/{repository_id}" - ], - repoMachinesForAuthenticatedUser: [ - "GET /repos/{owner}/{repo}/codespaces/machines" - ], - setRepositoriesForSecretForAuthenticatedUser: [ - "PUT /user/codespaces/secrets/{secret_name}/repositories" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/codespaces/secrets/{secret_name}/repositories" - ], - startForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/start"], - stopForAuthenticatedUser: ["POST /user/codespaces/{codespace_name}/stop"], - stopInOrganization: [ - "POST /orgs/{org}/members/{username}/codespaces/{codespace_name}/stop" - ], - updateForAuthenticatedUser: ["PATCH /user/codespaces/{codespace_name}"] - }, - copilot: { - addCopilotSeatsForTeams: [ - "POST /orgs/{org}/copilot/billing/selected_teams" - ], - addCopilotSeatsForUsers: [ - "POST /orgs/{org}/copilot/billing/selected_users" - ], - cancelCopilotSeatAssignmentForTeams: [ - "DELETE /orgs/{org}/copilot/billing/selected_teams" - ], - cancelCopilotSeatAssignmentForUsers: [ - "DELETE /orgs/{org}/copilot/billing/selected_users" - ], - getCopilotOrganizationDetails: ["GET /orgs/{org}/copilot/billing"], - getCopilotSeatDetailsForUser: [ - "GET /orgs/{org}/members/{username}/copilot" - ], - listCopilotSeats: ["GET /orgs/{org}/copilot/billing/seats"] - }, - dependabot: { - addSelectedRepoToOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" - ], - createOrUpdateOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}" - ], - createOrUpdateRepoSecret: [ - "PUT /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - deleteOrgSecret: ["DELETE /orgs/{org}/dependabot/secrets/{secret_name}"], - deleteRepoSecret: [ - "DELETE /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - getAlert: ["GET /repos/{owner}/{repo}/dependabot/alerts/{alert_number}"], - getOrgPublicKey: ["GET /orgs/{org}/dependabot/secrets/public-key"], - getOrgSecret: ["GET /orgs/{org}/dependabot/secrets/{secret_name}"], - getRepoPublicKey: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/public-key" - ], - getRepoSecret: [ - "GET /repos/{owner}/{repo}/dependabot/secrets/{secret_name}" - ], - listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/dependabot/alerts" - ], - listAlertsForOrg: ["GET 
/orgs/{org}/dependabot/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/dependabot/alerts"], - listOrgSecrets: ["GET /orgs/{org}/dependabot/secrets"], - listRepoSecrets: ["GET /repos/{owner}/{repo}/dependabot/secrets"], - listSelectedReposForOrgSecret: [ - "GET /orgs/{org}/dependabot/secrets/{secret_name}/repositories" - ], - removeSelectedRepoFromOrgSecret: [ - "DELETE /orgs/{org}/dependabot/secrets/{secret_name}/repositories/{repository_id}" - ], - setSelectedReposForOrgSecret: [ - "PUT /orgs/{org}/dependabot/secrets/{secret_name}/repositories" - ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/dependabot/alerts/{alert_number}" - ] - }, - dependencyGraph: { - createRepositorySnapshot: [ - "POST /repos/{owner}/{repo}/dependency-graph/snapshots" - ], - diffRange: [ - "GET /repos/{owner}/{repo}/dependency-graph/compare/{basehead}" - ], - exportSbom: ["GET /repos/{owner}/{repo}/dependency-graph/sbom"] - }, - emojis: { get: ["GET /emojis"] }, - gists: { - checkIsStarred: ["GET /gists/{gist_id}/star"], - create: ["POST /gists"], - createComment: ["POST /gists/{gist_id}/comments"], - delete: ["DELETE /gists/{gist_id}"], - deleteComment: ["DELETE /gists/{gist_id}/comments/{comment_id}"], - fork: ["POST /gists/{gist_id}/forks"], - get: ["GET /gists/{gist_id}"], - getComment: ["GET /gists/{gist_id}/comments/{comment_id}"], - getRevision: ["GET /gists/{gist_id}/{sha}"], - list: ["GET /gists"], - listComments: ["GET /gists/{gist_id}/comments"], - listCommits: ["GET /gists/{gist_id}/commits"], - listForUser: ["GET /users/{username}/gists"], - listForks: ["GET /gists/{gist_id}/forks"], - listPublic: ["GET /gists/public"], - listStarred: ["GET /gists/starred"], - star: ["PUT /gists/{gist_id}/star"], - unstar: ["DELETE /gists/{gist_id}/star"], - update: ["PATCH /gists/{gist_id}"], - updateComment: ["PATCH /gists/{gist_id}/comments/{comment_id}"] - }, - git: { - createBlob: ["POST /repos/{owner}/{repo}/git/blobs"], - createCommit: ["POST /repos/{owner}/{repo}/git/commits"], - createRef: ["POST /repos/{owner}/{repo}/git/refs"], - createTag: ["POST /repos/{owner}/{repo}/git/tags"], - createTree: ["POST /repos/{owner}/{repo}/git/trees"], - deleteRef: ["DELETE /repos/{owner}/{repo}/git/refs/{ref}"], - getBlob: ["GET /repos/{owner}/{repo}/git/blobs/{file_sha}"], - getCommit: ["GET /repos/{owner}/{repo}/git/commits/{commit_sha}"], - getRef: ["GET /repos/{owner}/{repo}/git/ref/{ref}"], - getTag: ["GET /repos/{owner}/{repo}/git/tags/{tag_sha}"], - getTree: ["GET /repos/{owner}/{repo}/git/trees/{tree_sha}"], - listMatchingRefs: ["GET /repos/{owner}/{repo}/git/matching-refs/{ref}"], - updateRef: ["PATCH /repos/{owner}/{repo}/git/refs/{ref}"] - }, - gitignore: { - getAllTemplates: ["GET /gitignore/templates"], - getTemplate: ["GET /gitignore/templates/{name}"] - }, - interactions: { - getRestrictionsForAuthenticatedUser: ["GET /user/interaction-limits"], - getRestrictionsForOrg: ["GET /orgs/{org}/interaction-limits"], - getRestrictionsForRepo: ["GET /repos/{owner}/{repo}/interaction-limits"], - getRestrictionsForYourPublicRepos: [ - "GET /user/interaction-limits", - {}, - { renamed: ["interactions", "getRestrictionsForAuthenticatedUser"] } - ], - removeRestrictionsForAuthenticatedUser: ["DELETE /user/interaction-limits"], - removeRestrictionsForOrg: ["DELETE /orgs/{org}/interaction-limits"], - removeRestrictionsForRepo: [ - "DELETE /repos/{owner}/{repo}/interaction-limits" - ], - removeRestrictionsForYourPublicRepos: [ - "DELETE /user/interaction-limits", - {}, - { renamed: ["interactions", 
"removeRestrictionsForAuthenticatedUser"] } - ], - setRestrictionsForAuthenticatedUser: ["PUT /user/interaction-limits"], - setRestrictionsForOrg: ["PUT /orgs/{org}/interaction-limits"], - setRestrictionsForRepo: ["PUT /repos/{owner}/{repo}/interaction-limits"], - setRestrictionsForYourPublicRepos: [ - "PUT /user/interaction-limits", - {}, - { renamed: ["interactions", "setRestrictionsForAuthenticatedUser"] } - ] - }, - issues: { - addAssignees: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/assignees" - ], - addLabels: ["POST /repos/{owner}/{repo}/issues/{issue_number}/labels"], - checkUserCanBeAssigned: ["GET /repos/{owner}/{repo}/assignees/{assignee}"], - checkUserCanBeAssignedToIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/assignees/{assignee}" - ], - create: ["POST /repos/{owner}/{repo}/issues"], - createComment: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/comments" - ], - createLabel: ["POST /repos/{owner}/{repo}/labels"], - createMilestone: ["POST /repos/{owner}/{repo}/milestones"], - deleteComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}" - ], - deleteLabel: ["DELETE /repos/{owner}/{repo}/labels/{name}"], - deleteMilestone: [ - "DELETE /repos/{owner}/{repo}/milestones/{milestone_number}" - ], - get: ["GET /repos/{owner}/{repo}/issues/{issue_number}"], - getComment: ["GET /repos/{owner}/{repo}/issues/comments/{comment_id}"], - getEvent: ["GET /repos/{owner}/{repo}/issues/events/{event_id}"], - getLabel: ["GET /repos/{owner}/{repo}/labels/{name}"], - getMilestone: ["GET /repos/{owner}/{repo}/milestones/{milestone_number}"], - list: ["GET /issues"], - listAssignees: ["GET /repos/{owner}/{repo}/assignees"], - listComments: ["GET /repos/{owner}/{repo}/issues/{issue_number}/comments"], - listCommentsForRepo: ["GET /repos/{owner}/{repo}/issues/comments"], - listEvents: ["GET /repos/{owner}/{repo}/issues/{issue_number}/events"], - listEventsForRepo: ["GET /repos/{owner}/{repo}/issues/events"], - listEventsForTimeline: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/timeline" - ], - listForAuthenticatedUser: ["GET /user/issues"], - listForOrg: ["GET /orgs/{org}/issues"], - listForRepo: ["GET /repos/{owner}/{repo}/issues"], - listLabelsForMilestone: [ - "GET /repos/{owner}/{repo}/milestones/{milestone_number}/labels" - ], - listLabelsForRepo: ["GET /repos/{owner}/{repo}/labels"], - listLabelsOnIssue: [ - "GET /repos/{owner}/{repo}/issues/{issue_number}/labels" - ], - listMilestones: ["GET /repos/{owner}/{repo}/milestones"], - lock: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/lock"], - removeAllLabels: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels" - ], - removeAssignees: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/assignees" - ], - removeLabel: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/labels/{name}" - ], - setLabels: ["PUT /repos/{owner}/{repo}/issues/{issue_number}/labels"], - unlock: ["DELETE /repos/{owner}/{repo}/issues/{issue_number}/lock"], - update: ["PATCH /repos/{owner}/{repo}/issues/{issue_number}"], - updateComment: ["PATCH /repos/{owner}/{repo}/issues/comments/{comment_id}"], - updateLabel: ["PATCH /repos/{owner}/{repo}/labels/{name}"], - updateMilestone: [ - "PATCH /repos/{owner}/{repo}/milestones/{milestone_number}" - ] - }, - licenses: { - get: ["GET /licenses/{license}"], - getAllCommonlyUsed: ["GET /licenses"], - getForRepo: ["GET /repos/{owner}/{repo}/license"] - }, - markdown: { - render: ["POST /markdown"], - renderRaw: [ - "POST /markdown/raw", - { headers: { 
"content-type": "text/plain; charset=utf-8" } } - ] - }, - meta: { - get: ["GET /meta"], - getAllVersions: ["GET /versions"], - getOctocat: ["GET /octocat"], - getZen: ["GET /zen"], - root: ["GET /"] - }, - migrations: { - cancelImport: [ - "DELETE /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.cancelImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#cancel-an-import" - } - ], - deleteArchiveForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/archive" - ], - deleteArchiveForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/archive" - ], - downloadArchiveForOrg: [ - "GET /orgs/{org}/migrations/{migration_id}/archive" - ], - getArchiveForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/archive" - ], - getCommitAuthors: [ - "GET /repos/{owner}/{repo}/import/authors", - {}, - { - deprecated: "octokit.rest.migrations.getCommitAuthors() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-commit-authors" - } - ], - getImportStatus: [ - "GET /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.getImportStatus() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-an-import-status" - } - ], - getLargeFiles: [ - "GET /repos/{owner}/{repo}/import/large_files", - {}, - { - deprecated: "octokit.rest.migrations.getLargeFiles() is deprecated, see https://docs.github.com/rest/migrations/source-imports#get-large-files" - } - ], - getStatusForAuthenticatedUser: ["GET /user/migrations/{migration_id}"], - getStatusForOrg: ["GET /orgs/{org}/migrations/{migration_id}"], - listForAuthenticatedUser: ["GET /user/migrations"], - listForOrg: ["GET /orgs/{org}/migrations"], - listReposForAuthenticatedUser: [ - "GET /user/migrations/{migration_id}/repositories" - ], - listReposForOrg: ["GET /orgs/{org}/migrations/{migration_id}/repositories"], - listReposForUser: [ - "GET /user/migrations/{migration_id}/repositories", - {}, - { renamed: ["migrations", "listReposForAuthenticatedUser"] } - ], - mapCommitAuthor: [ - "PATCH /repos/{owner}/{repo}/import/authors/{author_id}", - {}, - { - deprecated: "octokit.rest.migrations.mapCommitAuthor() is deprecated, see https://docs.github.com/rest/migrations/source-imports#map-a-commit-author" - } - ], - setLfsPreference: [ - "PATCH /repos/{owner}/{repo}/import/lfs", - {}, - { - deprecated: "octokit.rest.migrations.setLfsPreference() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-git-lfs-preference" - } - ], - startForAuthenticatedUser: ["POST /user/migrations"], - startForOrg: ["POST /orgs/{org}/migrations"], - startImport: [ - "PUT /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.startImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#start-an-import" - } - ], - unlockRepoForAuthenticatedUser: [ - "DELETE /user/migrations/{migration_id}/repos/{repo_name}/lock" - ], - unlockRepoForOrg: [ - "DELETE /orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock" - ], - updateImport: [ - "PATCH /repos/{owner}/{repo}/import", - {}, - { - deprecated: "octokit.rest.migrations.updateImport() is deprecated, see https://docs.github.com/rest/migrations/source-imports#update-an-import" - } - ] - }, - oidc: { - getOidcCustomSubTemplateForOrg: [ - "GET /orgs/{org}/actions/oidc/customization/sub" - ], - updateOidcCustomSubTemplateForOrg: [ - "PUT /orgs/{org}/actions/oidc/customization/sub" - ] - }, - orgs: { - 
addSecurityManagerTeam: [ - "PUT /orgs/{org}/security-managers/teams/{team_slug}" - ], - assignTeamToOrgRole: [ - "PUT /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" - ], - assignUserToOrgRole: [ - "PUT /orgs/{org}/organization-roles/users/{username}/{role_id}" - ], - blockUser: ["PUT /orgs/{org}/blocks/{username}"], - cancelInvitation: ["DELETE /orgs/{org}/invitations/{invitation_id}"], - checkBlockedUser: ["GET /orgs/{org}/blocks/{username}"], - checkMembershipForUser: ["GET /orgs/{org}/members/{username}"], - checkPublicMembershipForUser: ["GET /orgs/{org}/public_members/{username}"], - convertMemberToOutsideCollaborator: [ - "PUT /orgs/{org}/outside_collaborators/{username}" - ], - createCustomOrganizationRole: ["POST /orgs/{org}/organization-roles"], - createInvitation: ["POST /orgs/{org}/invitations"], - createOrUpdateCustomProperties: ["PATCH /orgs/{org}/properties/schema"], - createOrUpdateCustomPropertiesValuesForRepos: [ - "PATCH /orgs/{org}/properties/values" - ], - createOrUpdateCustomProperty: [ - "PUT /orgs/{org}/properties/schema/{custom_property_name}" - ], - createWebhook: ["POST /orgs/{org}/hooks"], - delete: ["DELETE /orgs/{org}"], - deleteCustomOrganizationRole: [ - "DELETE /orgs/{org}/organization-roles/{role_id}" - ], - deleteWebhook: ["DELETE /orgs/{org}/hooks/{hook_id}"], - enableOrDisableSecurityProductOnAllOrgRepos: [ - "POST /orgs/{org}/{security_product}/{enablement}" - ], - get: ["GET /orgs/{org}"], - getAllCustomProperties: ["GET /orgs/{org}/properties/schema"], - getCustomProperty: [ - "GET /orgs/{org}/properties/schema/{custom_property_name}" - ], - getMembershipForAuthenticatedUser: ["GET /user/memberships/orgs/{org}"], - getMembershipForUser: ["GET /orgs/{org}/memberships/{username}"], - getOrgRole: ["GET /orgs/{org}/organization-roles/{role_id}"], - getWebhook: ["GET /orgs/{org}/hooks/{hook_id}"], - getWebhookConfigForOrg: ["GET /orgs/{org}/hooks/{hook_id}/config"], - getWebhookDelivery: [ - "GET /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - list: ["GET /organizations"], - listAppInstallations: ["GET /orgs/{org}/installations"], - listBlockedUsers: ["GET /orgs/{org}/blocks"], - listCustomPropertiesValuesForRepos: ["GET /orgs/{org}/properties/values"], - listFailedInvitations: ["GET /orgs/{org}/failed_invitations"], - listForAuthenticatedUser: ["GET /user/orgs"], - listForUser: ["GET /users/{username}/orgs"], - listInvitationTeams: ["GET /orgs/{org}/invitations/{invitation_id}/teams"], - listMembers: ["GET /orgs/{org}/members"], - listMembershipsForAuthenticatedUser: ["GET /user/memberships/orgs"], - listOrgRoleTeams: ["GET /orgs/{org}/organization-roles/{role_id}/teams"], - listOrgRoleUsers: ["GET /orgs/{org}/organization-roles/{role_id}/users"], - listOrgRoles: ["GET /orgs/{org}/organization-roles"], - listOrganizationFineGrainedPermissions: [ - "GET /orgs/{org}/organization-fine-grained-permissions" - ], - listOutsideCollaborators: ["GET /orgs/{org}/outside_collaborators"], - listPatGrantRepositories: [ - "GET /orgs/{org}/personal-access-tokens/{pat_id}/repositories" - ], - listPatGrantRequestRepositories: [ - "GET /orgs/{org}/personal-access-token-requests/{pat_request_id}/repositories" - ], - listPatGrantRequests: ["GET /orgs/{org}/personal-access-token-requests"], - listPatGrants: ["GET /orgs/{org}/personal-access-tokens"], - listPendingInvitations: ["GET /orgs/{org}/invitations"], - listPublicMembers: ["GET /orgs/{org}/public_members"], - listSecurityManagerTeams: ["GET /orgs/{org}/security-managers"], - 
listWebhookDeliveries: ["GET /orgs/{org}/hooks/{hook_id}/deliveries"], - listWebhooks: ["GET /orgs/{org}/hooks"], - patchCustomOrganizationRole: [ - "PATCH /orgs/{org}/organization-roles/{role_id}" - ], - pingWebhook: ["POST /orgs/{org}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /orgs/{org}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeCustomProperty: [ - "DELETE /orgs/{org}/properties/schema/{custom_property_name}" - ], - removeMember: ["DELETE /orgs/{org}/members/{username}"], - removeMembershipForUser: ["DELETE /orgs/{org}/memberships/{username}"], - removeOutsideCollaborator: [ - "DELETE /orgs/{org}/outside_collaborators/{username}" - ], - removePublicMembershipForAuthenticatedUser: [ - "DELETE /orgs/{org}/public_members/{username}" - ], - removeSecurityManagerTeam: [ - "DELETE /orgs/{org}/security-managers/teams/{team_slug}" - ], - reviewPatGrantRequest: [ - "POST /orgs/{org}/personal-access-token-requests/{pat_request_id}" - ], - reviewPatGrantRequestsInBulk: [ - "POST /orgs/{org}/personal-access-token-requests" - ], - revokeAllOrgRolesTeam: [ - "DELETE /orgs/{org}/organization-roles/teams/{team_slug}" - ], - revokeAllOrgRolesUser: [ - "DELETE /orgs/{org}/organization-roles/users/{username}" - ], - revokeOrgRoleTeam: [ - "DELETE /orgs/{org}/organization-roles/teams/{team_slug}/{role_id}" - ], - revokeOrgRoleUser: [ - "DELETE /orgs/{org}/organization-roles/users/{username}/{role_id}" - ], - setMembershipForUser: ["PUT /orgs/{org}/memberships/{username}"], - setPublicMembershipForAuthenticatedUser: [ - "PUT /orgs/{org}/public_members/{username}" - ], - unblockUser: ["DELETE /orgs/{org}/blocks/{username}"], - update: ["PATCH /orgs/{org}"], - updateMembershipForAuthenticatedUser: [ - "PATCH /user/memberships/orgs/{org}" - ], - updatePatAccess: ["POST /orgs/{org}/personal-access-tokens/{pat_id}"], - updatePatAccesses: ["POST /orgs/{org}/personal-access-tokens"], - updateWebhook: ["PATCH /orgs/{org}/hooks/{hook_id}"], - updateWebhookConfigForOrg: ["PATCH /orgs/{org}/hooks/{hook_id}/config"] - }, - packages: { - deletePackageForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}" - ], - deletePackageForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}" - ], - deletePackageForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}" - ], - deletePackageVersionForAuthenticatedUser: [ - "DELETE /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForOrg: [ - "DELETE /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - deletePackageVersionForUser: [ - "DELETE /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getAllPackageVersionsForAPackageOwnedByAnOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions", - {}, - { renamed: ["packages", "getAllPackageVersionsForPackageOwnedByOrg"] } - ], - getAllPackageVersionsForAPackageOwnedByTheAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions", - {}, - { - renamed: [ - "packages", - "getAllPackageVersionsForPackageOwnedByAuthenticatedUser" - ] - } - ], - getAllPackageVersionsForPackageOwnedByAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByOrg: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions" - ], - getAllPackageVersionsForPackageOwnedByUser: [ - "GET 
/users/{username}/packages/{package_type}/{package_name}/versions" - ], - getPackageForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}" - ], - getPackageForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}" - ], - getPackageForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}" - ], - getPackageVersionForAuthenticatedUser: [ - "GET /user/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForOrganization: [ - "GET /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - getPackageVersionForUser: [ - "GET /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}" - ], - listDockerMigrationConflictingPackagesForAuthenticatedUser: [ - "GET /user/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForOrganization: [ - "GET /orgs/{org}/docker/conflicts" - ], - listDockerMigrationConflictingPackagesForUser: [ - "GET /users/{username}/docker/conflicts" - ], - listPackagesForAuthenticatedUser: ["GET /user/packages"], - listPackagesForOrganization: ["GET /orgs/{org}/packages"], - listPackagesForUser: ["GET /users/{username}/packages"], - restorePackageForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageForUser: [ - "POST /users/{username}/packages/{package_type}/{package_name}/restore{?token}" - ], - restorePackageVersionForAuthenticatedUser: [ - "POST /user/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForOrg: [ - "POST /orgs/{org}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ], - restorePackageVersionForUser: [ - "POST /users/{username}/packages/{package_type}/{package_name}/versions/{package_version_id}/restore" - ] - }, - projects: { - addCollaborator: ["PUT /projects/{project_id}/collaborators/{username}"], - createCard: ["POST /projects/columns/{column_id}/cards"], - createColumn: ["POST /projects/{project_id}/columns"], - createForAuthenticatedUser: ["POST /user/projects"], - createForOrg: ["POST /orgs/{org}/projects"], - createForRepo: ["POST /repos/{owner}/{repo}/projects"], - delete: ["DELETE /projects/{project_id}"], - deleteCard: ["DELETE /projects/columns/cards/{card_id}"], - deleteColumn: ["DELETE /projects/columns/{column_id}"], - get: ["GET /projects/{project_id}"], - getCard: ["GET /projects/columns/cards/{card_id}"], - getColumn: ["GET /projects/columns/{column_id}"], - getPermissionForUser: [ - "GET /projects/{project_id}/collaborators/{username}/permission" - ], - listCards: ["GET /projects/columns/{column_id}/cards"], - listCollaborators: ["GET /projects/{project_id}/collaborators"], - listColumns: ["GET /projects/{project_id}/columns"], - listForOrg: ["GET /orgs/{org}/projects"], - listForRepo: ["GET /repos/{owner}/{repo}/projects"], - listForUser: ["GET /users/{username}/projects"], - moveCard: ["POST /projects/columns/cards/{card_id}/moves"], - moveColumn: ["POST /projects/columns/{column_id}/moves"], - removeCollaborator: [ - "DELETE /projects/{project_id}/collaborators/{username}" - ], - update: ["PATCH /projects/{project_id}"], - updateCard: ["PATCH /projects/columns/cards/{card_id}"], - updateColumn: ["PATCH /projects/columns/{column_id}"] - }, - pulls: { - checkIfMerged: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - 
create: ["POST /repos/{owner}/{repo}/pulls"], - createReplyForReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies" - ], - createReview: ["POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - createReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - deletePendingReview: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - deleteReviewComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ], - dismissReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/dismissals" - ], - get: ["GET /repos/{owner}/{repo}/pulls/{pull_number}"], - getReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - getReviewComment: ["GET /repos/{owner}/{repo}/pulls/comments/{comment_id}"], - list: ["GET /repos/{owner}/{repo}/pulls"], - listCommentsForReview: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/comments" - ], - listCommits: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/commits"], - listFiles: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/files"], - listRequestedReviewers: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - listReviewComments: [ - "GET /repos/{owner}/{repo}/pulls/{pull_number}/comments" - ], - listReviewCommentsForRepo: ["GET /repos/{owner}/{repo}/pulls/comments"], - listReviews: ["GET /repos/{owner}/{repo}/pulls/{pull_number}/reviews"], - merge: ["PUT /repos/{owner}/{repo}/pulls/{pull_number}/merge"], - removeRequestedReviewers: [ - "DELETE /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - requestReviewers: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers" - ], - submitReview: [ - "POST /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}/events" - ], - update: ["PATCH /repos/{owner}/{repo}/pulls/{pull_number}"], - updateBranch: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/update-branch" - ], - updateReview: [ - "PUT /repos/{owner}/{repo}/pulls/{pull_number}/reviews/{review_id}" - ], - updateReviewComment: [ - "PATCH /repos/{owner}/{repo}/pulls/comments/{comment_id}" - ] - }, - rateLimit: { get: ["GET /rate_limit"] }, - reactions: { - createForCommitComment: [ - "POST /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - createForIssue: [ - "POST /repos/{owner}/{repo}/issues/{issue_number}/reactions" - ], - createForIssueComment: [ - "POST /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - createForPullRequestReviewComment: [ - "POST /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - createForRelease: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - createForTeamDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - createForTeamDiscussionInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ], - deleteForCommitComment: [ - "DELETE /repos/{owner}/{repo}/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForIssue: [ - "DELETE /repos/{owner}/{repo}/issues/{issue_number}/reactions/{reaction_id}" - ], - deleteForIssueComment: [ - "DELETE /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForPullRequestComment: [ - "DELETE /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions/{reaction_id}" - ], - deleteForRelease: [ - "DELETE 
/repos/{owner}/{repo}/releases/{release_id}/reactions/{reaction_id}" - ], - deleteForTeamDiscussion: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions/{reaction_id}" - ], - deleteForTeamDiscussionComment: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions/{reaction_id}" - ], - listForCommitComment: [ - "GET /repos/{owner}/{repo}/comments/{comment_id}/reactions" - ], - listForIssue: ["GET /repos/{owner}/{repo}/issues/{issue_number}/reactions"], - listForIssueComment: [ - "GET /repos/{owner}/{repo}/issues/comments/{comment_id}/reactions" - ], - listForPullRequestReviewComment: [ - "GET /repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions" - ], - listForRelease: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/reactions" - ], - listForTeamDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}/reactions" - ], - listForTeamDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/reactions" - ] - }, - repos: { - acceptInvitation: [ - "PATCH /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "acceptInvitationForAuthenticatedUser"] } - ], - acceptInvitationForAuthenticatedUser: [ - "PATCH /user/repository_invitations/{invitation_id}" - ], - addAppAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - addCollaborator: ["PUT /repos/{owner}/{repo}/collaborators/{username}"], - addStatusCheckContexts: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - addTeamAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - addUserAccessRestrictions: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - cancelPagesDeployment: [ - "POST /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}/cancel" - ], - checkAutomatedSecurityFixes: [ - "GET /repos/{owner}/{repo}/automated-security-fixes" - ], - checkCollaborator: ["GET /repos/{owner}/{repo}/collaborators/{username}"], - checkVulnerabilityAlerts: [ - "GET /repos/{owner}/{repo}/vulnerability-alerts" - ], - codeownersErrors: ["GET /repos/{owner}/{repo}/codeowners/errors"], - compareCommits: ["GET /repos/{owner}/{repo}/compare/{base}...{head}"], - compareCommitsWithBasehead: [ - "GET /repos/{owner}/{repo}/compare/{basehead}" - ], - createAutolink: ["POST /repos/{owner}/{repo}/autolinks"], - createCommitComment: [ - "POST /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - createCommitSignatureProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - createCommitStatus: ["POST /repos/{owner}/{repo}/statuses/{sha}"], - createDeployKey: ["POST /repos/{owner}/{repo}/keys"], - createDeployment: ["POST /repos/{owner}/{repo}/deployments"], - createDeploymentBranchPolicy: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - createDeploymentProtectionRule: [ - "POST /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - createDeploymentStatus: [ - "POST /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - createDispatchEvent: ["POST /repos/{owner}/{repo}/dispatches"], - 
createForAuthenticatedUser: ["POST /user/repos"], - createFork: ["POST /repos/{owner}/{repo}/forks"], - createInOrg: ["POST /orgs/{org}/repos"], - createOrUpdateCustomPropertiesValues: [ - "PATCH /repos/{owner}/{repo}/properties/values" - ], - createOrUpdateEnvironment: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}" - ], - createOrUpdateFileContents: ["PUT /repos/{owner}/{repo}/contents/{path}"], - createOrgRuleset: ["POST /orgs/{org}/rulesets"], - createPagesDeployment: ["POST /repos/{owner}/{repo}/pages/deployments"], - createPagesSite: ["POST /repos/{owner}/{repo}/pages"], - createRelease: ["POST /repos/{owner}/{repo}/releases"], - createRepoRuleset: ["POST /repos/{owner}/{repo}/rulesets"], - createTagProtection: ["POST /repos/{owner}/{repo}/tags/protection"], - createUsingTemplate: [ - "POST /repos/{template_owner}/{template_repo}/generate" - ], - createWebhook: ["POST /repos/{owner}/{repo}/hooks"], - declineInvitation: [ - "DELETE /user/repository_invitations/{invitation_id}", - {}, - { renamed: ["repos", "declineInvitationForAuthenticatedUser"] } - ], - declineInvitationForAuthenticatedUser: [ - "DELETE /user/repository_invitations/{invitation_id}" - ], - delete: ["DELETE /repos/{owner}/{repo}"], - deleteAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - deleteAdminBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - deleteAnEnvironment: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}" - ], - deleteAutolink: ["DELETE /repos/{owner}/{repo}/autolinks/{autolink_id}"], - deleteBranchProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection" - ], - deleteCommitComment: ["DELETE /repos/{owner}/{repo}/comments/{comment_id}"], - deleteCommitSignatureProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - deleteDeployKey: ["DELETE /repos/{owner}/{repo}/keys/{key_id}"], - deleteDeployment: [ - "DELETE /repos/{owner}/{repo}/deployments/{deployment_id}" - ], - deleteDeploymentBranchPolicy: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - deleteFile: ["DELETE /repos/{owner}/{repo}/contents/{path}"], - deleteInvitation: [ - "DELETE /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - deleteOrgRuleset: ["DELETE /orgs/{org}/rulesets/{ruleset_id}"], - deletePagesSite: ["DELETE /repos/{owner}/{repo}/pages"], - deletePullRequestReviewProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - deleteRelease: ["DELETE /repos/{owner}/{repo}/releases/{release_id}"], - deleteReleaseAsset: [ - "DELETE /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - deleteRepoRuleset: ["DELETE /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - deleteTagProtection: [ - "DELETE /repos/{owner}/{repo}/tags/protection/{tag_protection_id}" - ], - deleteWebhook: ["DELETE /repos/{owner}/{repo}/hooks/{hook_id}"], - disableAutomatedSecurityFixes: [ - "DELETE /repos/{owner}/{repo}/automated-security-fixes" - ], - disableDeploymentProtectionRule: [ - "DELETE /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - disablePrivateVulnerabilityReporting: [ - "DELETE /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - disableVulnerabilityAlerts: [ - "DELETE /repos/{owner}/{repo}/vulnerability-alerts" - ], - downloadArchive: [ - "GET 
/repos/{owner}/{repo}/zipball/{ref}", - {}, - { renamed: ["repos", "downloadZipballArchive"] } - ], - downloadTarballArchive: ["GET /repos/{owner}/{repo}/tarball/{ref}"], - downloadZipballArchive: ["GET /repos/{owner}/{repo}/zipball/{ref}"], - enableAutomatedSecurityFixes: [ - "PUT /repos/{owner}/{repo}/automated-security-fixes" - ], - enablePrivateVulnerabilityReporting: [ - "PUT /repos/{owner}/{repo}/private-vulnerability-reporting" - ], - enableVulnerabilityAlerts: [ - "PUT /repos/{owner}/{repo}/vulnerability-alerts" - ], - generateReleaseNotes: [ - "POST /repos/{owner}/{repo}/releases/generate-notes" - ], - get: ["GET /repos/{owner}/{repo}"], - getAccessRestrictions: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions" - ], - getAdminBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - getAllDeploymentProtectionRules: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules" - ], - getAllEnvironments: ["GET /repos/{owner}/{repo}/environments"], - getAllStatusCheckContexts: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts" - ], - getAllTopics: ["GET /repos/{owner}/{repo}/topics"], - getAppsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps" - ], - getAutolink: ["GET /repos/{owner}/{repo}/autolinks/{autolink_id}"], - getBranch: ["GET /repos/{owner}/{repo}/branches/{branch}"], - getBranchProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection" - ], - getBranchRules: ["GET /repos/{owner}/{repo}/rules/branches/{branch}"], - getClones: ["GET /repos/{owner}/{repo}/traffic/clones"], - getCodeFrequencyStats: ["GET /repos/{owner}/{repo}/stats/code_frequency"], - getCollaboratorPermissionLevel: [ - "GET /repos/{owner}/{repo}/collaborators/{username}/permission" - ], - getCombinedStatusForRef: ["GET /repos/{owner}/{repo}/commits/{ref}/status"], - getCommit: ["GET /repos/{owner}/{repo}/commits/{ref}"], - getCommitActivityStats: ["GET /repos/{owner}/{repo}/stats/commit_activity"], - getCommitComment: ["GET /repos/{owner}/{repo}/comments/{comment_id}"], - getCommitSignatureProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_signatures" - ], - getCommunityProfileMetrics: ["GET /repos/{owner}/{repo}/community/profile"], - getContent: ["GET /repos/{owner}/{repo}/contents/{path}"], - getContributorsStats: ["GET /repos/{owner}/{repo}/stats/contributors"], - getCustomDeploymentProtectionRule: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/{protection_rule_id}" - ], - getCustomPropertiesValues: ["GET /repos/{owner}/{repo}/properties/values"], - getDeployKey: ["GET /repos/{owner}/{repo}/keys/{key_id}"], - getDeployment: ["GET /repos/{owner}/{repo}/deployments/{deployment_id}"], - getDeploymentBranchPolicy: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - getDeploymentStatus: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}" - ], - getEnvironment: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}" - ], - getLatestPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/latest"], - getLatestRelease: ["GET /repos/{owner}/{repo}/releases/latest"], - getOrgRuleSuite: ["GET /orgs/{org}/rulesets/rule-suites/{rule_suite_id}"], - getOrgRuleSuites: ["GET /orgs/{org}/rulesets/rule-suites"], - getOrgRuleset: ["GET 
/orgs/{org}/rulesets/{ruleset_id}"], - getOrgRulesets: ["GET /orgs/{org}/rulesets"], - getPages: ["GET /repos/{owner}/{repo}/pages"], - getPagesBuild: ["GET /repos/{owner}/{repo}/pages/builds/{build_id}"], - getPagesDeployment: [ - "GET /repos/{owner}/{repo}/pages/deployments/{pages_deployment_id}" - ], - getPagesHealthCheck: ["GET /repos/{owner}/{repo}/pages/health"], - getParticipationStats: ["GET /repos/{owner}/{repo}/stats/participation"], - getPullRequestReviewProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - getPunchCardStats: ["GET /repos/{owner}/{repo}/stats/punch_card"], - getReadme: ["GET /repos/{owner}/{repo}/readme"], - getReadmeInDirectory: ["GET /repos/{owner}/{repo}/readme/{dir}"], - getRelease: ["GET /repos/{owner}/{repo}/releases/{release_id}"], - getReleaseAsset: ["GET /repos/{owner}/{repo}/releases/assets/{asset_id}"], - getReleaseByTag: ["GET /repos/{owner}/{repo}/releases/tags/{tag}"], - getRepoRuleSuite: [ - "GET /repos/{owner}/{repo}/rulesets/rule-suites/{rule_suite_id}" - ], - getRepoRuleSuites: ["GET /repos/{owner}/{repo}/rulesets/rule-suites"], - getRepoRuleset: ["GET /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - getRepoRulesets: ["GET /repos/{owner}/{repo}/rulesets"], - getStatusChecksProtection: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - getTeamsWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams" - ], - getTopPaths: ["GET /repos/{owner}/{repo}/traffic/popular/paths"], - getTopReferrers: ["GET /repos/{owner}/{repo}/traffic/popular/referrers"], - getUsersWithAccessToProtectedBranch: [ - "GET /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users" - ], - getViews: ["GET /repos/{owner}/{repo}/traffic/views"], - getWebhook: ["GET /repos/{owner}/{repo}/hooks/{hook_id}"], - getWebhookConfigForRepo: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - getWebhookDelivery: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}" - ], - listActivities: ["GET /repos/{owner}/{repo}/activity"], - listAutolinks: ["GET /repos/{owner}/{repo}/autolinks"], - listBranches: ["GET /repos/{owner}/{repo}/branches"], - listBranchesForHeadCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/branches-where-head" - ], - listCollaborators: ["GET /repos/{owner}/{repo}/collaborators"], - listCommentsForCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/comments" - ], - listCommitCommentsForRepo: ["GET /repos/{owner}/{repo}/comments"], - listCommitStatusesForRef: [ - "GET /repos/{owner}/{repo}/commits/{ref}/statuses" - ], - listCommits: ["GET /repos/{owner}/{repo}/commits"], - listContributors: ["GET /repos/{owner}/{repo}/contributors"], - listCustomDeploymentRuleIntegrations: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment_protection_rules/apps" - ], - listDeployKeys: ["GET /repos/{owner}/{repo}/keys"], - listDeploymentBranchPolicies: [ - "GET /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies" - ], - listDeploymentStatuses: [ - "GET /repos/{owner}/{repo}/deployments/{deployment_id}/statuses" - ], - listDeployments: ["GET /repos/{owner}/{repo}/deployments"], - listForAuthenticatedUser: ["GET /user/repos"], - listForOrg: ["GET /orgs/{org}/repos"], - listForUser: ["GET /users/{username}/repos"], - listForks: ["GET /repos/{owner}/{repo}/forks"], - listInvitations: ["GET /repos/{owner}/{repo}/invitations"], - 
listInvitationsForAuthenticatedUser: ["GET /user/repository_invitations"], - listLanguages: ["GET /repos/{owner}/{repo}/languages"], - listPagesBuilds: ["GET /repos/{owner}/{repo}/pages/builds"], - listPublic: ["GET /repositories"], - listPullRequestsAssociatedWithCommit: [ - "GET /repos/{owner}/{repo}/commits/{commit_sha}/pulls" - ], - listReleaseAssets: [ - "GET /repos/{owner}/{repo}/releases/{release_id}/assets" - ], - listReleases: ["GET /repos/{owner}/{repo}/releases"], - listTagProtection: ["GET /repos/{owner}/{repo}/tags/protection"], - listTags: ["GET /repos/{owner}/{repo}/tags"], - listTeams: ["GET /repos/{owner}/{repo}/teams"], - listWebhookDeliveries: [ - "GET /repos/{owner}/{repo}/hooks/{hook_id}/deliveries" - ], - listWebhooks: ["GET /repos/{owner}/{repo}/hooks"], - merge: ["POST /repos/{owner}/{repo}/merges"], - mergeUpstream: ["POST /repos/{owner}/{repo}/merge-upstream"], - pingWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/pings"], - redeliverWebhookDelivery: [ - "POST /repos/{owner}/{repo}/hooks/{hook_id}/deliveries/{delivery_id}/attempts" - ], - removeAppAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - removeCollaborator: [ - "DELETE /repos/{owner}/{repo}/collaborators/{username}" - ], - removeStatusCheckContexts: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - removeStatusCheckProtection: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - removeTeamAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - removeUserAccessRestrictions: [ - "DELETE /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - renameBranch: ["POST /repos/{owner}/{repo}/branches/{branch}/rename"], - replaceAllTopics: ["PUT /repos/{owner}/{repo}/topics"], - requestPagesBuild: ["POST /repos/{owner}/{repo}/pages/builds"], - setAdminBranchProtection: [ - "POST /repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins" - ], - setAppAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/apps", - {}, - { mapToData: "apps" } - ], - setStatusCheckContexts: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts", - {}, - { mapToData: "contexts" } - ], - setTeamAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams", - {}, - { mapToData: "teams" } - ], - setUserAccessRestrictions: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users", - {}, - { mapToData: "users" } - ], - testPushWebhook: ["POST /repos/{owner}/{repo}/hooks/{hook_id}/tests"], - transfer: ["POST /repos/{owner}/{repo}/transfer"], - update: ["PATCH /repos/{owner}/{repo}"], - updateBranchProtection: [ - "PUT /repos/{owner}/{repo}/branches/{branch}/protection" - ], - updateCommitComment: ["PATCH /repos/{owner}/{repo}/comments/{comment_id}"], - updateDeploymentBranchPolicy: [ - "PUT /repos/{owner}/{repo}/environments/{environment_name}/deployment-branch-policies/{branch_policy_id}" - ], - updateInformationAboutPagesSite: ["PUT /repos/{owner}/{repo}/pages"], - updateInvitation: [ - "PATCH /repos/{owner}/{repo}/invitations/{invitation_id}" - ], - updateOrgRuleset: ["PUT /orgs/{org}/rulesets/{ruleset_id}"], - 
updatePullRequestReviewProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews" - ], - updateRelease: ["PATCH /repos/{owner}/{repo}/releases/{release_id}"], - updateReleaseAsset: [ - "PATCH /repos/{owner}/{repo}/releases/assets/{asset_id}" - ], - updateRepoRuleset: ["PUT /repos/{owner}/{repo}/rulesets/{ruleset_id}"], - updateStatusCheckPotection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks", - {}, - { renamed: ["repos", "updateStatusCheckProtection"] } - ], - updateStatusCheckProtection: [ - "PATCH /repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks" - ], - updateWebhook: ["PATCH /repos/{owner}/{repo}/hooks/{hook_id}"], - updateWebhookConfigForRepo: [ - "PATCH /repos/{owner}/{repo}/hooks/{hook_id}/config" - ], - uploadReleaseAsset: [ - "POST /repos/{owner}/{repo}/releases/{release_id}/assets{?name,label}", - { baseUrl: "https://uploads.github.com" } - ] - }, - search: { - code: ["GET /search/code"], - commits: ["GET /search/commits"], - issuesAndPullRequests: ["GET /search/issues"], - labels: ["GET /search/labels"], - repos: ["GET /search/repositories"], - topics: ["GET /search/topics"], - users: ["GET /search/users"] - }, - secretScanning: { - getAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ], - listAlertsForEnterprise: [ - "GET /enterprises/{enterprise}/secret-scanning/alerts" - ], - listAlertsForOrg: ["GET /orgs/{org}/secret-scanning/alerts"], - listAlertsForRepo: ["GET /repos/{owner}/{repo}/secret-scanning/alerts"], - listLocationsForAlert: [ - "GET /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}/locations" - ], - updateAlert: [ - "PATCH /repos/{owner}/{repo}/secret-scanning/alerts/{alert_number}" - ] - }, - securityAdvisories: { - createFork: [ - "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/forks" - ], - createPrivateVulnerabilityReport: [ - "POST /repos/{owner}/{repo}/security-advisories/reports" - ], - createRepositoryAdvisory: [ - "POST /repos/{owner}/{repo}/security-advisories" - ], - createRepositoryAdvisoryCveRequest: [ - "POST /repos/{owner}/{repo}/security-advisories/{ghsa_id}/cve" - ], - getGlobalAdvisory: ["GET /advisories/{ghsa_id}"], - getRepositoryAdvisory: [ - "GET /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ], - listGlobalAdvisories: ["GET /advisories"], - listOrgRepositoryAdvisories: ["GET /orgs/{org}/security-advisories"], - listRepositoryAdvisories: ["GET /repos/{owner}/{repo}/security-advisories"], - updateRepositoryAdvisory: [ - "PATCH /repos/{owner}/{repo}/security-advisories/{ghsa_id}" - ] - }, - teams: { - addOrUpdateMembershipForUserInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - addOrUpdateProjectPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - addOrUpdateRepoPermissionsInOrg: [ - "PUT /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - checkPermissionsForProjectInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - checkPermissionsForRepoInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - create: ["POST /orgs/{org}/teams"], - createDiscussionCommentInOrg: [ - "POST /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - createDiscussionInOrg: ["POST /orgs/{org}/teams/{team_slug}/discussions"], - deleteDiscussionCommentInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - 
deleteDiscussionInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - deleteInOrg: ["DELETE /orgs/{org}/teams/{team_slug}"], - getByName: ["GET /orgs/{org}/teams/{team_slug}"], - getDiscussionCommentInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - getDiscussionInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - getMembershipForUserInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - list: ["GET /orgs/{org}/teams"], - listChildInOrg: ["GET /orgs/{org}/teams/{team_slug}/teams"], - listDiscussionCommentsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments" - ], - listDiscussionsInOrg: ["GET /orgs/{org}/teams/{team_slug}/discussions"], - listForAuthenticatedUser: ["GET /user/teams"], - listMembersInOrg: ["GET /orgs/{org}/teams/{team_slug}/members"], - listPendingInvitationsInOrg: [ - "GET /orgs/{org}/teams/{team_slug}/invitations" - ], - listProjectsInOrg: ["GET /orgs/{org}/teams/{team_slug}/projects"], - listReposInOrg: ["GET /orgs/{org}/teams/{team_slug}/repos"], - removeMembershipForUserInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/memberships/{username}" - ], - removeProjectInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/projects/{project_id}" - ], - removeRepoInOrg: [ - "DELETE /orgs/{org}/teams/{team_slug}/repos/{owner}/{repo}" - ], - updateDiscussionCommentInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments/{comment_number}" - ], - updateDiscussionInOrg: [ - "PATCH /orgs/{org}/teams/{team_slug}/discussions/{discussion_number}" - ], - updateInOrg: ["PATCH /orgs/{org}/teams/{team_slug}"] - }, - users: { - addEmailForAuthenticated: [ - "POST /user/emails", - {}, - { renamed: ["users", "addEmailForAuthenticatedUser"] } - ], - addEmailForAuthenticatedUser: ["POST /user/emails"], - addSocialAccountForAuthenticatedUser: ["POST /user/social_accounts"], - block: ["PUT /user/blocks/{username}"], - checkBlocked: ["GET /user/blocks/{username}"], - checkFollowingForUser: ["GET /users/{username}/following/{target_user}"], - checkPersonIsFollowedByAuthenticated: ["GET /user/following/{username}"], - createGpgKeyForAuthenticated: [ - "POST /user/gpg_keys", - {}, - { renamed: ["users", "createGpgKeyForAuthenticatedUser"] } - ], - createGpgKeyForAuthenticatedUser: ["POST /user/gpg_keys"], - createPublicSshKeyForAuthenticated: [ - "POST /user/keys", - {}, - { renamed: ["users", "createPublicSshKeyForAuthenticatedUser"] } - ], - createPublicSshKeyForAuthenticatedUser: ["POST /user/keys"], - createSshSigningKeyForAuthenticatedUser: ["POST /user/ssh_signing_keys"], - deleteEmailForAuthenticated: [ - "DELETE /user/emails", - {}, - { renamed: ["users", "deleteEmailForAuthenticatedUser"] } - ], - deleteEmailForAuthenticatedUser: ["DELETE /user/emails"], - deleteGpgKeyForAuthenticated: [ - "DELETE /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "deleteGpgKeyForAuthenticatedUser"] } - ], - deleteGpgKeyForAuthenticatedUser: ["DELETE /user/gpg_keys/{gpg_key_id}"], - deletePublicSshKeyForAuthenticated: [ - "DELETE /user/keys/{key_id}", - {}, - { renamed: ["users", "deletePublicSshKeyForAuthenticatedUser"] } - ], - deletePublicSshKeyForAuthenticatedUser: ["DELETE /user/keys/{key_id}"], - deleteSocialAccountForAuthenticatedUser: ["DELETE /user/social_accounts"], - deleteSshSigningKeyForAuthenticatedUser: [ - "DELETE /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - follow: ["PUT 
/user/following/{username}"], - getAuthenticated: ["GET /user"], - getByUsername: ["GET /users/{username}"], - getContextForUser: ["GET /users/{username}/hovercard"], - getGpgKeyForAuthenticated: [ - "GET /user/gpg_keys/{gpg_key_id}", - {}, - { renamed: ["users", "getGpgKeyForAuthenticatedUser"] } - ], - getGpgKeyForAuthenticatedUser: ["GET /user/gpg_keys/{gpg_key_id}"], - getPublicSshKeyForAuthenticated: [ - "GET /user/keys/{key_id}", - {}, - { renamed: ["users", "getPublicSshKeyForAuthenticatedUser"] } - ], - getPublicSshKeyForAuthenticatedUser: ["GET /user/keys/{key_id}"], - getSshSigningKeyForAuthenticatedUser: [ - "GET /user/ssh_signing_keys/{ssh_signing_key_id}" - ], - list: ["GET /users"], - listBlockedByAuthenticated: [ - "GET /user/blocks", - {}, - { renamed: ["users", "listBlockedByAuthenticatedUser"] } - ], - listBlockedByAuthenticatedUser: ["GET /user/blocks"], - listEmailsForAuthenticated: [ - "GET /user/emails", - {}, - { renamed: ["users", "listEmailsForAuthenticatedUser"] } - ], - listEmailsForAuthenticatedUser: ["GET /user/emails"], - listFollowedByAuthenticated: [ - "GET /user/following", - {}, - { renamed: ["users", "listFollowedByAuthenticatedUser"] } - ], - listFollowedByAuthenticatedUser: ["GET /user/following"], - listFollowersForAuthenticatedUser: ["GET /user/followers"], - listFollowersForUser: ["GET /users/{username}/followers"], - listFollowingForUser: ["GET /users/{username}/following"], - listGpgKeysForAuthenticated: [ - "GET /user/gpg_keys", - {}, - { renamed: ["users", "listGpgKeysForAuthenticatedUser"] } - ], - listGpgKeysForAuthenticatedUser: ["GET /user/gpg_keys"], - listGpgKeysForUser: ["GET /users/{username}/gpg_keys"], - listPublicEmailsForAuthenticated: [ - "GET /user/public_emails", - {}, - { renamed: ["users", "listPublicEmailsForAuthenticatedUser"] } - ], - listPublicEmailsForAuthenticatedUser: ["GET /user/public_emails"], - listPublicKeysForUser: ["GET /users/{username}/keys"], - listPublicSshKeysForAuthenticated: [ - "GET /user/keys", - {}, - { renamed: ["users", "listPublicSshKeysForAuthenticatedUser"] } - ], - listPublicSshKeysForAuthenticatedUser: ["GET /user/keys"], - listSocialAccountsForAuthenticatedUser: ["GET /user/social_accounts"], - listSocialAccountsForUser: ["GET /users/{username}/social_accounts"], - listSshSigningKeysForAuthenticatedUser: ["GET /user/ssh_signing_keys"], - listSshSigningKeysForUser: ["GET /users/{username}/ssh_signing_keys"], - setPrimaryEmailVisibilityForAuthenticated: [ - "PATCH /user/email/visibility", - {}, - { renamed: ["users", "setPrimaryEmailVisibilityForAuthenticatedUser"] } - ], - setPrimaryEmailVisibilityForAuthenticatedUser: [ - "PATCH /user/email/visibility" - ], - unblock: ["DELETE /user/blocks/{username}"], - unfollow: ["DELETE /user/following/{username}"], - updateAuthenticated: ["PATCH /user"] - } -}; -var endpoints_default = Endpoints; - -// pkg/dist-src/endpoints-to-methods.js -var endpointMethodsMap = /* @__PURE__ */ new Map(); -for (const [scope, endpoints] of Object.entries(endpoints_default)) { - for (const [methodName, endpoint] of Object.entries(endpoints)) { - const [route, defaults, decorations] = endpoint; - const [method, url] = route.split(/ /); - const endpointDefaults = Object.assign( - { - method, - url - }, - defaults - ); - if (!endpointMethodsMap.has(scope)) { - endpointMethodsMap.set(scope, /* @__PURE__ */ new Map()); - } - endpointMethodsMap.get(scope).set(methodName, { - scope, - methodName, - endpointDefaults, - decorations - }); - } -} -var handler = { - has({ scope }, 
methodName) { - return endpointMethodsMap.get(scope).has(methodName); - }, - getOwnPropertyDescriptor(target, methodName) { - return { - value: this.get(target, methodName), - // ensures method is in the cache - configurable: true, - writable: true, - enumerable: true - }; - }, - defineProperty(target, methodName, descriptor) { - Object.defineProperty(target.cache, methodName, descriptor); - return true; - }, - deleteProperty(target, methodName) { - delete target.cache[methodName]; - return true; - }, - ownKeys({ scope }) { - return [...endpointMethodsMap.get(scope).keys()]; - }, - set(target, methodName, value) { - return target.cache[methodName] = value; - }, - get({ octokit, scope, cache }, methodName) { - if (cache[methodName]) { - return cache[methodName]; - } - const method = endpointMethodsMap.get(scope).get(methodName); - if (!method) { - return void 0; - } - const { endpointDefaults, decorations } = method; - if (decorations) { - cache[methodName] = decorate( - octokit, - scope, - methodName, - endpointDefaults, - decorations - ); - } else { - cache[methodName] = octokit.request.defaults(endpointDefaults); - } - return cache[methodName]; - } -}; -function endpointsToMethods(octokit) { - const newMethods = {}; - for (const scope of endpointMethodsMap.keys()) { - newMethods[scope] = new Proxy({ octokit, scope, cache: {} }, handler); - } - return newMethods; -} -function decorate(octokit, scope, methodName, defaults, decorations) { - const requestWithDefaults = octokit.request.defaults(defaults); - function withDecorations(...args) { - let options = requestWithDefaults.endpoint.merge(...args); - if (decorations.mapToData) { - options = Object.assign({}, options, { - data: options[decorations.mapToData], - [decorations.mapToData]: void 0 - }); - return requestWithDefaults(options); - } - if (decorations.renamed) { - const [newScope, newMethodName] = decorations.renamed; - octokit.log.warn( - `octokit.${scope}.${methodName}() has been renamed to octokit.${newScope}.${newMethodName}()` - ); - } - if (decorations.deprecated) { - octokit.log.warn(decorations.deprecated); - } - if (decorations.renamedParameters) { - const options2 = requestWithDefaults.endpoint.merge(...args); - for (const [name, alias] of Object.entries( - decorations.renamedParameters - )) { - if (name in options2) { - octokit.log.warn( - `"${name}" parameter is deprecated for "octokit.${scope}.${methodName}()". 
Use "${alias}" instead` - ); - if (!(alias in options2)) { - options2[alias] = options2[name]; - } - delete options2[name]; - } - } - return requestWithDefaults(options2); - } - return requestWithDefaults(...args); - } - return Object.assign(withDecorations, requestWithDefaults); -} - -// pkg/dist-src/index.js -function restEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - rest: api - }; -} -restEndpointMethods.VERSION = VERSION; -function legacyRestEndpointMethods(octokit) { - const api = endpointsToMethods(octokit); - return { - ...api, - rest: api - }; -} -legacyRestEndpointMethods.VERSION = VERSION; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 537: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var __create = Object.create; -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __getProtoOf = Object.getPrototypeOf; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toESM = (mod, isNodeMode, target) => (target = mod != null ? __create(__getProtoOf(mod)) : {}, __copyProps( - // If the importer is in node compatibility mode or this is not an ESM - // file that has been converted to a CommonJS file using a Babel- - // compatible transform (i.e. "__esModule" has not been set), then set - // "default" to the CommonJS "module.exports" for node compatibility. - isNodeMode || !mod || !mod.__esModule ? 
__defProp(target, "default", { value: mod, enumerable: true }) : target, - mod -)); -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - RequestError: () => RequestError -}); -module.exports = __toCommonJS(dist_src_exports); -var import_deprecation = __nccwpck_require__(8932); -var import_once = __toESM(__nccwpck_require__(1223)); -var logOnceCode = (0, import_once.default)((deprecation) => console.warn(deprecation)); -var logOnceHeaders = (0, import_once.default)((deprecation) => console.warn(deprecation)); -var RequestError = class extends Error { - constructor(message, statusCode, options) { - super(message); - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } - this.name = "HttpError"; - this.status = statusCode; - let headers; - if ("headers" in options && typeof options.headers !== "undefined") { - headers = options.headers; - } - if ("response" in options) { - this.response = options.response; - headers = options.response.headers; - } - const requestCopy = Object.assign({}, options.request); - if (options.request.headers.authorization) { - requestCopy.headers = Object.assign({}, options.request.headers, { - authorization: options.request.headers.authorization.replace( - / .*$/, - " [REDACTED]" - ) - }); - } - requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); - this.request = requestCopy; - Object.defineProperty(this, "code", { - get() { - logOnceCode( - new import_deprecation.Deprecation( - "[@octokit/request-error] `error.code` is deprecated, use `error.status`." - ) - ); - return statusCode; - } - }); - Object.defineProperty(this, "headers", { - get() { - logOnceHeaders( - new import_deprecation.Deprecation( - "[@octokit/request-error] `error.headers` is deprecated, use `error.response.headers`." 
- ) - ); - return headers || {}; - } - }); - } -}; -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 6234: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - -var __defProp = Object.defineProperty; -var __getOwnPropDesc = Object.getOwnPropertyDescriptor; -var __getOwnPropNames = Object.getOwnPropertyNames; -var __hasOwnProp = Object.prototype.hasOwnProperty; -var __export = (target, all) => { - for (var name in all) - __defProp(target, name, { get: all[name], enumerable: true }); -}; -var __copyProps = (to, from, except, desc) => { - if (from && typeof from === "object" || typeof from === "function") { - for (let key of __getOwnPropNames(from)) - if (!__hasOwnProp.call(to, key) && key !== except) - __defProp(to, key, { get: () => from[key], enumerable: !(desc = __getOwnPropDesc(from, key)) || desc.enumerable }); - } - return to; -}; -var __toCommonJS = (mod) => __copyProps(__defProp({}, "__esModule", { value: true }), mod); - -// pkg/dist-src/index.js -var dist_src_exports = {}; -__export(dist_src_exports, { - request: () => request -}); -module.exports = __toCommonJS(dist_src_exports); -var import_endpoint = __nccwpck_require__(9440); -var import_universal_user_agent = __nccwpck_require__(5030); - -// pkg/dist-src/version.js -var VERSION = "8.2.0"; - -// pkg/dist-src/is-plain-object.js -function isPlainObject(value) { - if (typeof value !== "object" || value === null) - return false; - if (Object.prototype.toString.call(value) !== "[object Object]") - return false; - const proto = Object.getPrototypeOf(value); - if (proto === null) - return true; - const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; - return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); -} - -// pkg/dist-src/fetch-wrapper.js -var import_request_error = __nccwpck_require__(537); - -// pkg/dist-src/get-buffer-response.js -function getBufferResponse(response) { - return response.arrayBuffer(); -} - -// pkg/dist-src/fetch-wrapper.js -function fetchWrapper(requestOptions) { - var _a, _b, _c; - const log = requestOptions.request && requestOptions.request.log ? requestOptions.request.log : console; - const parseSuccessResponseBody = ((_a = requestOptions.request) == null ? void 0 : _a.parseSuccessResponseBody) !== false; - if (isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body)) { - requestOptions.body = JSON.stringify(requestOptions.body); - } - let headers = {}; - let status; - let url; - let { fetch } = globalThis; - if ((_b = requestOptions.request) == null ? void 0 : _b.fetch) { - fetch = requestOptions.request.fetch; - } - if (!fetch) { - throw new Error( - "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" - ); - } - return fetch(requestOptions.url, { - method: requestOptions.method, - body: requestOptions.body, - headers: requestOptions.headers, - signal: (_c = requestOptions.request) == null ? void 0 : _c.signal, - // duplex must be set if request.body is ReadableStream or Async Iterables. - // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
- ...requestOptions.body && { duplex: "half" } - }).then(async (response) => { - url = response.url; - status = response.status; - for (const keyAndValue of response.headers) { - headers[keyAndValue[0]] = keyAndValue[1]; - } - if ("deprecation" in headers) { - const matches = headers.link && headers.link.match(/<([^>]+)>; rel="deprecation"/); - const deprecationLink = matches && matches.pop(); - log.warn( - `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${headers.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` - ); - } - if (status === 204 || status === 205) { - return; - } - if (requestOptions.method === "HEAD") { - if (status < 400) { - return; - } - throw new import_request_error.RequestError(response.statusText, status, { - response: { - url, - status, - headers, - data: void 0 - }, - request: requestOptions - }); - } - if (status === 304) { - throw new import_request_error.RequestError("Not modified", status, { - response: { - url, - status, - headers, - data: await getResponseData(response) - }, - request: requestOptions - }); - } - if (status >= 400) { - const data = await getResponseData(response); - const error = new import_request_error.RequestError(toErrorMessage(data), status, { - response: { - url, - status, - headers, - data - }, - request: requestOptions - }); - throw error; - } - return parseSuccessResponseBody ? await getResponseData(response) : response.body; - }).then((data) => { - return { - status, - url, - headers, - data - }; - }).catch((error) => { - if (error instanceof import_request_error.RequestError) - throw error; - else if (error.name === "AbortError") - throw error; - let message = error.message; - if (error.name === "TypeError" && "cause" in error) { - if (error.cause instanceof Error) { - message = error.cause.message; - } else if (typeof error.cause === "string") { - message = error.cause; - } - } - throw new import_request_error.RequestError(message, 500, { - request: requestOptions - }); - }); -} -async function getResponseData(response) { - const contentType = response.headers.get("content-type"); - if (/application\/json/.test(contentType)) { - return response.json().catch(() => response.text()).catch(() => ""); - } - if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { - return response.text(); - } - return getBufferResponse(response); -} -function toErrorMessage(data) { - if (typeof data === "string") - return data; - let suffix; - if ("documentation_url" in data) { - suffix = ` - ${data.documentation_url}`; - } else { - suffix = ""; - } - if ("message" in data) { - if (Array.isArray(data.errors)) { - return `${data.message}: ${data.errors.map(JSON.stringify).join(", ")}${suffix}`; - } - return `${data.message}${suffix}`; - } - return `Unknown error: ${JSON.stringify(data)}`; -} - -// pkg/dist-src/with-defaults.js -function withDefaults(oldEndpoint, newDefaults) { - const endpoint2 = oldEndpoint.defaults(newDefaults); - const newApi = function(route, parameters) { - const endpointOptions = endpoint2.merge(route, parameters); - if (!endpointOptions.request || !endpointOptions.request.hook) { - return fetchWrapper(endpoint2.parse(endpointOptions)); - } - const request2 = (route2, parameters2) => { - return fetchWrapper( - endpoint2.parse(endpoint2.merge(route2, parameters2)) - ); - }; - Object.assign(request2, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); - return endpointOptions.request.hook(request2, endpointOptions); - }; 
- return Object.assign(newApi, { - endpoint: endpoint2, - defaults: withDefaults.bind(null, endpoint2) - }); -} - -// pkg/dist-src/index.js -var request = withDefaults(import_endpoint.endpoint, { - headers: { - "user-agent": `octokit-request.js/${VERSION} ${(0, import_universal_user_agent.getUserAgent)()}` - } -}); -// Annotate the CommonJS export names for ESM import in node: -0 && (0); - - -/***/ }), - -/***/ 4194: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const lodash_1 = __importDefault(__nccwpck_require__(250)); -const readline_1 = __importDefault(__nccwpck_require__(4521)); -const adm_zip_1 = __importDefault(__nccwpck_require__(6761)); -const artifact_1 = __nccwpck_require__(2605); -const FilteredSite_1 = __nccwpck_require__(8336); -const DifferenceSite_1 = __nccwpck_require__(4598); -function createReadStreamSafe(filename) { - return new Promise((resolve, reject) => { - const fileStream = fs_1.default.createReadStream(filename); - fileStream.on("error", reject).on("open", () => { - resolve(fileStream); - }); - }); -} -const actionHelper = { - getRunnerID: (body) => { - const results = body.match("RunnerID:\\d+"); - if (results !== null && results.length !== 0) { - return results[0].split(":")[1]; - } - return null; - }, - processLineByLine: async (tsvFile) => { - const plugins = []; - try { - const fileStream = await createReadStreamSafe(tsvFile); - const rl = readline_1.default.createInterface({ - input: fileStream, - crlfDelay: Infinity, - }); - for await (const line of rl) { - if (!line.startsWith("#")) { - const tmp = line.split("\t"); - if (tmp[0].trim() !== "" && - tmp[1].trim().toUpperCase() === "IGNORE") { - plugins.push(tmp[0].trim()); - } - } - } - } - catch (err) { - console.log(`Error when reading the rules file: ${tsvFile}`); - } - return plugins; - }, - createMessage: (sites, runnerID, runnerLink) => { - const NXT_LINE = "\n"; - const TAB = "\t"; - const BULLET = "-"; - let msg = ""; - const instanceCount = 5; - sites.forEach((site) => { - msg = - msg + - `${BULLET} Site: [${site["@name"]}](${site["@name"]}) ${NXT_LINE}`; - if ("alerts" in site) { - if (site.alerts.length !== 0) { - msg = `${msg} ${TAB} **New Alerts** ${NXT_LINE}`; - site.alerts.forEach((alert) => { - msg = - msg + - TAB + - `${BULLET} **${alert.name}** [${alert.pluginid}] total: ${alert.instances.length}: ${NXT_LINE}`; - for (let i = 0; i < alert.instances.length; i++) { - if (i >= instanceCount) { - msg = msg + TAB + TAB + `${BULLET} .. 
${NXT_LINE}`; - break; - } - const instance = alert.instances[i]; - msg = - msg + - TAB + - TAB + - `${BULLET} [${instance.uri}](${instance.uri}) ${NXT_LINE}`; - } - }); - msg = msg + NXT_LINE; - } - } - if ((0, DifferenceSite_1.isDifferenceSite)(site)) { - if (site.removedAlerts.length !== 0) { - msg = `${msg} ${TAB} **Resolved Alerts** ${NXT_LINE}`; - site.removedAlerts.forEach((alert) => { - msg = - msg + - TAB + - `${BULLET} **${alert.name}** [${alert.pluginid}] total: ${alert.instances.length}: ${NXT_LINE}`; - }); - msg = msg + NXT_LINE; - } - } - if ((0, FilteredSite_1.isFilteredSite)(site)) { - if (site.ignoredAlerts.length !== 0) { - msg = `${msg} ${TAB} **Ignored Alerts** ${NXT_LINE}`; - site.ignoredAlerts.forEach((alert) => { - msg = - msg + - TAB + - `${BULLET} **${alert.name}** [${alert.pluginid}] total: ${alert.instances.length}: ${NXT_LINE}`; - }); - msg = msg + NXT_LINE; - } - } - msg = msg + NXT_LINE; - }); - if (msg.trim() !== "") { - msg = msg + NXT_LINE + runnerLink; - msg = msg + NXT_LINE + runnerID; - } - return msg; - }, - generateDifference: (newReport, oldReport) => { - newReport.updated = false; - const siteClone = []; - newReport.site.forEach((newReportSite) => { - // Check if the new report site already exists in the previous report - const previousSite = lodash_1.default.filter(oldReport.site, (s) => s["@name"] === newReportSite["@name"]); - // If does not exists add it to the array without further processing - if (previousSite.length === 0) { - newReport.updated = true; - siteClone.push(newReportSite); - } - else { - // deep clone the variable for further processing - const newSite = lodash_1.default.clone(newReportSite); - const currentAlerts = newReportSite.alerts; - const previousAlerts = previousSite[0].alerts; - const newAlerts = lodash_1.default.differenceBy(currentAlerts, previousAlerts, "pluginid"); - let removedAlerts = lodash_1.default.differenceBy(previousAlerts, currentAlerts, "pluginid"); - let ignoredAlerts = []; - if ((0, FilteredSite_1.isFilteredSite)(newReportSite) && (0, FilteredSite_1.isFilteredSite)(previousSite[0])) { - ignoredAlerts = lodash_1.default.differenceBy(newReportSite.ignoredAlerts, previousSite[0].ignoredAlerts, "pluginid"); - } - else if ((0, FilteredSite_1.isFilteredSite)(newReportSite)) { - ignoredAlerts = newReportSite.ignoredAlerts; - } - removedAlerts = lodash_1.default.differenceBy(removedAlerts, ignoredAlerts, "pluginid"); - newSite.alerts = newAlerts; - newSite.removedAlerts = removedAlerts; - newSite.ignoredAlerts = ignoredAlerts; - siteClone.push(newSite); - if (newAlerts.length !== 0 || - removedAlerts.length !== 0 || - ignoredAlerts.length !== 0) { - newReport.updated = true; - } - } - }); - return siteClone; - }, - readMDFile: async (reportName) => { - let res = ""; - try { - res = fs_1.default.readFileSync(reportName, { encoding: "base64" }); - } - catch (err) { - console.log("error while reading the markdown file!"); - } - return res; - }, - checkIfAlertsExists: (jsonReport) => { - return jsonReport.site.some((s) => { - return "alerts" in s && s.alerts.length !== 0; - }); - }, - filterReport: async (jsonReport, plugins) => { - jsonReport.site.forEach((s) => { - if ("alerts" in s && s.alerts.length !== 0) { - console.log(`starting to filter the alerts for site: ${s["@name"]}`); - const newAlerts = s.alerts.filter(function (e) { - return !plugins.includes(e.pluginid); - }); - const removedAlerts = s.alerts.filter(function (e) { - return plugins.includes(e.pluginid); - }); - s.alerts = newAlerts; - s.ignoredAlerts = 
removedAlerts; - console.log(`#${newAlerts.length} alerts have been identified` + - ` and #${removedAlerts.length} alerts have been ignored for the site.`); - } - }); - return jsonReport; - }, - readPreviousReport: async (octokit, owner, repo, workSpace, runnerID, artifactName = "zap_scan") => { - let previousReport; - try { - const artifactList = await octokit.actions.listWorkflowRunArtifacts({ - owner: owner, - repo: repo, - run_id: runnerID, - }); - const artifacts = artifactList.data.artifacts; - let artifactID; - if (artifacts.length !== 0) { - artifacts.forEach((a) => { - if (a.name === artifactName) { - artifactID = a.id; - } - }); - } - if (artifactID !== undefined) { - const download = await octokit.actions.downloadArtifact({ - owner: owner, - repo: repo, - artifact_id: artifactID, - archive_format: "zip", - }); - const zip = new adm_zip_1.default(Buffer.from(download.data)); - const zipEntries = zip.getEntries(); - zipEntries.forEach(function (zipEntry) { - if (zipEntry.entryName === "report_json.json") { - previousReport = JSON.parse(zipEntry.getData().toString("utf8")); - } - }); - } - } - catch (e) { - console.log(`Error occurred while downloading the artifacts!`); - } - return previousReport; - }, - uploadArtifacts: async (rootDir, mdReport, jsonReport, htmlReport, artifactName = "zap_scan") => { - const artifactClient = (0, artifact_1.create)(); - const files = [ - `${rootDir}/${mdReport}`, - `${rootDir}/${jsonReport}`, - `${rootDir}/${htmlReport}`, - ]; - const rootDirectory = rootDir; - const options = { - continueOnError: true, - }; - await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options); - }, -}; -exports["default"] = actionHelper; - - -/***/ }), - -/***/ 4049: -/***/ (function(__unused_webpack_module, exports, __nccwpck_require__) { - -"use strict"; - -var __importDefault = (this && this.__importDefault) || function (mod) { - return (mod && mod.__esModule) ? 
mod : { "default": mod }; -}; -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.helper = exports.main = void 0; -const fs_1 = __importDefault(__nccwpck_require__(7147)); -const github_1 = __nccwpck_require__(5438); -const action_helper_1 = __importDefault(__nccwpck_require__(4194)); -const actionCommon = { - processReport: async (token, workSpace, plugins, currentRunnerID, issueTitle, repoName, allowIssueWriting = true, artifactName = "zap_scan") => { - const jsonReportName = "report_json.json"; - const mdReportName = "report_md.md"; - const htmlReportName = "report_html.html"; - if (!allowIssueWriting) { - await action_helper_1.default.uploadArtifacts(workSpace, mdReportName, jsonReportName, htmlReportName, artifactName); - return; - } - let openIssue; - let currentReport; - let previousRunnerID; - let previousReport = {}; - let create_new_issue = false; - const tmp = repoName.split("/"); - const owner = tmp[0]; - const repo = tmp[1]; - const octokit = (0, github_1.getOctokit)(token, { - baseUrl: process.env.GITHUB_API_URL, - }).rest; - try { - const jReportFile = fs_1.default.readFileSync(`${workSpace}/${jsonReportName}`); - currentReport = JSON.parse(jReportFile.toString()); - } - catch (e) { - console.log("Failed to locate the json report generated by ZAP Scan!"); - return; - } - const issues = await octokit.search.issuesAndPullRequests({ - q: encodeURI(`is:issue state:open repo:${owner}/${repo} ${issueTitle}`).replace(/%20/g, "+"), - sort: "updated", - }); - // If there is no existing open issue then create a new issue - if (issues.data.items.length === 0) { - create_new_issue = true; - } - else { - let login = "github-actions[bot]"; - try { - login = (await octokit.users.getAuthenticated()).data.login; - } - catch (e) { - console.log(`Using ${login} to search for issues.`); - } - // Sometimes search API returns recently closed issue as an open issue - for (const issue of issues.data.items) { - if (issue.state === "open" && issue.user.login === login) { - openIssue = issue; - break; - } - } - if (openIssue === undefined) { - create_new_issue = true; - } - else { - console.log(`Ongoing open issue has been identified #${openIssue.number}`); - // If there is no comments then read the body - if (openIssue.comments === 0) { - previousRunnerID = action_helper_1.default.getRunnerID(openIssue.body); - } - else { - const comments = await octokit.issues.listComments({ - owner: owner, - repo: repo, - issue_number: openIssue.number, - }); - let lastBotComment; - const lastCommentIndex = comments.data.length - 1; - for (let i = lastCommentIndex; i >= 0; i--) { - if (comments.data[i].user.login === login) { - lastBotComment = comments.data[i]; - break; - } - } - if (lastBotComment === undefined) { - previousRunnerID = action_helper_1.default.getRunnerID(openIssue.body); - } - else { - previousRunnerID = action_helper_1.default.getRunnerID(lastBotComment.body); - } - } - if (previousRunnerID !== null) { - previousReport = await action_helper_1.default.readPreviousReport(octokit, owner, repo, workSpace, previousRunnerID, artifactName); - if (previousReport === undefined) { - create_new_issue = true; - } - } - } - } - if (plugins.length !== 0) { - console.log(`${plugins.length} plugins will be ignored according to the rules configuration`); - currentReport = await action_helper_1.default.filterReport(currentReport, plugins); - // Update the newly filtered report - fs_1.default.unlinkSync(`${workSpace}/${jsonReportName}`); - 
fs_1.default.writeFileSync(`${workSpace}/${jsonReportName}`, JSON.stringify(currentReport)); - console.log("The current report is updated with the ignored alerts!"); - } - const newAlertExits = action_helper_1.default.checkIfAlertsExists(currentReport); - console.log(`Alerts present in the current report: ${newAlertExits.toString()}`); - if (!newAlertExits) { - // If no new alerts have been found close the issue - console.log("No new alerts have been identified by the ZAP Scan"); - if (openIssue != null && openIssue.state === "open") { - // close the issue with a comment - console.log(`Starting to close the issue #${openIssue.number}`); - try { - await octokit.issues.createComment({ - owner: owner, - repo: repo, - issue_number: openIssue.number, - body: "All the alerts have been resolved during the last ZAP Scan!", - }); - await octokit.issues.update({ - owner: owner, - repo: repo, - issue_number: openIssue.number, - state: "closed", - }); - console.log(`Successfully closed the issue #${openIssue.number}`); - } - catch (err) { - console.log(`Error occurred while closing the issue with a comment! err: ${err.toString()}`); - } - } - else if (openIssue != null && openIssue.state === "closed") { - console.log("No alerts found by ZAP Scan and no active issue is found in the repository, exiting the program!"); - } - return; - } - const runnerInfo = `RunnerID:${currentRunnerID}`; - const runnerLink = `View the [following link](${github_1.context.serverUrl}/${owner}/${repo}/actions/runs/${currentRunnerID})` + - ` to download the report.`; - if (create_new_issue) { - const msg = action_helper_1.default.createMessage(currentReport.site, runnerInfo, runnerLink) + - "\n\n---\nZAP is supported by the [Crash Override Open Source Fellowship](https://crashoverride.com/?zap=act)"; - const newIssue = await octokit.issues.create({ - owner: owner, - repo: repo, - title: issueTitle, - body: msg, - }); - console.log(`Process completed successfully and a new issue #${newIssue.data.number} has been created for the ZAP Scan.`); - } - else { - const siteClone = action_helper_1.default.generateDifference(currentReport, previousReport); - if (currentReport.updated) { - console.log("The current report has changes compared to the previous report"); - try { - const msg = action_helper_1.default.createMessage(siteClone, runnerInfo, runnerLink); - await octokit.issues.createComment({ - owner: owner, - repo: repo, - issue_number: openIssue.number, - body: msg, - }); - console.log(`The issue #${openIssue.number} has been updated with the latest ZAP scan results!`); - console.log("ZAP Scan process completed successfully!"); - } - catch (err) { - console.log(`Error occurred while updating the issue #${openIssue.number} with the latest ZAP scan: ${err.toString()}`); - } - } - else { - console.log("No changes have been observed from the previous scan and current scan!, exiting the program!"); - } - } - await action_helper_1.default.uploadArtifacts(workSpace, mdReportName, jsonReportName, htmlReportName, artifactName); - }, -}; -exports.main = actionCommon; -exports.helper = action_helper_1.default; - - -/***/ }), - -/***/ 4598: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - -Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isDifferenceSite = void 0; -function isDifferenceSite(site) { - return Array.isArray(site.removedAlerts); -} -exports.isDifferenceSite = isDifferenceSite; - - -/***/ }), - -/***/ 8336: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - 
-Object.defineProperty(exports, "__esModule", ({ value: true })); -exports.isFilteredSite = void 0; -function isFilteredSite(site) { - return Array.isArray(site.ignoredAlerts); -} -exports.isFilteredSite = isFilteredSite; - - -/***/ }), - -/***/ 6761: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const Utils = __nccwpck_require__(5182); -const pth = __nccwpck_require__(1017); -const ZipEntry = __nccwpck_require__(4057); -const ZipFile = __nccwpck_require__(7744); - -const get_Bool = (val, def) => (typeof val === "boolean" ? val : def); -const get_Str = (val, def) => (typeof val === "string" ? val : def); - -const defaultOptions = { - // option "noSort" : if true it disables files sorting - noSort: false, - // read entries during load (initial loading may be slower) - readEntries: false, - // default method is none - method: Utils.Constants.NONE, - // file system - fs: null -}; - -module.exports = function (/**String*/ input, /** object */ options) { - let inBuffer = null; - - // create object based default options, allowing them to be overwritten - const opts = Object.assign(Object.create(null), defaultOptions); - - // test input variable - if (input && "object" === typeof input) { - // if value is not buffer we accept it to be object with options - if (!(input instanceof Uint8Array)) { - Object.assign(opts, input); - input = opts.input ? opts.input : undefined; - if (opts.input) delete opts.input; - } - - // if input is buffer - if (Buffer.isBuffer(input)) { - inBuffer = input; - opts.method = Utils.Constants.BUFFER; - input = undefined; - } - } - - // assign options - Object.assign(opts, options); - - // instanciate utils filesystem - const filetools = new Utils(opts); - - // if input is file name we retrieve its content - if (input && "string" === typeof input) { - // load zip file - if (filetools.fs.existsSync(input)) { - opts.method = Utils.Constants.FILE; - opts.filename = input; - inBuffer = filetools.fs.readFileSync(input); - } else { - throw new Error(Utils.Errors.INVALID_FILENAME); - } - } - - // create variable - const _zip = new ZipFile(inBuffer, opts); - - const { canonical, sanitize } = Utils; - - function getEntry(/**Object*/ entry) { - if (entry && _zip) { - var item; - // If entry was given as a file name - if (typeof entry === "string") item = _zip.getEntry(entry); - // if entry was given as a ZipEntry object - if (typeof entry === "object" && typeof entry.entryName !== "undefined" && typeof entry.header !== "undefined") item = _zip.getEntry(entry.entryName); - - if (item) { - return item; - } - } - return null; - } - - function fixPath(zipPath) { - const { join, normalize, sep } = pth.posix; - // convert windows file separators and normalize - return join(".", normalize(sep + zipPath.split("\\").join(sep) + sep)); - } - - return { - /** - * Extracts the given entry from the archive and returns the content as a Buffer object - * @param entry ZipEntry object or String with the full path of the entry - * - * @return Buffer or Null in case of error - */ - readFile: function (/**Object*/ entry, /*String, Buffer*/ pass) { - var item = getEntry(entry); - return (item && item.getData(pass)) || null; - }, - - /** - * Asynchronous readFile - * @param entry ZipEntry object or String with the full path of the entry - * @param callback - * - * @return Buffer or Null in case of error - */ - readFileAsync: function (/**Object*/ entry, /**Function*/ callback) { - var item = getEntry(entry); - if (item) { - item.getDataAsync(callback); - } else { - 
callback(null, "getEntry failed for:" + entry); - } - }, - - /** - * Extracts the given entry from the archive and returns the content as plain text in the given encoding - * @param entry ZipEntry object or String with the full path of the entry - * @param encoding Optional. If no encoding is specified utf8 is used - * - * @return String - */ - readAsText: function (/**Object*/ entry, /**String=*/ encoding) { - var item = getEntry(entry); - if (item) { - var data = item.getData(); - if (data && data.length) { - return data.toString(encoding || "utf8"); - } - } - return ""; - }, - - /** - * Asynchronous readAsText - * @param entry ZipEntry object or String with the full path of the entry - * @param callback - * @param encoding Optional. If no encoding is specified utf8 is used - * - * @return String - */ - readAsTextAsync: function (/**Object*/ entry, /**Function*/ callback, /**String=*/ encoding) { - var item = getEntry(entry); - if (item) { - item.getDataAsync(function (data, err) { - if (err) { - callback(data, err); - return; - } - - if (data && data.length) { - callback(data.toString(encoding || "utf8")); - } else { - callback(""); - } - }); - } else { - callback(""); - } - }, - - /** - * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory - * - * @param entry - */ - deleteFile: function (/**Object*/ entry) { - // @TODO: test deleteFile - var item = getEntry(entry); - if (item) { - _zip.deleteEntry(item.entryName); - } - }, - - /** - * Adds a comment to the zip. The zip must be rewritten after adding the comment. - * - * @param comment - */ - addZipComment: function (/**String*/ comment) { - // @TODO: test addZipComment - _zip.comment = comment; - }, - - /** - * Returns the zip comment - * - * @return String - */ - getZipComment: function () { - return _zip.comment || ""; - }, - - /** - * Adds a comment to a specified zipEntry. The zip must be rewritten after adding the comment - * The comment cannot exceed 65535 characters in length - * - * @param entry - * @param comment - */ - addZipEntryComment: function (/**Object*/ entry, /**String*/ comment) { - var item = getEntry(entry); - if (item) { - item.comment = comment; - } - }, - - /** - * Returns the comment of the specified entry - * - * @param entry - * @return String - */ - getZipEntryComment: function (/**Object*/ entry) { - var item = getEntry(entry); - if (item) { - return item.comment || ""; - } - return ""; - }, - - /** - * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content - * - * @param entry - * @param content - */ - updateFile: function (/**Object*/ entry, /**Buffer*/ content) { - var item = getEntry(entry); - if (item) { - item.setData(content); - } - }, - - /** - * Adds a file from the disk to the archive - * - * @param localPath File to add to zip - * @param zipPath Optional path inside the zip - * @param zipName Optional name for the file - */ - addLocalFile: function (/**String*/ localPath, /**String=*/ zipPath, /**String=*/ zipName, /**String*/ comment) { - if (filetools.fs.existsSync(localPath)) { - // fix ZipPath - zipPath = zipPath ? fixPath(zipPath) : ""; - - // p - local file name - var p = localPath.split("\\").join("/").split("/").pop(); - - // add file name into zippath - zipPath += zipName ? 
zipName : p; - - // read file attributes - const _attr = filetools.fs.statSync(localPath); - - // add file into zip file - this.addFile(zipPath, filetools.fs.readFileSync(localPath), comment, _attr); - } else { - throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); - } - }, - - /** - * Adds a local directory and all its nested files and directories to the archive - * - * @param localPath - * @param zipPath optional path inside zip - * @param filter optional RegExp or Function if files match will - * be included. - * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object - */ - addLocalFolder: function (/**String*/ localPath, /**String=*/ zipPath, /**=RegExp|Function*/ filter, /**=number|object*/ attr) { - // Prepare filter - if (filter instanceof RegExp) { - // if filter is RegExp wrap it - filter = (function (rx) { - return function (filename) { - return rx.test(filename); - }; - })(filter); - } else if ("function" !== typeof filter) { - // if filter is not function we will replace it - filter = function () { - return true; - }; - } - - // fix ZipPath - zipPath = zipPath ? fixPath(zipPath) : ""; - - // normalize the path first - localPath = pth.normalize(localPath); - - if (filetools.fs.existsSync(localPath)) { - const items = filetools.findFiles(localPath); - const self = this; - - if (items.length) { - items.forEach(function (filepath) { - var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix - if (filter(p)) { - var stats = filetools.fs.statSync(filepath); - if (stats.isFile()) { - self.addFile(zipPath + p, filetools.fs.readFileSync(filepath), "", attr ? attr : stats); - } else { - self.addFile(zipPath + p + "/", Buffer.alloc(0), "", attr ? attr : stats); - } - } - }); - } - } else { - throw new Error(Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); - } - }, - - /** - * Asynchronous addLocalFile - * @param localPath - * @param callback - * @param zipPath optional path inside zip - * @param filter optional RegExp or Function if files match will - * be included. - */ - addLocalFolderAsync: function (/*String*/ localPath, /*Function*/ callback, /*String*/ zipPath, /*RegExp|Function*/ filter) { - if (filter instanceof RegExp) { - filter = (function (rx) { - return function (filename) { - return rx.test(filename); - }; - })(filter); - } else if ("function" !== typeof filter) { - filter = function () { - return true; - }; - } - - // fix ZipPath - zipPath = zipPath ? 
fixPath(zipPath) : ""; - - // normalize the path first - localPath = pth.normalize(localPath); - - var self = this; - filetools.fs.open(localPath, "r", function (err) { - if (err && err.code === "ENOENT") { - callback(undefined, Utils.Errors.FILE_NOT_FOUND.replace("%s", localPath)); - } else if (err) { - callback(undefined, err); - } else { - var items = filetools.findFiles(localPath); - var i = -1; - - var next = function () { - i += 1; - if (i < items.length) { - var filepath = items[i]; - var p = pth.relative(localPath, filepath).split("\\").join("/"); //windows fix - p = p - .normalize("NFD") - .replace(/[\u0300-\u036f]/g, "") - .replace(/[^\x20-\x7E]/g, ""); // accent fix - if (filter(p)) { - filetools.fs.stat(filepath, function (er0, stats) { - if (er0) callback(undefined, er0); - if (stats.isFile()) { - filetools.fs.readFile(filepath, function (er1, data) { - if (er1) { - callback(undefined, er1); - } else { - self.addFile(zipPath + p, data, "", stats); - next(); - } - }); - } else { - self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); - next(); - } - }); - } else { - process.nextTick(() => { - next(); - }); - } - } else { - callback(true, undefined); - } - }; - - next(); - } - }); - }, - - /** - * - * @param {string} localPath - path where files will be extracted - * @param {object} props - optional properties - * @param {string} props.zipPath - optional path inside zip - * @param {regexp, function} props.filter - RegExp or Function if files match will be included. - */ - addLocalFolderPromise: function (/*String*/ localPath, /* object */ props) { - return new Promise((resolve, reject) => { - const { filter, zipPath } = Object.assign({}, props); - this.addLocalFolderAsync( - localPath, - (done, err) => { - if (err) reject(err); - if (done) resolve(this); - }, - zipPath, - filter - ); - }); - }, - - /** - * Allows you to create a entry (file or directory) in the zip file. - * If you want to create a directory the entryName must end in / and a null buffer should be provided. - * Comment and attributes are optional - * - * @param {string} entryName - * @param {Buffer | string} content - file content as buffer or utf8 coded string - * @param {string} comment - file comment - * @param {number | object} attr - number as unix file permissions, object as filesystem Stats object - */ - addFile: function (/**String*/ entryName, /**Buffer*/ content, /**String*/ comment, /**Number*/ attr) { - let entry = getEntry(entryName); - const update = entry != null; - - // prepare new entry - if (!update) { - entry = new ZipEntry(); - entry.entryName = entryName; - } - entry.comment = comment || ""; - - const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats; - - // last modification time from file stats - if (isStat) { - entry.header.time = attr.mtime; - } - - // Set file attribute - var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag) - - // extended attributes field for Unix - // set file type either S_IFDIR / S_IFREG - let unix = entry.isDirectory ? 0x4000 : 0x8000; - - if (isStat) { - // File attributes from file stats - unix |= 0xfff & attr.mode; - } else if ("number" === typeof attr) { - // attr from given attr values - unix |= 0xfff & attr; - } else { - // Default values: - unix |= entry.isDirectory ? 
0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) - } - - fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes - - entry.attr = fileattr; - - entry.setData(content); - if (!update) _zip.setEntry(entry); - }, - - /** - * Returns an array of ZipEntry objects representing the files and folders inside the archive - * - * @return Array - */ - getEntries: function () { - return _zip ? _zip.entries : []; - }, - - /** - * Returns a ZipEntry object representing the file or folder specified by ``name``. - * - * @param name - * @return ZipEntry - */ - getEntry: function (/**String*/ name) { - return getEntry(name); - }, - - getEntryCount: function () { - return _zip.getEntryCount(); - }, - - forEach: function (callback) { - return _zip.forEach(callback); - }, - - /** - * Extracts the given entry to the given targetPath - * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted - * - * @param entry ZipEntry object or String with the full path of the entry - * @param targetPath Target folder where to write the file - * @param maintainEntryPath If maintainEntryPath is true and the entry is inside a folder, the entry folder - * will be created in targetPath as well. Default is TRUE - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. - * Default is FALSE - * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. - * Default is FALSE - * @param outFileName String If set will override the filename of the extracted file (Only works if the entry is a file) - * - * @return Boolean - */ - extractEntryTo: function ( - /**Object*/ entry, - /**String*/ targetPath, - /**Boolean*/ maintainEntryPath, - /**Boolean*/ overwrite, - /**Boolean*/ keepOriginalPermission, - /**String**/ outFileName - ) { - overwrite = get_Bool(overwrite, false); - keepOriginalPermission = get_Bool(keepOriginalPermission, false); - maintainEntryPath = get_Bool(maintainEntryPath, true); - outFileName = get_Str(outFileName, get_Str(keepOriginalPermission, undefined)); - - var item = getEntry(entry); - if (!item) { - throw new Error(Utils.Errors.NO_ENTRY); - } - - var entryName = canonical(item.entryName); - - var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName)); - - if (item.isDirectory) { - var children = _zip.getEntryChildren(item); - children.forEach(function (child) { - if (child.isDirectory) return; - var content = child.getData(); - if (!content) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - var name = canonical(child.entryName); - var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); - // The reverse operation for attr depend on method addFile() - const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined; - filetools.writeFileTo(childName, content, overwrite, fileAttr); - }); - return true; - } - - var content = item.getData(); - if (!content) throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - - if (filetools.fs.existsSync(target) && !overwrite) { - throw new Error(Utils.Errors.CANT_OVERRIDE); - } - // The reverse operation for attr depend on method addFile() - const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; - filetools.writeFileTo(target, content, overwrite, fileAttr); - - return true; - }, - - /** - * Test the archive - * - */ - test: function (pass) { - if (!_zip) { - return false; - } - - for (var entry in _zip.entries) { - try { - if (entry.isDirectory) { - continue; - } - var content = _zip.entries[entry].getData(pass); - if (!content) { - return false; - } - } catch (err) { - return false; - } - } - return true; - }, - - /** - * Extracts the entire archive to the given location - * - * @param targetPath Target location - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. - * Default is FALSE - * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. - * Default is FALSE - */ - extractAllTo: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /*String, Buffer*/ pass) { - overwrite = get_Bool(overwrite, false); - pass = get_Str(keepOriginalPermission, pass); - keepOriginalPermission = get_Bool(keepOriginalPermission, false); - if (!_zip) { - throw new Error(Utils.Errors.NO_ZIP); - } - _zip.entries.forEach(function (entry) { - var entryName = sanitize(targetPath, canonical(entry.entryName.toString())); - if (entry.isDirectory) { - filetools.makeDir(entryName); - return; - } - var content = entry.getData(pass); - if (!content) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - // The reverse operation for attr depend on method addFile() - const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; - filetools.writeFileTo(entryName, content, overwrite, fileAttr); - try { - filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time); - } catch (err) { - throw new Error(Utils.Errors.CANT_EXTRACT_FILE); - } - }); - }, - - /** - * Asynchronous extractAllTo - * - * @param targetPath Target location - * @param overwrite If the file already exists at the target path, the file will be overwriten if this is true. - * Default is FALSE - * @param keepOriginalPermission The file will be set as the permission from the entry if this is true. - * Default is FALSE - * @param callback The callback will be executed when all entries are extracted successfully or any error is thrown. 
- */ - extractAllToAsync: function (/**String*/ targetPath, /**Boolean*/ overwrite, /**Boolean*/ keepOriginalPermission, /**Function*/ callback) { - overwrite = get_Bool(overwrite, false); - if (typeof keepOriginalPermission === "function" && !callback) callback = keepOriginalPermission; - keepOriginalPermission = get_Bool(keepOriginalPermission, false); - if (!callback) { - callback = function (err) { - throw new Error(err); - }; - } - if (!_zip) { - callback(new Error(Utils.Errors.NO_ZIP)); - return; - } - - targetPath = pth.resolve(targetPath); - // convert entryName to - const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName.toString()))); - const getError = (msg, file) => new Error(msg + ': "' + file + '"'); - - // separate directories from files - const dirEntries = []; - const fileEntries = new Set(); - _zip.entries.forEach((e) => { - if (e.isDirectory) { - dirEntries.push(e); - } else { - fileEntries.add(e); - } - }); - - // Create directory entries first synchronously - // this prevents race condition and assures folders are there before writing files - for (const entry of dirEntries) { - const dirPath = getPath(entry); - // The reverse operation for attr depend on method addFile() - const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; - try { - filetools.makeDir(dirPath); - if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr); - // in unix timestamp will change if files are later added to folder, but still - filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time); - } catch (er) { - callback(getError("Unable to create folder", dirPath)); - } - } - - // callback wrapper, for some house keeping - const done = () => { - if (fileEntries.size === 0) { - callback(); - } - }; - - // Extract file entries asynchronously - for (const entry of fileEntries.values()) { - const entryName = pth.normalize(canonical(entry.entryName.toString())); - const filePath = sanitize(targetPath, entryName); - entry.getDataAsync(function (content, err_1) { - if (err_1) { - callback(new Error(err_1)); - return; - } - if (!content) { - callback(new Error(Utils.Errors.CANT_EXTRACT_FILE)); - } else { - // The reverse operation for attr depend on method addFile() - const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; - filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { - if (!succ) { - callback(getError("Unable to write file", filePath)); - return; - } - filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { - if (err_2) { - callback(getError("Unable to set times", filePath)); - return; - } - fileEntries.delete(entry); - // call the callback if it was last entry - done(); - }); - }); - } - }); - } - // call the callback if fileEntries was empty - done(); - }, - - /** - * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip - * - * @param targetFileName - * @param callback - */ - writeZip: function (/**String*/ targetFileName, /**Function*/ callback) { - if (arguments.length === 1) { - if (typeof targetFileName === "function") { - callback = targetFileName; - targetFileName = ""; - } - } - - if (!targetFileName && opts.filename) { - targetFileName = opts.filename; - } - if (!targetFileName) return; - - var zipData = _zip.compressToBuffer(); - if (zipData) { - var ok = filetools.writeFileTo(targetFileName, zipData, true); - if (typeof callback === "function") callback(!ok ? new Error("failed") : null, ""); - } - }, - - writeZipPromise: function (/**String*/ targetFileName, /* object */ props) { - const { overwrite, perm } = Object.assign({ overwrite: true }, props); - - return new Promise((resolve, reject) => { - // find file name - if (!targetFileName && opts.filename) targetFileName = opts.filename; - if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing"); - - this.toBufferPromise().then((zipData) => { - const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file")); - filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); - }, reject); - }); - }, - - toBufferPromise: function () { - return new Promise((resolve, reject) => { - _zip.toAsyncBuffer(resolve, reject); - }); - }, - - /** - * Returns the content of the entire zip file as a Buffer object - * - * @return Buffer - */ - toBuffer: function (/**Function=*/ onSuccess, /**Function=*/ onFail, /**Function=*/ onItemStart, /**Function=*/ onItemEnd) { - this.valueOf = 2; - if (typeof onSuccess === "function") { - _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); - return null; - } - return _zip.compressToBuffer(); - } - }; -}; - - -/***/ }), - -/***/ 9032: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Utils = __nccwpck_require__(5182), - Constants = Utils.Constants; - -/* The central directory file header */ -module.exports = function () { - var _verMade = 20, // v2.0 - _version = 10, // v1.0 - _flags = 0, - _method = 0, - _time = 0, - _crc = 0, - _compressedSize = 0, - _size = 0, - _fnameLen = 0, - _extraLen = 0, - _comLen = 0, - _diskStart = 0, - _inattr = 0, - _attr = 0, - _offset = 0; - - _verMade |= Utils.isWin ? 0x0a00 : 0x0300; - - // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. 
- // Without it file names may be corrupted for other apps when file names use unicode chars - _flags |= Constants.FLG_EFS; - - var _dataHeader = {}; - - function setTime(val) { - val = new Date(val); - _time = - (((val.getFullYear() - 1980) & 0x7f) << 25) | // b09-16 years from 1980 - ((val.getMonth() + 1) << 21) | // b05-08 month - (val.getDate() << 16) | // b00-04 hour - // 2 bytes time - (val.getHours() << 11) | // b11-15 hour - (val.getMinutes() << 5) | // b05-10 minute - (val.getSeconds() >> 1); // b00-04 seconds divided by 2 - } - - setTime(+new Date()); - - return { - get made() { - return _verMade; - }, - set made(val) { - _verMade = val; - }, - - get version() { - return _version; - }, - set version(val) { - _version = val; - }, - - get flags() { - return _flags; - }, - set flags(val) { - _flags = val; - }, - - get method() { - return _method; - }, - set method(val) { - switch (val) { - case Constants.STORED: - this.version = 10; - case Constants.DEFLATED: - default: - this.version = 20; - } - _method = val; - }, - - get time() { - return new Date(((_time >> 25) & 0x7f) + 1980, ((_time >> 21) & 0x0f) - 1, (_time >> 16) & 0x1f, (_time >> 11) & 0x1f, (_time >> 5) & 0x3f, (_time & 0x1f) << 1); - }, - set time(val) { - setTime(val); - }, - - get crc() { - return _crc; - }, - set crc(val) { - _crc = Math.max(0, val) >>> 0; - }, - - get compressedSize() { - return _compressedSize; - }, - set compressedSize(val) { - _compressedSize = Math.max(0, val) >>> 0; - }, - - get size() { - return _size; - }, - set size(val) { - _size = Math.max(0, val) >>> 0; - }, - - get fileNameLength() { - return _fnameLen; - }, - set fileNameLength(val) { - _fnameLen = val; - }, - - get extraLength() { - return _extraLen; - }, - set extraLength(val) { - _extraLen = val; - }, - - get commentLength() { - return _comLen; - }, - set commentLength(val) { - _comLen = val; - }, - - get diskNumStart() { - return _diskStart; - }, - set diskNumStart(val) { - _diskStart = Math.max(0, val) >>> 0; - }, - - get inAttr() { - return _inattr; - }, - set inAttr(val) { - _inattr = Math.max(0, val) >>> 0; - }, - - get attr() { - return _attr; - }, - set attr(val) { - _attr = Math.max(0, val) >>> 0; - }, - - // get Unix file permissions - get fileAttr() { - return _attr ? 
(((_attr >>> 0) | 0) >> 16) & 0xfff : 0; - }, - - get offset() { - return _offset; - }, - set offset(val) { - _offset = Math.max(0, val) >>> 0; - }, - - get encripted() { - return (_flags & 1) === 1; - }, - - get entryHeaderSize() { - return Constants.CENHDR + _fnameLen + _extraLen + _comLen; - }, - - get realDataOffset() { - return _offset + Constants.LOCHDR + _dataHeader.fnameLen + _dataHeader.extraLen; - }, - - get dataHeader() { - return _dataHeader; - }, - - loadDataHeaderFromBinary: function (/*Buffer*/ input) { - var data = input.slice(_offset, _offset + Constants.LOCHDR); - // 30 bytes and should start with "PK\003\004" - if (data.readUInt32LE(0) !== Constants.LOCSIG) { - throw new Error(Utils.Errors.INVALID_LOC); - } - _dataHeader = { - // version needed to extract - version: data.readUInt16LE(Constants.LOCVER), - // general purpose bit flag - flags: data.readUInt16LE(Constants.LOCFLG), - // compression method - method: data.readUInt16LE(Constants.LOCHOW), - // modification time (2 bytes time, 2 bytes date) - time: data.readUInt32LE(Constants.LOCTIM), - // uncompressed file crc-32 value - crc: data.readUInt32LE(Constants.LOCCRC), - // compressed size - compressedSize: data.readUInt32LE(Constants.LOCSIZ), - // uncompressed size - size: data.readUInt32LE(Constants.LOCLEN), - // filename length - fnameLen: data.readUInt16LE(Constants.LOCNAM), - // extra field length - extraLen: data.readUInt16LE(Constants.LOCEXT) - }; - }, - - loadFromBinary: function (/*Buffer*/ data) { - // data should be 46 bytes and start with "PK 01 02" - if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { - throw new Error(Utils.Errors.INVALID_CEN); - } - // version made by - _verMade = data.readUInt16LE(Constants.CENVEM); - // version needed to extract - _version = data.readUInt16LE(Constants.CENVER); - // encrypt, decrypt flags - _flags = data.readUInt16LE(Constants.CENFLG); - // compression method - _method = data.readUInt16LE(Constants.CENHOW); - // modification time (2 bytes time, 2 bytes date) - _time = data.readUInt32LE(Constants.CENTIM); - // uncompressed file crc-32 value - _crc = data.readUInt32LE(Constants.CENCRC); - // compressed size - _compressedSize = data.readUInt32LE(Constants.CENSIZ); - // uncompressed size - _size = data.readUInt32LE(Constants.CENLEN); - // filename length - _fnameLen = data.readUInt16LE(Constants.CENNAM); - // extra field length - _extraLen = data.readUInt16LE(Constants.CENEXT); - // file comment length - _comLen = data.readUInt16LE(Constants.CENCOM); - // volume number start - _diskStart = data.readUInt16LE(Constants.CENDSK); - // internal file attributes - _inattr = data.readUInt16LE(Constants.CENATT); - // external file attributes - _attr = data.readUInt32LE(Constants.CENATX); - // LOC header offset - _offset = data.readUInt32LE(Constants.CENOFF); - }, - - dataHeaderToBinary: function () { - // LOC header size (30 bytes) - var data = Buffer.alloc(Constants.LOCHDR); - // "PK\003\004" - data.writeUInt32LE(Constants.LOCSIG, 0); - // version needed to extract - data.writeUInt16LE(_version, Constants.LOCVER); - // general purpose bit flag - data.writeUInt16LE(_flags, Constants.LOCFLG); - // compression method - data.writeUInt16LE(_method, Constants.LOCHOW); - // modification time (2 bytes time, 2 bytes date) - data.writeUInt32LE(_time, Constants.LOCTIM); - // uncompressed file crc-32 value - data.writeUInt32LE(_crc, Constants.LOCCRC); - // compressed size - data.writeUInt32LE(_compressedSize, Constants.LOCSIZ); - // uncompressed size - 
data.writeUInt32LE(_size, Constants.LOCLEN); - // filename length - data.writeUInt16LE(_fnameLen, Constants.LOCNAM); - // extra field length - data.writeUInt16LE(_extraLen, Constants.LOCEXT); - return data; - }, - - entryHeaderToBinary: function () { - // CEN header size (46 bytes) - var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); - // "PK\001\002" - data.writeUInt32LE(Constants.CENSIG, 0); - // version made by - data.writeUInt16LE(_verMade, Constants.CENVEM); - // version needed to extract - data.writeUInt16LE(_version, Constants.CENVER); - // encrypt, decrypt flags - data.writeUInt16LE(_flags, Constants.CENFLG); - // compression method - data.writeUInt16LE(_method, Constants.CENHOW); - // modification time (2 bytes time, 2 bytes date) - data.writeUInt32LE(_time, Constants.CENTIM); - // uncompressed file crc-32 value - data.writeUInt32LE(_crc, Constants.CENCRC); - // compressed size - data.writeUInt32LE(_compressedSize, Constants.CENSIZ); - // uncompressed size - data.writeUInt32LE(_size, Constants.CENLEN); - // filename length - data.writeUInt16LE(_fnameLen, Constants.CENNAM); - // extra field length - data.writeUInt16LE(_extraLen, Constants.CENEXT); - // file comment length - data.writeUInt16LE(_comLen, Constants.CENCOM); - // volume number start - data.writeUInt16LE(_diskStart, Constants.CENDSK); - // internal file attributes - data.writeUInt16LE(_inattr, Constants.CENATT); - // external file attributes - data.writeUInt32LE(_attr, Constants.CENATX); - // LOC header offset - data.writeUInt32LE(_offset, Constants.CENOFF); - // fill all with - data.fill(0x00, Constants.CENHDR); - return data; - }, - - toJSON: function () { - const bytes = function (nr) { - return nr + " bytes"; - }; - - return { - made: _verMade, - version: _version, - flags: _flags, - method: Utils.methodToString(_method), - time: this.time, - crc: "0x" + _crc.toString(16).toUpperCase(), - compressedSize: bytes(_compressedSize), - size: bytes(_size), - fileNameLength: bytes(_fnameLen), - extraLength: bytes(_extraLen), - commentLength: bytes(_comLen), - diskNumStart: _diskStart, - inAttr: _inattr, - attr: _attr, - offset: _offset, - entryHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) - }; - }, - - toString: function () { - return JSON.stringify(this.toJSON(), null, "\t"); - } - }; -}; - - -/***/ }), - -/***/ 4958: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -exports.EntryHeader = __nccwpck_require__(9032); -exports.MainHeader = __nccwpck_require__(8952); - - -/***/ }), - -/***/ 8952: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Utils = __nccwpck_require__(5182), - Constants = Utils.Constants; - -/* The entries in the end of central directory */ -module.exports = function () { - var _volumeEntries = 0, - _totalEntries = 0, - _size = 0, - _offset = 0, - _commentLength = 0; - - return { - get diskEntries() { - return _volumeEntries; - }, - set diskEntries(/*Number*/ val) { - _volumeEntries = _totalEntries = val; - }, - - get totalEntries() { - return _totalEntries; - }, - set totalEntries(/*Number*/ val) { - _totalEntries = _volumeEntries = val; - }, - - get size() { - return _size; - }, - set size(/*Number*/ val) { - _size = val; - }, - - get offset() { - return _offset; - }, - set offset(/*Number*/ val) { - _offset = val; - }, - - get commentLength() { - return _commentLength; - }, - set commentLength(/*Number*/ val) { - _commentLength = val; - }, - - get mainHeaderSize() { - return Constants.ENDHDR + 
_commentLength; - }, - - loadFromBinary: function (/*Buffer*/ data) { - // data should be 22 bytes and start with "PK 05 06" - // or be 56+ bytes and start with "PK 06 06" for Zip64 - if ( - (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && - (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) - ) { - throw new Error(Utils.Errors.INVALID_END); - } - - if (data.readUInt32LE(0) === Constants.ENDSIG) { - // number of entries on this volume - _volumeEntries = data.readUInt16LE(Constants.ENDSUB); - // total number of entries - _totalEntries = data.readUInt16LE(Constants.ENDTOT); - // central directory size in bytes - _size = data.readUInt32LE(Constants.ENDSIZ); - // offset of first CEN header - _offset = data.readUInt32LE(Constants.ENDOFF); - // zip file comment length - _commentLength = data.readUInt16LE(Constants.ENDCOM); - } else { - // number of entries on this volume - _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB); - // total number of entries - _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT); - // central directory size in bytes - _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE); - // offset of first CEN header - _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF); - - _commentLength = 0; - } - }, - - toBinary: function () { - var b = Buffer.alloc(Constants.ENDHDR + _commentLength); - // "PK 05 06" signature - b.writeUInt32LE(Constants.ENDSIG, 0); - b.writeUInt32LE(0, 4); - // number of entries on this volume - b.writeUInt16LE(_volumeEntries, Constants.ENDSUB); - // total number of entries - b.writeUInt16LE(_totalEntries, Constants.ENDTOT); - // central directory size in bytes - b.writeUInt32LE(_size, Constants.ENDSIZ); - // offset of first CEN header - b.writeUInt32LE(_offset, Constants.ENDOFF); - // zip file comment length - b.writeUInt16LE(_commentLength, Constants.ENDCOM); - // fill comment memory with spaces so no garbage is left there - b.fill(" ", Constants.ENDHDR); - - return b; - }, - - toJSON: function () { - // creates 0x0000 style output - const offset = function (nr, len) { - let offs = nr.toString(16).toUpperCase(); - while (offs.length < len) offs = "0" + offs; - return "0x" + offs; - }; - - return { - diskEntries: _volumeEntries, - totalEntries: _totalEntries, - size: _size + " bytes", - offset: offset(_offset, 4), - commentLength: _commentLength - }; - }, - - toString: function () { - return JSON.stringify(this.toJSON(), null, "\t"); - } - }; -}; - // Misspelled - -/***/ }), - -/***/ 7686: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = function (/*Buffer*/ inbuf) { - var zlib = __nccwpck_require__(9796); - - var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; - - return { - deflate: function () { - return zlib.deflateRawSync(inbuf, opts); - }, - - deflateAsync: function (/*Function*/ callback) { - var tmp = zlib.createDeflateRaw(opts), - parts = [], - total = 0; - tmp.on("data", function (data) { - parts.push(data); - total += data.length; - }); - tmp.on("end", function () { - var buf = Buffer.alloc(total), - written = 0; - buf.fill(0); - for (var i = 0; i < parts.length; i++) { - var part = parts[i]; - part.copy(buf, written); - written += part.length; - } - callback && callback(buf); - }); - tmp.end(inbuf); - } - }; -}; - - -/***/ }), - -/***/ 3928: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -exports.Deflater = __nccwpck_require__(7686); -exports.Inflater = 
__nccwpck_require__(2153); -exports.ZipCrypto = __nccwpck_require__(3228); - - -/***/ }), - -/***/ 2153: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = function (/*Buffer*/ inbuf) { - var zlib = __nccwpck_require__(9796); - - return { - inflate: function () { - return zlib.inflateRawSync(inbuf); - }, - - inflateAsync: function (/*Function*/ callback) { - var tmp = zlib.createInflateRaw(), - parts = [], - total = 0; - tmp.on("data", function (data) { - parts.push(data); - total += data.length; - }); - tmp.on("end", function () { - var buf = Buffer.alloc(total), - written = 0; - buf.fill(0); - for (var i = 0; i < parts.length; i++) { - var part = parts[i]; - part.copy(buf, written); - written += part.length; - } - callback && callback(buf); - }); - tmp.end(inbuf); - } - }; -}; - - -/***/ }), - -/***/ 3228: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -// node crypt, we use it for generate salt -// eslint-disable-next-line node/no-unsupported-features/node-builtins -const { randomFillSync } = __nccwpck_require__(6113); - -// generate CRC32 lookup table -const crctable = new Uint32Array(256).map((t, crc) => { - for (let j = 0; j < 8; j++) { - if (0 !== (crc & 1)) { - crc = (crc >>> 1) ^ 0xedb88320; - } else { - crc >>>= 1; - } - } - return crc >>> 0; -}); - -// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits) -const uMul = (a, b) => Math.imul(a, b) >>> 0; - -// crc32 byte single update (actually same function is part of utils.crc32 function :) ) -const crc32update = (pCrc32, bval) => { - return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); -}; - -// function for generating salt for encrytion header -const genSalt = () => { - if ("function" === typeof randomFillSync) { - return randomFillSync(Buffer.alloc(12)); - } else { - // fallback if function is not defined - return genSalt.node(); - } -}; - -// salt generation with node random function (mainly as fallback) -genSalt.node = () => { - const salt = Buffer.alloc(12); - const len = salt.length; - for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; - return salt; -}; - -// general config -const config = { - genSalt -}; - -// Class Initkeys handles same basic ops with keys -function Initkeys(pw) { - const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw); - this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); - for (let i = 0; i < pass.length; i++) { - this.updateKeys(pass[i]); - } -} - -Initkeys.prototype.updateKeys = function (byteValue) { - const keys = this.keys; - keys[0] = crc32update(keys[0], byteValue); - keys[1] += keys[0] & 0xff; - keys[1] = uMul(keys[1], 134775813) + 1; - keys[2] = crc32update(keys[2], keys[1] >>> 24); - return byteValue; -}; - -Initkeys.prototype.next = function () { - const k = (this.keys[2] | 2) >>> 0; // key - return (uMul(k, k ^ 1) >> 8) & 0xff; // decode -}; - -function make_decrypter(/*Buffer*/ pwd) { - // 1. Stage initialize key - const keys = new Initkeys(pwd); - - // return decrypter function - return function (/*Buffer*/ data) { - // result - we create new Buffer for results - const result = Buffer.alloc(data.length); - let pos = 0; - // process input data - for (let c of data) { - //c ^= keys.next(); - //result[pos++] = c; // decode & Save Value - result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte - } - return result; - }; -} - -function make_encrypter(/*Buffer*/ pwd) { - // 1. 
Stage initialize key - const keys = new Initkeys(pwd); - - // return encrypting function, result and pos is here so we dont have to merge buffers later - return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { - // result - we create new Buffer for results - if (!result) result = Buffer.alloc(data.length); - // process input data - for (let c of data) { - const k = keys.next(); // save key byte - result[pos++] = c ^ k; // save val - keys.updateKeys(c); // update keys with decoded byte - } - return result; - }; -} - -function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { - if (!data || !Buffer.isBuffer(data) || data.length < 12) { - return Buffer.alloc(0); - } - - // 1. We Initialize and generate decrypting function - const decrypter = make_decrypter(pwd); - - // 2. decrypt salt what is always 12 bytes and is a part of file content - const salt = decrypter(data.slice(0, 12)); - - // 3. does password meet expectations - if (salt[11] !== header.crc >>> 24) { - throw "ADM-ZIP: Wrong Password"; - } - - // 4. decode content - return decrypter(data.slice(12)); -} - -// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality -function _salter(data) { - if (Buffer.isBuffer(data) && data.length >= 12) { - // be aware - currently salting buffer data is modified - config.genSalt = function () { - return data.slice(0, 12); - }; - } else if (data === "node") { - // test salt generation with node random function - config.genSalt = genSalt.node; - } else { - // if value is not acceptable config gets reset. - config.genSalt = genSalt; - } -} - -function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { - // 1. test data if data is not Buffer we make buffer from it - if (data == null) data = Buffer.alloc(0); - // if data is not buffer be make buffer from it - if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); - - // 2. We Initialize and generate encrypting function - const encrypter = make_encrypter(pwd); - - // 3. generate salt (12-bytes of random data) - const salt = config.genSalt(); - salt[11] = (header.crc >>> 24) & 0xff; - - // old implementations (before PKZip 2.04g) used two byte check - if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; - - // 4. 
create output - const result = Buffer.alloc(data.length + 12); - encrypter(salt, result); - - // finally encode content - return encrypter(data, result, 12); -} - -module.exports = { decrypt, encrypt, _salter }; - - -/***/ }), - -/***/ 4522: -/***/ ((module) => { - -module.exports = { - /* The local file header */ - LOCHDR : 30, // LOC header size - LOCSIG : 0x04034b50, // "PK\003\004" - LOCVER : 4, // version needed to extract - LOCFLG : 6, // general purpose bit flag - LOCHOW : 8, // compression method - LOCTIM : 10, // modification time (2 bytes time, 2 bytes date) - LOCCRC : 14, // uncompressed file crc-32 value - LOCSIZ : 18, // compressed size - LOCLEN : 22, // uncompressed size - LOCNAM : 26, // filename length - LOCEXT : 28, // extra field length - - /* The Data descriptor */ - EXTSIG : 0x08074b50, // "PK\007\008" - EXTHDR : 16, // EXT header size - EXTCRC : 4, // uncompressed file crc-32 value - EXTSIZ : 8, // compressed size - EXTLEN : 12, // uncompressed size - - /* The central directory file header */ - CENHDR : 46, // CEN header size - CENSIG : 0x02014b50, // "PK\001\002" - CENVEM : 4, // version made by - CENVER : 6, // version needed to extract - CENFLG : 8, // encrypt, decrypt flags - CENHOW : 10, // compression method - CENTIM : 12, // modification time (2 bytes time, 2 bytes date) - CENCRC : 16, // uncompressed file crc-32 value - CENSIZ : 20, // compressed size - CENLEN : 24, // uncompressed size - CENNAM : 28, // filename length - CENEXT : 30, // extra field length - CENCOM : 32, // file comment length - CENDSK : 34, // volume number start - CENATT : 36, // internal file attributes - CENATX : 38, // external file attributes (host system dependent) - CENOFF : 42, // LOC header offset - - /* The entries in the end of central directory */ - ENDHDR : 22, // END header size - ENDSIG : 0x06054b50, // "PK\005\006" - ENDSUB : 8, // number of entries on this disk - ENDTOT : 10, // total number of entries - ENDSIZ : 12, // central directory size in bytes - ENDOFF : 16, // offset of first CEN header - ENDCOM : 20, // zip file comment length - - END64HDR : 20, // zip64 END header size - END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007" - END64START : 4, // number of the disk with the start of the zip64 - END64OFF : 8, // relative offset of the zip64 end of central directory - END64NUMDISKS : 16, // total number of disks - - ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006" - ZIP64HDR : 56, // zip64 record minimum size - ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE - ZIP64SIZE : 4, // zip64 size of the central directory record - ZIP64VEM : 12, // zip64 version made by - ZIP64VER : 14, // zip64 version needed to extract - ZIP64DSK : 16, // zip64 number of this disk - ZIP64DSKDIR : 20, // number of the disk with the start of the record directory - ZIP64SUB : 24, // number of entries on this disk - ZIP64TOT : 32, // total number of entries - ZIP64SIZB : 40, // zip64 central directory size in bytes - ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number - ZIP64EXTRA : 56, // extensible data sector - - /* Compression methods */ - STORED : 0, // no compression - SHRUNK : 1, // shrunk - REDUCED1 : 2, // reduced with compression factor 1 - REDUCED2 : 3, // reduced with compression factor 2 - REDUCED3 : 4, // reduced with compression factor 3 - REDUCED4 : 5, // reduced with compression factor 4 - IMPLODED : 6, // imploded - // 7 reserved for Tokenizing compression 
algorithm - DEFLATED : 8, // deflated - ENHANCED_DEFLATED: 9, // enhanced deflated - PKWARE : 10,// PKWare DCL imploded - // 11 reserved by PKWARE - BZIP2 : 12, // compressed using BZIP2 - // 13 reserved by PKWARE - LZMA : 14, // LZMA - // 15-17 reserved by PKWARE - IBM_TERSE : 18, // compressed using IBM TERSE - IBM_LZ77 : 19, // IBM LZ77 z - AES_ENCRYPT : 99, // WinZIP AES encryption method - - /* General purpose bit flag */ - // values can obtained with expression 2**bitnr - FLG_ENC : 1, // Bit 0: encrypted file - FLG_COMP1 : 2, // Bit 1, compression option - FLG_COMP2 : 4, // Bit 2, compression option - FLG_DESC : 8, // Bit 3, data descriptor - FLG_ENH : 16, // Bit 4, enhanced deflating - FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. - FLG_STR : 64, // Bit 6, strong encryption (patented) - // Bits 7-10: Currently unused. - FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) - // Bit 12: Reserved by PKWARE for enhanced compression. - // Bit 13: encrypted the Central Directory (patented). - // Bits 14-15: Reserved by PKWARE. - FLG_MSK : 4096, // mask header values - - /* Load type */ - FILE : 2, - BUFFER : 1, - NONE : 0, - - /* 4.5 Extensible data fields */ - EF_ID : 0, - EF_SIZE : 2, - - /* Header IDs */ - ID_ZIP64 : 0x0001, - ID_AVINFO : 0x0007, - ID_PFS : 0x0008, - ID_OS2 : 0x0009, - ID_NTFS : 0x000a, - ID_OPENVMS : 0x000c, - ID_UNIX : 0x000d, - ID_FORK : 0x000e, - ID_PATCH : 0x000f, - ID_X509_PKCS7 : 0x0014, - ID_X509_CERTID_F : 0x0015, - ID_X509_CERTID_C : 0x0016, - ID_STRONGENC : 0x0017, - ID_RECORD_MGT : 0x0018, - ID_X509_PKCS7_RL : 0x0019, - ID_IBM1 : 0x0065, - ID_IBM2 : 0x0066, - ID_POSZIP : 0x4690, - - EF_ZIP64_OR_32 : 0xffffffff, - EF_ZIP64_OR_16 : 0xffff, - EF_ZIP64_SUNCOMP : 0, - EF_ZIP64_SCOMP : 8, - EF_ZIP64_RHO : 16, - EF_ZIP64_DSN : 24 -}; - - -/***/ }), - -/***/ 1255: -/***/ ((module) => { - -module.exports = { - /* Header error messages */ - INVALID_LOC: "Invalid LOC header (bad signature)", - INVALID_CEN: "Invalid CEN header (bad signature)", - INVALID_END: "Invalid END header (bad signature)", - - /* ZipEntry error messages*/ - NO_DATA: "Nothing to decompress", - BAD_CRC: "CRC32 checksum failed", - FILE_IN_THE_WAY: "There is a file in the way: %s", - UNKNOWN_METHOD: "Invalid/unsupported compression method", - - /* Inflater error messages */ - AVAIL_DATA: "inflate::Available inflate data did not terminate", - INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", - TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", - INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", - INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", - INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", - INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", - INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", - INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", - INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", - - /* ADM-ZIP error messages */ - CANT_EXTRACT_FILE: "Could not extract the file", - CANT_OVERRIDE: "Target file already exists", - NO_ZIP: "No zip file was loaded", - NO_ENTRY: "Entry doesn't exist", - DIRECTORY_CONTENT_ERROR: "A directory cannot have content", - FILE_NOT_FOUND: "File not found: %s", - 
NOT_IMPLEMENTED: "Not implemented", - INVALID_FILENAME: "Invalid filename", - INVALID_FORMAT: "Invalid or unsupported zip format. No END header found" -}; - - -/***/ }), - -/***/ 8321: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const fs = (__nccwpck_require__(2895).require)(); -const pth = __nccwpck_require__(1017); - -fs.existsSync = fs.existsSync || pth.existsSync; - -module.exports = function (/*String*/ path) { - var _path = path || "", - _obj = newAttr(), - _stat = null; - - function newAttr() { - return { - directory: false, - readonly: false, - hidden: false, - executable: false, - mtime: 0, - atime: 0 - }; - } - - if (_path && fs.existsSync(_path)) { - _stat = fs.statSync(_path); - _obj.directory = _stat.isDirectory(); - _obj.mtime = _stat.mtime; - _obj.atime = _stat.atime; - _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner - _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right - _obj.hidden = pth.basename(_path)[0] === "."; - } else { - console.warn("Invalid path: " + _path); - } - - return { - get directory() { - return _obj.directory; - }, - - get readOnly() { - return _obj.readonly; - }, - - get hidden() { - return _obj.hidden; - }, - - get mtime() { - return _obj.mtime; - }, - - get atime() { - return _obj.atime; - }, - - get executable() { - return _obj.executable; - }, - - decodeAttributes: function () {}, - - encodeAttributes: function () {}, - - toJSON: function () { - return { - path: _path, - isDirectory: _obj.directory, - isReadOnly: _obj.readonly, - isHidden: _obj.hidden, - isExecutable: _obj.executable, - mTime: _obj.mtime, - aTime: _obj.atime - }; - }, - - toString: function () { - return JSON.stringify(this.toJSON(), null, "\t"); - } - }; -}; - - -/***/ }), - -/***/ 2895: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -exports.require = function () { - if (typeof process === "object" && process.versions && process.versions["electron"]) { - try { - const originalFs = __nccwpck_require__(2941); - if (Object.keys(originalFs).length > 0) { - return originalFs; - } - } catch (e) {} - } - return __nccwpck_require__(7147); -}; - - -/***/ }), - -/***/ 5182: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = __nccwpck_require__(1291); -module.exports.Constants = __nccwpck_require__(4522); -module.exports.Errors = __nccwpck_require__(1255); -module.exports.FileAttr = __nccwpck_require__(8321); - - -/***/ }), - -/***/ 1291: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const fsystem = (__nccwpck_require__(2895).require)(); -const pth = __nccwpck_require__(1017); -const Constants = __nccwpck_require__(4522); -const Errors = __nccwpck_require__(1255); -const isWin = typeof process === "object" && "win32" === process.platform; - -const is_Obj = (obj) => obj && typeof obj === "object"; - -// generate CRC32 lookup table -const crcTable = new Uint32Array(256).map((t, c) => { - for (let k = 0; k < 8; k++) { - if ((c & 1) !== 0) { - c = 0xedb88320 ^ (c >>> 1); - } else { - c >>>= 1; - } - } - return c >>> 0; -}); - -// UTILS functions - -function Utils(opts) { - this.sep = pth.sep; - this.fs = fsystem; - - if (is_Obj(opts)) { - // custom filesystem - if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") { - this.fs = opts.fs; - } - } -} - -module.exports = Utils; - -// INSTANCED functions - -Utils.prototype.makeDir = function (/*String*/ folder) { - const self = this; - - // 
Sync - make directories tree - function mkdirSync(/*String*/ fpath) { - let resolvedPath = fpath.split(self.sep)[0]; - fpath.split(self.sep).forEach(function (name) { - if (!name || name.substr(-1, 1) === ":") return; - resolvedPath += self.sep + name; - var stat; - try { - stat = self.fs.statSync(resolvedPath); - } catch (e) { - self.fs.mkdirSync(resolvedPath); - } - if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY.replace("%s", resolvedPath); - }); - } - - mkdirSync(folder); -}; - -Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { - const self = this; - if (self.fs.existsSync(path)) { - if (!overwrite) return false; // cannot overwrite - - var stat = self.fs.statSync(path); - if (stat.isDirectory()) { - return false; - } - } - var folder = pth.dirname(path); - if (!self.fs.existsSync(folder)) { - self.makeDir(folder); - } - - var fd; - try { - fd = self.fs.openSync(path, "w", 438); // 0666 - } catch (e) { - self.fs.chmodSync(path, 438); - fd = self.fs.openSync(path, "w", 438); - } - if (fd) { - try { - self.fs.writeSync(fd, content, 0, content.length, 0); - } finally { - self.fs.closeSync(fd); - } - } - self.fs.chmodSync(path, attr || 438); - return true; -}; - -Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { - if (typeof attr === "function") { - callback = attr; - attr = undefined; - } - - const self = this; - - self.fs.exists(path, function (exist) { - if (exist && !overwrite) return callback(false); - - self.fs.stat(path, function (err, stat) { - if (exist && stat.isDirectory()) { - return callback(false); - } - - var folder = pth.dirname(path); - self.fs.exists(folder, function (exists) { - if (!exists) self.makeDir(folder); - - self.fs.open(path, "w", 438, function (err, fd) { - if (err) { - self.fs.chmod(path, 438, function () { - self.fs.open(path, "w", 438, function (err, fd) { - self.fs.write(fd, content, 0, content.length, 0, function () { - self.fs.close(fd, function () { - self.fs.chmod(path, attr || 438, function () { - callback(true); - }); - }); - }); - }); - }); - } else if (fd) { - self.fs.write(fd, content, 0, content.length, 0, function () { - self.fs.close(fd, function () { - self.fs.chmod(path, attr || 438, function () { - callback(true); - }); - }); - }); - } else { - self.fs.chmod(path, attr || 438, function () { - callback(true); - }); - } - }); - }); - }); - }); -}; - -Utils.prototype.findFiles = function (/*String*/ path) { - const self = this; - - function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { - if (typeof pattern === "boolean") { - recursive = pattern; - pattern = undefined; - } - let files = []; - self.fs.readdirSync(dir).forEach(function (file) { - var path = pth.join(dir, file); - - if (self.fs.statSync(path).isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); - - if (!pattern || pattern.test(path)) { - files.push(pth.normalize(path) + (self.fs.statSync(path).isDirectory() ? 
self.sep : "")); - } - }); - return files; - } - - return findSync(path, undefined, true); -}; - -Utils.prototype.getAttributes = function () {}; - -Utils.prototype.setAttributes = function () {}; - -// STATIC functions - -// crc32 single update (it is part of crc32) -Utils.crc32update = function (crc, byte) { - return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); -}; - -Utils.crc32 = function (buf) { - if (typeof buf === "string") { - buf = Buffer.from(buf, "utf8"); - } - // Generate crcTable - if (!crcTable.length) genCRCTable(); - - let len = buf.length; - let crc = ~0; - for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); - // xor and cast as uint32 number - return ~crc >>> 0; -}; - -Utils.methodToString = function (/*Number*/ method) { - switch (method) { - case Constants.STORED: - return "STORED (" + method + ")"; - case Constants.DEFLATED: - return "DEFLATED (" + method + ")"; - default: - return "UNSUPPORTED (" + method + ")"; - } -}; - -// removes ".." style path elements -Utils.canonical = function (/*string*/ path) { - if (!path) return ""; - // trick normalize think path is absolute - var safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); - return pth.join(".", safeSuffix); -}; - -// make abolute paths taking prefix as root folder -Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { - prefix = pth.resolve(pth.normalize(prefix)); - var parts = name.split("/"); - for (var i = 0, l = parts.length; i < l; i++) { - var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); - if (path.indexOf(prefix) === 0) { - return path; - } - } - return pth.normalize(pth.join(prefix, pth.basename(name))); -}; - -// converts buffer, Uint8Array, string types to buffer -Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input) { - if (Buffer.isBuffer(input)) { - return input; - } else if (input instanceof Uint8Array) { - return Buffer.from(input); - } else { - // expect string all other values are invalid and return empty buffer - return typeof input === "string" ? 
Buffer.from(input, "utf8") : Buffer.alloc(0); - } -}; - -Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { - var slice = Buffer.from(buffer.slice(index, index + 8)); - slice.swap64(); - - return parseInt(`0x${slice.toString("hex")}`); -}; - -Utils.isWin = isWin; // Do we have windows system -Utils.crcTable = crcTable; - - -/***/ }), - -/***/ 4057: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var Utils = __nccwpck_require__(5182), - Headers = __nccwpck_require__(4958), - Constants = Utils.Constants, - Methods = __nccwpck_require__(3928); - -module.exports = function (/*Buffer*/ input) { - var _entryHeader = new Headers.EntryHeader(), - _entryName = Buffer.alloc(0), - _comment = Buffer.alloc(0), - _isDirectory = false, - uncompressedData = null, - _extra = Buffer.alloc(0); - - function getCompressedDataFromZip() { - if (!input || !Buffer.isBuffer(input)) { - return Buffer.alloc(0); - } - _entryHeader.loadDataHeaderFromBinary(input); - return input.slice(_entryHeader.realDataOffset, _entryHeader.realDataOffset + _entryHeader.compressedSize); - } - - function crc32OK(data) { - // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the header is written - if ((_entryHeader.flags & 0x8) !== 0x8) { - if (Utils.crc32(data) !== _entryHeader.dataHeader.crc) { - return false; - } - } else { - // @TODO: load and check data descriptor header - // The fields in the local header are filled with zero, and the CRC-32 and size are appended in a 12-byte structure - // (optionally preceded by a 4-byte signature) immediately after the compressed data: - } - return true; - } - - function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { - if (typeof callback === "undefined" && typeof async === "string") { - pass = async; - async = void 0; - } - if (_isDirectory) { - if (async && callback) { - callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR); //si added error. - } - return Buffer.alloc(0); - } - - var compressedData = getCompressedDataFromZip(); - - if (compressedData.length === 0) { - // File is empty, nothing to decompress. - if (async && callback) callback(compressedData); - return compressedData; - } - - if (_entryHeader.encripted) { - if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { - throw new Error("ADM-ZIP: Incompatible password parameter"); - } - compressedData = Methods.ZipCrypto.decrypt(compressedData, _entryHeader, pass); - } - - var data = Buffer.alloc(_entryHeader.size); - - switch (_entryHeader.method) { - case Utils.Constants.STORED: - compressedData.copy(data); - if (!crc32OK(data)) { - if (async && callback) callback(data, Utils.Errors.BAD_CRC); //si added error - throw new Error(Utils.Errors.BAD_CRC); - } else { - //si added otherwise did not seem to return data. 
- if (async && callback) callback(data); - return data; - } - case Utils.Constants.DEFLATED: - var inflater = new Methods.Inflater(compressedData); - if (!async) { - const result = inflater.inflate(data); - result.copy(data, 0); - if (!crc32OK(data)) { - throw new Error(Utils.Errors.BAD_CRC + " " + _entryName.toString()); - } - return data; - } else { - inflater.inflateAsync(function (result) { - result.copy(result, 0); - if (callback) { - if (!crc32OK(result)) { - callback(result, Utils.Errors.BAD_CRC); //si added error - } else { - callback(result); - } - } - }); - } - break; - default: - if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD); - throw new Error(Utils.Errors.UNKNOWN_METHOD); - } - } - - function compress(/*Boolean*/ async, /*Function*/ callback) { - if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { - // no data set or the data wasn't changed to require recompression - if (async && callback) callback(getCompressedDataFromZip()); - return getCompressedDataFromZip(); - } - - if (uncompressedData.length && !_isDirectory) { - var compressedData; - // Local file header - switch (_entryHeader.method) { - case Utils.Constants.STORED: - _entryHeader.compressedSize = _entryHeader.size; - - compressedData = Buffer.alloc(uncompressedData.length); - uncompressedData.copy(compressedData); - - if (async && callback) callback(compressedData); - return compressedData; - default: - case Utils.Constants.DEFLATED: - var deflater = new Methods.Deflater(uncompressedData); - if (!async) { - var deflated = deflater.deflate(); - _entryHeader.compressedSize = deflated.length; - return deflated; - } else { - deflater.deflateAsync(function (data) { - compressedData = Buffer.alloc(data.length); - _entryHeader.compressedSize = data.length; - data.copy(compressedData); - callback && callback(compressedData); - }); - } - deflater = null; - break; - } - } else if (async && callback) { - callback(Buffer.alloc(0)); - } else { - return Buffer.alloc(0); - } - } - - function readUInt64LE(buffer, offset) { - return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset); - } - - function parseExtra(data) { - var offset = 0; - var signature, size, part; - while (offset < data.length) { - signature = data.readUInt16LE(offset); - offset += 2; - size = data.readUInt16LE(offset); - offset += 2; - part = data.slice(offset, offset + size); - offset += size; - if (Constants.ID_ZIP64 === signature) { - parseZip64ExtendedInformation(part); - } - } - } - - //Override header field values with values from the ZIP64 extra field - function parseZip64ExtendedInformation(data) { - var size, compressedSize, offset, diskNumStart; - - if (data.length >= Constants.EF_ZIP64_SCOMP) { - size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); - if (_entryHeader.size === Constants.EF_ZIP64_OR_32) { - _entryHeader.size = size; - } - } - if (data.length >= Constants.EF_ZIP64_RHO) { - compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); - if (_entryHeader.compressedSize === Constants.EF_ZIP64_OR_32) { - _entryHeader.compressedSize = compressedSize; - } - } - if (data.length >= Constants.EF_ZIP64_DSN) { - offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); - if (_entryHeader.offset === Constants.EF_ZIP64_OR_32) { - _entryHeader.offset = offset; - } - } - if (data.length >= Constants.EF_ZIP64_DSN + 4) { - diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); - if (_entryHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { - _entryHeader.diskNumStart = 
diskNumStart; - } - } - } - - return { - get entryName() { - return _entryName.toString(); - }, - get rawEntryName() { - return _entryName; - }, - set entryName(val) { - _entryName = Utils.toBuffer(val); - var lastChar = _entryName[_entryName.length - 1]; - _isDirectory = lastChar === 47 || lastChar === 92; - _entryHeader.fileNameLength = _entryName.length; - }, - - get extra() { - return _extra; - }, - set extra(val) { - _extra = val; - _entryHeader.extraLength = val.length; - parseExtra(val); - }, - - get comment() { - return _comment.toString(); - }, - set comment(val) { - _comment = Utils.toBuffer(val); - _entryHeader.commentLength = _comment.length; - }, - - get name() { - var n = _entryName.toString(); - return _isDirectory - ? n - .substr(n.length - 1) - .split("/") - .pop() - : n.split("/").pop(); - }, - get isDirectory() { - return _isDirectory; - }, - - getCompressedData: function () { - return compress(false, null); - }, - - getCompressedDataAsync: function (/*Function*/ callback) { - compress(true, callback); - }, - - setData: function (value) { - uncompressedData = Utils.toBuffer(value); - if (!_isDirectory && uncompressedData.length) { - _entryHeader.size = uncompressedData.length; - _entryHeader.method = Utils.Constants.DEFLATED; - _entryHeader.crc = Utils.crc32(value); - _entryHeader.changed = true; - } else { - // folders and blank files should be stored - _entryHeader.method = Utils.Constants.STORED; - } - }, - - getData: function (pass) { - if (_entryHeader.changed) { - return uncompressedData; - } else { - return decompress(false, null, pass); - } - }, - - getDataAsync: function (/*Function*/ callback, pass) { - if (_entryHeader.changed) { - callback(uncompressedData); - } else { - decompress(true, callback, pass); - } - }, - - set attr(attr) { - _entryHeader.attr = attr; - }, - get attr() { - return _entryHeader.attr; - }, - - set header(/*Buffer*/ data) { - _entryHeader.loadFromBinary(data); - }, - - get header() { - return _entryHeader; - }, - - packHeader: function () { - // 1. create header (buffer) - var header = _entryHeader.entryHeaderToBinary(); - var addpos = Utils.Constants.CENHDR; - // 2. add file name - _entryName.copy(header, addpos); - addpos += _entryName.length; - // 3. add extra data - if (_entryHeader.extraLength) { - _extra.copy(header, addpos); - addpos += _entryHeader.extraLength; - } - // 4. 
add file comment - if (_entryHeader.commentLength) { - _comment.copy(header, addpos); - } - return header; - }, - - toJSON: function () { - const bytes = function (nr) { - return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; - }; - - return { - entryName: this.entryName, - name: this.name, - comment: this.comment, - isDirectory: this.isDirectory, - header: _entryHeader.toJSON(), - compressedData: bytes(input), - data: bytes(uncompressedData) - }; - }, - - toString: function () { - return JSON.stringify(this.toJSON(), null, "\t"); - } - }; -}; - - -/***/ }), - -/***/ 7744: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const ZipEntry = __nccwpck_require__(4057); -const Headers = __nccwpck_require__(4958); -const Utils = __nccwpck_require__(5182); - -module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { - var entryList = [], - entryTable = {}, - _comment = Buffer.alloc(0), - mainHeader = new Headers.MainHeader(), - loadedEntries = false; - - // assign options - const opts = Object.assign(Object.create(null), options); - - const { noSort } = opts; - - if (inBuffer) { - // is a memory buffer - readMainHeader(opts.readEntries); - } else { - // none. is a new file - loadedEntries = true; - } - - function iterateEntries(callback) { - const totalEntries = mainHeader.diskEntries; // total number of entries - let index = mainHeader.offset; // offset of first CEN header - - for (let i = 0; i < totalEntries; i++) { - let tmp = index; - const entry = new ZipEntry(inBuffer); - - entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); - entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - - index += entry.header.entryHeaderSize; - - callback(entry); - } - } - - function readEntries() { - loadedEntries = true; - entryTable = {}; - entryList = new Array(mainHeader.diskEntries); // total number of entries - var index = mainHeader.offset; // offset of first CEN header - for (var i = 0; i < entryList.length; i++) { - var tmp = index, - entry = new ZipEntry(inBuffer); - entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); - - entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - - if (entry.header.extraLength) { - entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); - } - - if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); - - index += entry.header.entryHeaderSize; - - entryList[i] = entry; - entryTable[entry.entryName] = entry; - } - } - - function readMainHeader(/*Boolean*/ readNow) { - var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size - max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length - n = max, - endStart = inBuffer.length, - endOffset = -1, // Start offset of the END header - commentEnd = 0; - - for (i; i >= n; i--) { - if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' - if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { - // "PK\005\006" - endOffset = i; - commentEnd = i; - endStart = i + Utils.Constants.ENDHDR; - // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature - n = i - Utils.Constants.END64HDR; - continue; - } - - if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { - // Found a zip64 signature, let's continue reading the whole zip64 record - n = max; - continue; - } - - if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { - // Found the 
zip64 record, let's determine it's size - endOffset = i; - endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; - break; - } - } - - if (!~endOffset) throw new Error(Utils.Errors.INVALID_FORMAT); - - mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); - if (mainHeader.commentLength) { - _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); - } - if (readNow) readEntries(); - } - - function sortEntries() { - if (entryList.length > 1 && !noSort) { - entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); - } - } - - return { - /** - * Returns an array of ZipEntry objects existent in the current opened archive - * @return Array - */ - get entries() { - if (!loadedEntries) { - readEntries(); - } - return entryList; - }, - - /** - * Archive comment - * @return {String} - */ - get comment() { - return _comment.toString(); - }, - set comment(val) { - _comment = Utils.toBuffer(val); - mainHeader.commentLength = _comment.length; - }, - - getEntryCount: function () { - if (!loadedEntries) { - return mainHeader.diskEntries; - } - - return entryList.length; - }, - - forEach: function (callback) { - if (!loadedEntries) { - iterateEntries(callback); - return; - } - - entryList.forEach(callback); - }, - - /** - * Returns a reference to the entry with the given name or null if entry is inexistent - * - * @param entryName - * @return ZipEntry - */ - getEntry: function (/*String*/ entryName) { - if (!loadedEntries) { - readEntries(); - } - return entryTable[entryName] || null; - }, - - /** - * Adds the given entry to the entry list - * - * @param entry - */ - setEntry: function (/*ZipEntry*/ entry) { - if (!loadedEntries) { - readEntries(); - } - entryList.push(entry); - entryTable[entry.entryName] = entry; - mainHeader.totalEntries = entryList.length; - }, - - /** - * Removes the entry with the given name from the entry list. 
- * - * If the entry is a directory, then all nested files and directories will be removed - * @param entryName - */ - deleteEntry: function (/*String*/ entryName) { - if (!loadedEntries) { - readEntries(); - } - var entry = entryTable[entryName]; - if (entry && entry.isDirectory) { - var _self = this; - this.getEntryChildren(entry).forEach(function (child) { - if (child.entryName !== entryName) { - _self.deleteEntry(child.entryName); - } - }); - } - entryList.splice(entryList.indexOf(entry), 1); - delete entryTable[entryName]; - mainHeader.totalEntries = entryList.length; - }, - - /** - * Iterates and returns all nested files and directories of the given entry - * - * @param entry - * @return Array - */ - getEntryChildren: function (/*ZipEntry*/ entry) { - if (!loadedEntries) { - readEntries(); - } - if (entry && entry.isDirectory) { - const list = []; - const name = entry.entryName; - const len = name.length; - - entryList.forEach(function (zipEntry) { - if (zipEntry.entryName.substr(0, len) === name) { - list.push(zipEntry); - } - }); - return list; - } - return []; - }, + /** + * Remove the entry from the file or the entry and all it's nested directories and files if the given entry is a directory + * + * @param {ZipEntry|string} entry + * @returns {void} + */ + deleteFile: function (entry, withsubfolders = true) { + // @TODO: test deleteFile + var item = getEntry(entry); + if (item) { + _zip.deleteFile(item.entryName, withsubfolders); + } + }, /** - * Returns the zip file + * Remove the entry from the file or directory without affecting any nested entries * - * @return Buffer + * @param {ZipEntry|string} entry + * @returns {void} */ - compressToBuffer: function () { - if (!loadedEntries) { - readEntries(); - } - sortEntries(); - - const dataBlock = []; - const entryHeaders = []; - let totalSize = 0; - let dindex = 0; - - mainHeader.size = 0; - mainHeader.offset = 0; - - for (const entry of entryList) { - // compress data and set local and entry header accordingly. Reason why is called first - const compressedData = entry.getCompressedData(); - // 1. construct data header - entry.header.offset = dindex; - const dataHeader = entry.header.dataHeaderToBinary(); - const entryNameLen = entry.rawEntryName.length; - // 1.2. postheader - data after data header - const postHeader = Buffer.alloc(entryNameLen + entry.extra.length); - entry.rawEntryName.copy(postHeader, 0); - postHeader.copy(entry.extra, entryNameLen); - - // 2. offsets - const dataLength = dataHeader.length + postHeader.length + compressedData.length; - dindex += dataLength; - - // 3. store values in sequence - dataBlock.push(dataHeader); - dataBlock.push(postHeader); - dataBlock.push(compressedData); - - // 4. construct entry header - const entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); - // 5. 
update main header - mainHeader.size += entryHeader.length; - totalSize += dataLength + entryHeader.length; - } - - totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length - // point to end of data and beginning of central directory first record - mainHeader.offset = dindex; - - dindex = 0; - const outBuffer = Buffer.alloc(totalSize); - // write data blocks - for (const content of dataBlock) { - content.copy(outBuffer, dindex); - dindex += content.length; - } - - // write central directory entries - for (const content of entryHeaders) { - content.copy(outBuffer, dindex); - dindex += content.length; - } - - // write main header - const mh = mainHeader.toBinary(); - if (_comment) { - _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment - } - mh.copy(outBuffer, dindex); - - return outBuffer; - }, - - toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { - try { - if (!loadedEntries) { - readEntries(); - } - sortEntries(); - - const dataBlock = []; - const entryHeaders = []; - let totalSize = 0; - let dindex = 0; - - mainHeader.size = 0; - mainHeader.offset = 0; - - const compress2Buffer = function (entryLists) { - if (entryLists.length) { - const entry = entryLists.pop(); - const name = entry.entryName + entry.extra.toString(); - if (onItemStart) onItemStart(name); - entry.getCompressedDataAsync(function (compressedData) { - if (onItemEnd) onItemEnd(name); - - entry.header.offset = dindex; - // data header - const dataHeader = entry.header.dataHeaderToBinary(); - const postHeader = Buffer.alloc(name.length, name); - const dataLength = dataHeader.length + postHeader.length + compressedData.length; - - dindex += dataLength; - - dataBlock.push(dataHeader); - dataBlock.push(postHeader); - dataBlock.push(compressedData); - - const entryHeader = entry.packHeader(); - entryHeaders.push(entryHeader); - mainHeader.size += entryHeader.length; - totalSize += dataLength + entryHeader.length; - - compress2Buffer(entryLists); - }); - } else { - totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length - // point to end of data and beginning of central directory first record - mainHeader.offset = dindex; - - dindex = 0; - const outBuffer = Buffer.alloc(totalSize); - dataBlock.forEach(function (content) { - content.copy(outBuffer, dindex); // write data blocks - dindex += content.length; - }); - entryHeaders.forEach(function (content) { - content.copy(outBuffer, dindex); // write central directory entries - dindex += content.length; - }); - - const mh = mainHeader.toBinary(); - if (_comment) { - _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment - } + deleteEntry: function (entry) { + // @TODO: test deleteEntry + var item = getEntry(entry); + if (item) { + _zip.deleteEntry(item.entryName); + } + }, - mh.copy(outBuffer, dindex); // write main header + /** + * Adds a comment to the zip. The zip must be rewritten after adding the comment. + * + * @param {string} comment + */ + addZipComment: function (comment) { + // @TODO: test addZipComment + _zip.comment = comment; + }, - onSuccess(outBuffer); - } - }; + /** + * Returns the zip comment + * + * @return String + */ + getZipComment: function () { + return _zip.comment || ""; + }, - compress2Buffer(entryList); - } catch (e) { - onFail(e); + /** + * Adds a comment to a specified zipEntry. 
The zip must be rewritten after adding the comment + * The comment cannot exceed 65535 characters in length + * + * @param {ZipEntry} entry + * @param {string} comment + */ + addZipEntryComment: function (entry, comment) { + var item = getEntry(entry); + if (item) { + item.comment = comment; } - } - }; -}; - - -/***/ }), + }, -/***/ 9417: -/***/ ((module) => { + /** + * Returns the comment of the specified entry + * + * @param {ZipEntry} entry + * @return String + */ + getZipEntryComment: function (entry) { + var item = getEntry(entry); + if (item) { + return item.comment || ""; + } + return ""; + }, -"use strict"; + /** + * Updates the content of an existing entry inside the archive. The zip must be rewritten after updating the content + * + * @param {ZipEntry} entry + * @param {Buffer} content + */ + updateFile: function (entry, content) { + var item = getEntry(entry); + if (item) { + item.setData(content); + } + }, -module.exports = balanced; -function balanced(a, b, str) { - if (a instanceof RegExp) a = maybeMatch(a, str); - if (b instanceof RegExp) b = maybeMatch(b, str); + /** + * Adds a file from the disk to the archive + * + * @param {string} localPath File to add to zip + * @param {string} [zipPath] Optional path inside the zip + * @param {string} [zipName] Optional name for the file + * @param {string} [comment] Optional file comment + */ + addLocalFile: function (localPath, zipPath, zipName, comment) { + if (filetools.fs.existsSync(localPath)) { + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; - var r = range(a, b, str); + // p - local file name + const p = pth.win32.basename(pth.win32.normalize(localPath)); - return r && { - start: r[0], - end: r[1], - pre: str.slice(0, r[0]), - body: str.slice(r[0] + a.length, r[1]), - post: str.slice(r[1] + b.length) - }; -} + // add file name into zippath + zipPath += zipName ? zipName : p; -function maybeMatch(reg, str) { - var m = str.match(reg); - return m ? m[0] : null; -} + // read file attributes + const _attr = filetools.fs.statSync(localPath); -balanced.range = range; -function range(a, b, str) { - var begs, beg, left, right, result; - var ai = str.indexOf(a); - var bi = str.indexOf(b, ai + 1); - var i = ai; + // get file content + const data = _attr.isFile() ? filetools.fs.readFileSync(localPath) : Buffer.alloc(0); - if (ai >= 0 && bi > 0) { - if(a===b) { - return [ai, bi]; - } - begs = []; - left = str.length; + // if folder + if (_attr.isDirectory()) zipPath += filetools.sep; - while (i >= 0 && !result) { - if (i == ai) { - begs.push(i); - ai = str.indexOf(a, i + 1); - } else if (begs.length == 1) { - result = [ begs.pop(), bi ]; - } else { - beg = begs.pop(); - if (beg < left) { - left = beg; - right = bi; - } + // add file into zip file + this.addFile(zipPath, data, comment, _attr); + } else { + throw Utils.Errors.FILE_NOT_FOUND(localPath); + } + }, - bi = str.indexOf(b, i + 1); - } + /** + * Callback for showing if everything was done. + * + * @callback doneCallback + * @param {Error} err - Error object + * @param {boolean} done - was request fully completed + */ - i = ai < bi && ai >= 0 ? ai : bi; - } + /** + * Adds a file from the disk to the archive + * + * @param {(object|string)} options - options object, if it is string it us used as localPath. + * @param {string} options.localPath - Local path to the file. + * @param {string} [options.comment] - Optional file comment. 
+ * @param {string} [options.zipPath] - Optional path inside the zip + * @param {string} [options.zipName] - Optional name for the file + * @param {doneCallback} callback - The callback that handles the response. + */ + addLocalFileAsync: function (options, callback) { + options = typeof options === "object" ? options : { localPath: options }; + const localPath = pth.resolve(options.localPath); + const { comment } = options; + let { zipPath, zipName } = options; + const self = this; + + filetools.fs.stat(localPath, function (err, stats) { + if (err) return callback(err, false); + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; + // p - local file name + const p = pth.win32.basename(pth.win32.normalize(localPath)); + // add file name into zippath + zipPath += zipName ? zipName : p; - if (begs.length) { - result = [ left, right ]; - } - } + if (stats.isFile()) { + filetools.fs.readFile(localPath, function (err, data) { + if (err) return callback(err, false); + self.addFile(zipPath, data, comment, stats); + return setImmediate(callback, undefined, true); + }); + } else if (stats.isDirectory()) { + zipPath += filetools.sep; + self.addFile(zipPath, Buffer.alloc(0), comment, stats); + return setImmediate(callback, undefined, true); + } + }); + }, - return result; -} + /** + * Adds a local directory and all its nested files and directories to the archive + * + * @param {string} localPath - local path to the folder + * @param {string} [zipPath] - optional path inside zip + * @param {(RegExp|function)} [filter] - optional RegExp or Function if files match will be included. + */ + addLocalFolder: function (localPath, zipPath, filter) { + // Prepare filter + filter = filenameFilter(filter); + // fix ZipPath + zipPath = zipPath ? fixPath(zipPath) : ""; -/***/ }), + // normalize the path first + localPath = pth.normalize(localPath); -/***/ 3682: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + if (filetools.fs.existsSync(localPath)) { + const items = filetools.findFiles(localPath); + const self = this; -var register = __nccwpck_require__(4670); -var addHook = __nccwpck_require__(5549); -var removeHook = __nccwpck_require__(6819); + if (items.length) { + for (const filepath of items) { + const p = pth.join(zipPath, relativePath(localPath, filepath)); + if (filter(p)) { + self.addLocalFile(filepath, pth.dirname(p)); + } + } + } + } else { + throw Utils.Errors.FILE_NOT_FOUND(localPath); + } + }, -// bind with array of arguments: https://stackoverflow.com/a/21792913 -var bind = Function.bind; -var bindable = bind.bind(bind); + /** + * Asynchronous addLocalFolder + * @param {string} localPath + * @param {callback} callback + * @param {string} [zipPath] optional path inside zip + * @param {RegExp|function} [filter] optional RegExp or Function if files match will + * be included. + */ + addLocalFolderAsync: function (localPath, callback, zipPath, filter) { + // Prepare filter + filter = filenameFilter(filter); -function bindApi(hook, state, name) { - var removeHookRef = bindable(removeHook, null).apply( - null, - name ? [state, name] : [state] - ); - hook.api = { remove: removeHookRef }; - hook.remove = removeHookRef; - ["before", "error", "after", "wrap"].forEach(function (kind) { - var args = name ? [state, kind, name] : [state, kind]; - hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); - }); -} + // fix ZipPath + zipPath = zipPath ? 
fixPath(zipPath) : ""; -function HookSingular() { - var singularHookName = "h"; - var singularHookState = { - registry: {}, - }; - var singularHook = register.bind(null, singularHookState, singularHookName); - bindApi(singularHook, singularHookState, singularHookName); - return singularHook; -} + // normalize the path first + localPath = pth.normalize(localPath); -function HookCollection() { - var state = { - registry: {}, - }; + var self = this; + filetools.fs.open(localPath, "r", function (err) { + if (err && err.code === "ENOENT") { + callback(undefined, Utils.Errors.FILE_NOT_FOUND(localPath)); + } else if (err) { + callback(undefined, err); + } else { + var items = filetools.findFiles(localPath); + var i = -1; - var hook = register.bind(null, state); - bindApi(hook, state); + var next = function () { + i += 1; + if (i < items.length) { + var filepath = items[i]; + var p = relativePath(localPath, filepath).split("\\").join("/"); //windows fix + p = p + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .replace(/[^\x20-\x7E]/g, ""); // accent fix + if (filter(p)) { + filetools.fs.stat(filepath, function (er0, stats) { + if (er0) callback(undefined, er0); + if (stats.isFile()) { + filetools.fs.readFile(filepath, function (er1, data) { + if (er1) { + callback(undefined, er1); + } else { + self.addFile(zipPath + p, data, "", stats); + next(); + } + }); + } else { + self.addFile(zipPath + p + "/", Buffer.alloc(0), "", stats); + next(); + } + }); + } else { + process.nextTick(() => { + next(); + }); + } + } else { + callback(true, undefined); + } + }; - return hook; -} + next(); + } + }); + }, -var collectionHookDeprecationMessageDisplayed = false; -function Hook() { - if (!collectionHookDeprecationMessageDisplayed) { - console.warn( - '[before-after-hook]: "Hook()" repurposing warning, use "Hook.Collection()". Read more: https://git.io/upgrade-before-after-hook-to-1.4' - ); - collectionHookDeprecationMessageDisplayed = true; - } - return HookCollection(); -} + /** + * Adds a local directory and all its nested files and directories to the archive + * + * @param {object | string} options - options object, if it is string it us used as localPath. + * @param {string} options.localPath - Local path to the folder. + * @param {string} [options.zipPath] - optional path inside zip. + * @param {RegExp|function} [options.filter] - optional RegExp or Function if files match will be included. + * @param {function|string} [options.namefix] - optional function to help fix filename + * @param {doneCallback} callback - The callback that handles the response. + * + */ + addLocalFolderAsync2: function (options, callback) { + const self = this; + options = typeof options === "object" ? options : { localPath: options }; + localPath = pth.resolve(fixPath(options.localPath)); + let { zipPath, filter, namefix } = options; -Hook.Singular = HookSingular.bind(); -Hook.Collection = HookCollection.bind(); + if (filter instanceof RegExp) { + filter = (function (rx) { + return function (filename) { + return rx.test(filename); + }; + })(filter); + } else if ("function" !== typeof filter) { + filter = function () { + return true; + }; + } -module.exports = Hook; -// expose constructors as a named property for TypeScript -module.exports.Hook = Hook; -module.exports.Singular = Hook.Singular; -module.exports.Collection = Hook.Collection; + // fix ZipPath + zipPath = zipPath ? 
fixPath(zipPath) : ""; + // Check Namefix function + if (namefix == "latin1") { + namefix = (str) => + str + .normalize("NFD") + .replace(/[\u0300-\u036f]/g, "") + .replace(/[^\x20-\x7E]/g, ""); // accent fix (latin1 characers only) + } -/***/ }), + if (typeof namefix !== "function") namefix = (str) => str; -/***/ 5549: -/***/ ((module) => { + // internal, create relative path + fix the name + const relPathFix = (entry) => pth.join(zipPath, namefix(relativePath(localPath, entry))); + const fileNameFix = (entry) => pth.win32.basename(pth.win32.normalize(namefix(entry))); -module.exports = addHook; + filetools.fs.open(localPath, "r", function (err) { + if (err && err.code === "ENOENT") { + callback(undefined, Utils.Errors.FILE_NOT_FOUND(localPath)); + } else if (err) { + callback(undefined, err); + } else { + filetools.findFilesAsync(localPath, function (err, fileEntries) { + if (err) return callback(err); + fileEntries = fileEntries.filter((dir) => filter(relPathFix(dir))); + if (!fileEntries.length) callback(undefined, false); + + setImmediate( + fileEntries.reverse().reduce(function (next, entry) { + return function (err, done) { + if (err || done === false) return setImmediate(next, err, false); + + self.addLocalFileAsync( + { + localPath: entry, + zipPath: pth.dirname(relPathFix(entry)), + zipName: fileNameFix(entry) + }, + next + ); + }; + }, callback) + ); + }); + } + }); + }, -function addHook(state, kind, name, hook) { - var orig = hook; - if (!state.registry[name]) { - state.registry[name] = []; - } + /** + * Adds a local directory and all its nested files and directories to the archive + * + * @param {string} localPath - path where files will be extracted + * @param {object} props - optional properties + * @param {string} [props.zipPath] - optional path inside zip + * @param {RegExp|function} [props.filter] - optional RegExp or Function if files match will be included. + * @param {function|string} [props.namefix] - optional function to help fix filename + */ + addLocalFolderPromise: function (localPath, props) { + return new Promise((resolve, reject) => { + this.addLocalFolderAsync2(Object.assign({ localPath }, props), (err, done) => { + if (err) reject(err); + if (done) resolve(this); + }); + }); + }, - if (kind === "before") { - hook = function (method, options) { - return Promise.resolve() - .then(orig.bind(null, options)) - .then(method.bind(null, options)); - }; - } + /** + * Allows you to create a entry (file or directory) in the zip file. + * If you want to create a directory the entryName must end in / and a null buffer should be provided. 
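+ // Illustrative sketch, not taken from this bundle: typical use of the
+ // addLocalFile / addLocalFolderPromise helpers documented above, assuming the
+ // vendored library is available as the "adm-zip" package and the paths exist.
+ // const AdmZip = require("adm-zip");
+ // const zip = new AdmZip();
+ // zip.addLocalFile("/tmp/report.pdf", "docs/");                 // stored as docs/report.pdf
+ // zip.addLocalFolderPromise("./assets", { zipPath: "static" })  // recursive add under static/
+ //     .then(() => zip.writeZip("./site.zip"));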
+ * Comment and attributes are optional + * + * @param {string} entryName + * @param {Buffer | string} content - file content as buffer or utf8 coded string + * @param {string} [comment] - file comment + * @param {number | object} [attr] - number as unix file permissions, object as filesystem Stats object + */ + addFile: function (entryName, content, comment, attr) { + entryName = zipnamefix(entryName); + let entry = getEntry(entryName); + const update = entry != null; - if (kind === "after") { - hook = function (method, options) { - var result; - return Promise.resolve() - .then(method.bind(null, options)) - .then(function (result_) { - result = result_; - return orig(result, options); - }) - .then(function () { - return result; - }); - }; - } + // prepare new entry + if (!update) { + entry = new ZipEntry(opts); + entry.entryName = entryName; + } + entry.comment = comment || ""; - if (kind === "error") { - hook = function (method, options) { - return Promise.resolve() - .then(method.bind(null, options)) - .catch(function (error) { - return orig(error, options); - }); - }; - } + const isStat = "object" === typeof attr && attr instanceof filetools.fs.Stats; - state.registry[name].push({ - hook: hook, - orig: orig, - }); -} + // last modification time from file stats + if (isStat) { + entry.header.time = attr.mtime; + } + // Set file attribute + var fileattr = entry.isDirectory ? 0x10 : 0; // (MS-DOS directory flag) -/***/ }), + // extended attributes field for Unix + // set file type either S_IFDIR / S_IFREG + let unix = entry.isDirectory ? 0x4000 : 0x8000; -/***/ 4670: -/***/ ((module) => { + if (isStat) { + // File attributes from file stats + unix |= 0xfff & attr.mode; + } else if ("number" === typeof attr) { + // attr from given attr values + unix |= 0xfff & attr; + } else { + // Default values: + unix |= entry.isDirectory ? 0o755 : 0o644; // permissions (drwxr-xr-x) or (-r-wr--r--) + } -module.exports = register; + fileattr = (fileattr | (unix << 16)) >>> 0; // add attributes -function register(state, name, method, options) { - if (typeof method !== "function") { - throw new Error("method for before hook must be a function"); - } + entry.attr = fileattr; - if (!options) { - options = {}; - } + entry.setData(content); + if (!update) _zip.setEntry(entry); - if (Array.isArray(name)) { - return name.reverse().reduce(function (callback, name) { - return register.bind(null, state, name, callback, options); - }, method)(); - } + return entry; + }, - return Promise.resolve().then(function () { - if (!state.registry[name]) { - return method(options); - } + /** + * Returns an array of ZipEntry objects representing the files and folders inside the archive + * + * @param {string} [password] + * @returns Array + */ + getEntries: function (password) { + _zip.password = password; + return _zip ? _zip.entries : []; + }, - return state.registry[name].reduce(function (method, registered) { - return registered.hook.bind(null, method, options); - }, method)(); - }); -} + /** + * Returns a ZipEntry object representing the file or folder specified by ``name``. 
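+ // Illustrative sketch of the attribute packing performed by addFile() above:
+ // the Unix type and permission bits end up in the high 16 bits of the ZIP
+ // "external attributes" field (entry.attr); the values below mirror the
+ // defaults shown in the code and are local to this sketch.
+ // const S_IFREG = 0x8000;                       // regular-file type bit
+ // const mode = 0o644;                           // default file permissions in addFile()
+ // const unix = S_IFREG | mode;                  // 0x81a4
+ // const external = (0 | (unix << 16)) >>> 0;    // no MS-DOS directory flag for a file
+ // console.log(external.toString(16));           // "81a40000" -> value stored in entry.attr
+ // console.log(((external >> 16) & 0xfff).toString(8)); // "644" -> what the fileAttr getter recovers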
+ * + * @param {string} name + * @return ZipEntry + */ + getEntry: function (/**String*/ name) { + return getEntry(name); + }, + getEntryCount: function () { + return _zip.getEntryCount(); + }, -/***/ }), + forEach: function (callback) { + return _zip.forEach(callback); + }, -/***/ 6819: -/***/ ((module) => { + /** + * Extracts the given entry to the given targetPath + * If the entry is a directory inside the archive, the entire directory and it's subdirectories will be extracted + * + * @param {string|ZipEntry} entry - ZipEntry object or String with the full path of the entry + * @param {string} targetPath - Target folder where to write the file + * @param {boolean} [maintainEntryPath=true] - If maintainEntryPath is true and the entry is inside a folder, the entry folder will be created in targetPath as well. Default is TRUE + * @param {boolean} [overwrite=false] - If the file already exists at the target path, the file will be overwriten if this is true. + * @param {boolean} [keepOriginalPermission=false] - The file will be set as the permission from the entry if this is true. + * @param {string} [outFileName] - String If set will override the filename of the extracted file (Only works if the entry is a file) + * + * @return Boolean + */ + extractEntryTo: function (entry, targetPath, maintainEntryPath, overwrite, keepOriginalPermission, outFileName) { + overwrite = get_Bool(false, overwrite); + keepOriginalPermission = get_Bool(false, keepOriginalPermission); + maintainEntryPath = get_Bool(true, maintainEntryPath); + outFileName = get_Str(keepOriginalPermission, outFileName); -module.exports = removeHook; + var item = getEntry(entry); + if (!item) { + throw Utils.Errors.NO_ENTRY(); + } -function removeHook(state, name, method) { - if (!state.registry[name]) { - return; - } + var entryName = canonical(item.entryName); - var index = state.registry[name] - .map(function (registered) { - return registered.orig; - }) - .indexOf(method); + var target = sanitize(targetPath, outFileName && !item.isDirectory ? outFileName : maintainEntryPath ? entryName : pth.basename(entryName)); - if (index === -1) { - return; - } + if (item.isDirectory) { + var children = _zip.getEntryChildren(item); + children.forEach(function (child) { + if (child.isDirectory) return; + var content = child.getData(); + if (!content) { + throw Utils.Errors.CANT_EXTRACT_FILE(); + } + var name = canonical(child.entryName); + var childName = sanitize(targetPath, maintainEntryPath ? name : pth.basename(name)); + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? child.header.fileAttr : undefined; + filetools.writeFileTo(childName, content, overwrite, fileAttr); + }); + return true; + } - state.registry[name].splice(index, 1); -} + var content = item.getData(_zip.password); + if (!content) throw Utils.Errors.CANT_EXTRACT_FILE(); + if (filetools.fs.existsSync(target) && !overwrite) { + throw Utils.Errors.CANT_OVERRIDE(); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileTo(target, content, overwrite, fileAttr); -/***/ }), + return true; + }, -/***/ 3717: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + /** + * Test the archive + * @param {string} [pass] + */ + test: function (pass) { + if (!_zip) { + return false; + } -var concatMap = __nccwpck_require__(6891); -var balanced = __nccwpck_require__(9417); + for (var entry in _zip.entries) { + try { + if (entry.isDirectory) { + continue; + } + var content = _zip.entries[entry].getData(pass); + if (!content) { + return false; + } + } catch (err) { + return false; + } + } + return true; + }, -module.exports = expandTop; + /** + * Extracts the entire archive to the given location + * + * @param {string} targetPath Target location + * @param {boolean} [overwrite=false] If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param {boolean} [keepOriginalPermission=false] The file will be set as the permission from the entry if this is true. + * Default is FALSE + * @param {string|Buffer} [pass] password + */ + extractAllTo: function (targetPath, overwrite, keepOriginalPermission, pass) { + keepOriginalPermission = get_Bool(false, keepOriginalPermission); + pass = get_Str(keepOriginalPermission, pass); + overwrite = get_Bool(false, overwrite); + if (!_zip) throw Utils.Errors.NO_ZIP(); -var escSlash = '\0SLASH'+Math.random()+'\0'; -var escOpen = '\0OPEN'+Math.random()+'\0'; -var escClose = '\0CLOSE'+Math.random()+'\0'; -var escComma = '\0COMMA'+Math.random()+'\0'; -var escPeriod = '\0PERIOD'+Math.random()+'\0'; + _zip.entries.forEach(function (entry) { + var entryName = sanitize(targetPath, canonical(entry.entryName)); + if (entry.isDirectory) { + filetools.makeDir(entryName); + return; + } + var content = entry.getData(pass); + if (!content) { + throw Utils.Errors.CANT_EXTRACT_FILE(); + } + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + filetools.writeFileTo(entryName, content, overwrite, fileAttr); + try { + filetools.fs.utimesSync(entryName, entry.header.time, entry.header.time); + } catch (err) { + throw Utils.Errors.CANT_EXTRACT_FILE(); + } + }); + }, -function numeric(str) { - return parseInt(str, 10) == str - ? parseInt(str, 10) - : str.charCodeAt(0); -} + /** + * Asynchronous extractAllTo + * + * @param {string} targetPath Target location + * @param {boolean} [overwrite=false] If the file already exists at the target path, the file will be overwriten if this is true. + * Default is FALSE + * @param {boolean} [keepOriginalPermission=false] The file will be set as the permission from the entry if this is true. + * Default is FALSE + * @param {function} callback The callback will be executed when all entries are extracted successfully or any error is thrown. 
+ */ + extractAllToAsync: function (targetPath, overwrite, keepOriginalPermission, callback) { + callback = get_Fun(overwrite, keepOriginalPermission, callback); + keepOriginalPermission = get_Bool(false, keepOriginalPermission); + overwrite = get_Bool(false, overwrite); + if (!callback) { + return new Promise((resolve, reject) => { + this.extractAllToAsync(targetPath, overwrite, keepOriginalPermission, function (err) { + if (err) { + reject(err); + } else { + resolve(this); + } + }); + }); + } + if (!_zip) { + callback(Utils.Errors.NO_ZIP()); + return; + } -function escapeBraces(str) { - return str.split('\\\\').join(escSlash) - .split('\\{').join(escOpen) - .split('\\}').join(escClose) - .split('\\,').join(escComma) - .split('\\.').join(escPeriod); -} + targetPath = pth.resolve(targetPath); + // convert entryName to + const getPath = (entry) => sanitize(targetPath, pth.normalize(canonical(entry.entryName))); + const getError = (msg, file) => new Error(msg + ': "' + file + '"'); -function unescapeBraces(str) { - return str.split(escSlash).join('\\') - .split(escOpen).join('{') - .split(escClose).join('}') - .split(escComma).join(',') - .split(escPeriod).join('.'); -} + // separate directories from files + const dirEntries = []; + const fileEntries = []; + _zip.entries.forEach((e) => { + if (e.isDirectory) { + dirEntries.push(e); + } else { + fileEntries.push(e); + } + }); + // Create directory entries first synchronously + // this prevents race condition and assures folders are there before writing files + for (const entry of dirEntries) { + const dirPath = getPath(entry); + // The reverse operation for attr depend on method addFile() + const dirAttr = keepOriginalPermission ? entry.header.fileAttr : undefined; + try { + filetools.makeDir(dirPath); + if (dirAttr) filetools.fs.chmodSync(dirPath, dirAttr); + // in unix timestamp will change if files are later added to folder, but still + filetools.fs.utimesSync(dirPath, entry.header.time, entry.header.time); + } catch (er) { + callback(getError("Unable to create folder", dirPath)); + } + } -// Basically just str.split(","), but handling cases -// where we have nested braced sections, which should be -// treated as individual members, like {a,{b,c},d} -function parseCommaParts(str) { - if (!str) - return ['']; + fileEntries.reverse().reduce(function (next, entry) { + return function (err) { + if (err) { + next(err); + } else { + const entryName = pth.normalize(canonical(entry.entryName)); + const filePath = sanitize(targetPath, entryName); + entry.getDataAsync(function (content, err_1) { + if (err_1) { + next(err_1); + } else if (!content) { + next(Utils.Errors.CANT_EXTRACT_FILE()); + } else { + // The reverse operation for attr depend on method addFile() + const fileAttr = keepOriginalPermission ? 
entry.header.fileAttr : undefined; + filetools.writeFileToAsync(filePath, content, overwrite, fileAttr, function (succ) { + if (!succ) { + next(getError("Unable to write file", filePath)); + } + filetools.fs.utimes(filePath, entry.header.time, entry.header.time, function (err_2) { + if (err_2) { + next(getError("Unable to set times", filePath)); + } else { + next(); + } + }); + }); + } + }); + } + }; + }, callback)(); + }, - var parts = []; - var m = balanced('{', '}', str); + /** + * Writes the newly created zip file to disk at the specified location or if a zip was opened and no ``targetFileName`` is provided, it will overwrite the opened zip + * + * @param {string} targetFileName + * @param {function} callback + */ + writeZip: function (targetFileName, callback) { + if (arguments.length === 1) { + if (typeof targetFileName === "function") { + callback = targetFileName; + targetFileName = ""; + } + } - if (!m) - return str.split(','); + if (!targetFileName && opts.filename) { + targetFileName = opts.filename; + } + if (!targetFileName) return; - var pre = m.pre; - var body = m.body; - var post = m.post; - var p = pre.split(','); + var zipData = _zip.compressToBuffer(); + if (zipData) { + var ok = filetools.writeFileTo(targetFileName, zipData, true); + if (typeof callback === "function") callback(!ok ? new Error("failed") : null, ""); + } + }, - p[p.length-1] += '{' + body + '}'; - var postParts = parseCommaParts(post); - if (post.length) { - p[p.length-1] += postParts.shift(); - p.push.apply(p, postParts); - } + /** + * + * @param {string} targetFileName + * @param {object} [props] + * @param {boolean} [props.overwrite=true] If the file already exists at the target path, the file will be overwriten if this is true. + * @param {boolean} [props.perm] The file will be set as the permission from the entry if this is true. - parts.push.apply(parts, p); + * @returns {Promise} + */ + writeZipPromise: function (/**String*/ targetFileName, /* object */ props) { + const { overwrite, perm } = Object.assign({ overwrite: true }, props); - return parts; -} + return new Promise((resolve, reject) => { + // find file name + if (!targetFileName && opts.filename) targetFileName = opts.filename; + if (!targetFileName) reject("ADM-ZIP: ZIP File Name Missing"); -function expandTop(str) { - if (!str) - return []; + this.toBufferPromise().then((zipData) => { + const ret = (done) => (done ? resolve(done) : reject("ADM-ZIP: Wasn't able to write zip file")); + filetools.writeFileToAsync(targetFileName, zipData, overwrite, perm, ret); + }, reject); + }); + }, - // I don't know why Bash 4.3 does this, but it does. - // Anything starting with {} will have the first two bytes preserved - // but *only* at the top level, so {},a}b will not expand to anything, - // but a{},b}c will be expanded to [a}c,abc]. - // One could argue that this is a bug in Bash, but since the goal of - // this module is to match Bash's rules, we escape a leading {} - if (str.substr(0, 2) === '{}') { - str = '\\{\\}' + str.substr(2); - } + /** + * @returns {Promise} A promise to the Buffer. 
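+ // Illustrative sketch, not taken from this bundle: serializing an archive with
+ // writeZip() and toBufferPromise() as documented above ("./site.zip" is a placeholder).
+ // const AdmZip = require("adm-zip");
+ // const zip = new AdmZip("./site.zip");
+ // zip.addFile("notes/readme.txt", Buffer.from("hello"), "a file comment");
+ // zip.writeZip();                              // rewrites the opened ./site.zip
+ // zip.toBufferPromise().then((buf) => {
+ //     console.log("archive size:", buf.length, "bytes");
+ // });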
+ */ + toBufferPromise: function () { + return new Promise((resolve, reject) => { + _zip.toAsyncBuffer(resolve, reject); + }); + }, - return expand(escapeBraces(str), true).map(unescapeBraces); -} + /** + * Returns the content of the entire zip file as a Buffer object + * + * @prop {function} [onSuccess] + * @prop {function} [onFail] + * @prop {function} [onItemStart] + * @prop {function} [onItemEnd] + * @returns {Buffer} + */ + toBuffer: function (onSuccess, onFail, onItemStart, onItemEnd) { + if (typeof onSuccess === "function") { + _zip.toAsyncBuffer(onSuccess, onFail, onItemStart, onItemEnd); + return null; + } + return _zip.compressToBuffer(); + } + }; +}; -function identity(e) { - return e; -} -function embrace(str) { - return '{' + str + '}'; -} -function isPadded(el) { - return /^-?0\d/.test(el); -} +/***/ }), -function lte(i, y) { - return i <= y; -} -function gte(i, y) { - return i >= y; -} +/***/ 9032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -function expand(str, isTop) { - var expansions = []; +var Utils = __nccwpck_require__(5182), + Constants = Utils.Constants; - var m = balanced('{', '}', str); - if (!m || /\$$/.test(m.pre)) return [str]; +/* The central directory file header */ +module.exports = function () { + var _verMade = 20, // v2.0 + _version = 10, // v1.0 + _flags = 0, + _method = 0, + _time = 0, + _crc = 0, + _compressedSize = 0, + _size = 0, + _fnameLen = 0, + _extraLen = 0, + _comLen = 0, + _diskStart = 0, + _inattr = 0, + _attr = 0, + _offset = 0; - var isNumericSequence = /^-?\d+\.\.-?\d+(?:\.\.-?\d+)?$/.test(m.body); - var isAlphaSequence = /^[a-zA-Z]\.\.[a-zA-Z](?:\.\.-?\d+)?$/.test(m.body); - var isSequence = isNumericSequence || isAlphaSequence; - var isOptions = m.body.indexOf(',') >= 0; - if (!isSequence && !isOptions) { - // {a},b} - if (m.post.match(/,.*\}/)) { - str = m.pre + '{' + m.body + escClose + m.post; - return expand(str); - } - return [str]; - } + _verMade |= Utils.isWin ? 0x0a00 : 0x0300; - var n; - if (isSequence) { - n = m.body.split(/\.\./); - } else { - n = parseCommaParts(m.body); - if (n.length === 1) { - // x{{a,b}}y ==> x{a}y x{b}y - n = expand(n[0], false).map(embrace); - if (n.length === 1) { - var post = m.post.length - ? expand(m.post, false) - : ['']; - return post.map(function(p) { - return m.pre + n[0] + p; - }); - } - } - } + // Set EFS flag since filename and comment fields are all by default encoded using UTF-8. + // Without it file names may be corrupted for other apps when file names use unicode chars + _flags |= Constants.FLG_EFS; - // at this point, n is the parts, and we know it's not a comma set - // with a single entry. + const _localHeader = { + extraLen: 0 + }; - // no need to expand pre, since it is guaranteed to be free of brace-sets - var pre = m.pre; - var post = m.post.length - ? expand(m.post, false) - : ['']; + // casting + const uint32 = (val) => Math.max(0, val) >>> 0; + const uint16 = (val) => Math.max(0, val) & 0xffff; + const uint8 = (val) => Math.max(0, val) & 0xff; - var N; + _time = Utils.fromDate2DOS(new Date()); - if (isSequence) { - var x = numeric(n[0]); - var y = numeric(n[1]); - var width = Math.max(n[0].length, n[1].length) - var incr = n.length == 3 - ? 
Math.abs(numeric(n[2])) - : 1; - var test = lte; - var reverse = y < x; - if (reverse) { - incr *= -1; - test = gte; - } - var pad = n.some(isPadded); + return { + get made() { + return _verMade; + }, + set made(val) { + _verMade = val; + }, - N = []; + get version() { + return _version; + }, + set version(val) { + _version = val; + }, - for (var i = x; test(i, y); i += incr) { - var c; - if (isAlphaSequence) { - c = String.fromCharCode(i); - if (c === '\\') - c = ''; - } else { - c = String(i); - if (pad) { - var need = width - c.length; - if (need > 0) { - var z = new Array(need + 1).join('0'); - if (i < 0) - c = '-' + z + c.slice(1); - else - c = z + c; - } - } - } - N.push(c); - } - } else { - N = concatMap(n, function(el) { return expand(el, false) }); - } + get flags() { + return _flags; + }, + set flags(val) { + _flags = val; + }, - for (var j = 0; j < N.length; j++) { - for (var k = 0; k < post.length; k++) { - var expansion = pre + N[j] + post[k]; - if (!isTop || isSequence || expansion) - expansions.push(expansion); - } - } + get flags_efs() { + return (_flags & Constants.FLG_EFS) > 0; + }, + set flags_efs(val) { + if (val) { + _flags |= Constants.FLG_EFS; + } else { + _flags &= ~Constants.FLG_EFS; + } + }, - return expansions; -} + get flags_desc() { + return (_flags & Constants.FLG_DESC) > 0; + }, + set flags_desc(val) { + if (val) { + _flags |= Constants.FLG_DESC; + } else { + _flags &= ~Constants.FLG_DESC; + } + }, + get method() { + return _method; + }, + set method(val) { + switch (val) { + case Constants.STORED: + this.version = 10; + case Constants.DEFLATED: + default: + this.version = 20; + } + _method = val; + }, + get time() { + return Utils.fromDOS2Date(this.timeval); + }, + set time(val) { + this.timeval = Utils.fromDate2DOS(val); + }, -/***/ }), + get timeval() { + return _time; + }, + set timeval(val) { + _time = uint32(val); + }, -/***/ 6891: -/***/ ((module) => { + get timeHighByte() { + return uint8(_time >>> 8); + }, + get crc() { + return _crc; + }, + set crc(val) { + _crc = uint32(val); + }, -module.exports = function (xs, fn) { - var res = []; - for (var i = 0; i < xs.length; i++) { - var x = fn(xs[i], i); - if (isArray(x)) res.push.apply(res, x); - else res.push(x); - } - return res; -}; + get compressedSize() { + return _compressedSize; + }, + set compressedSize(val) { + _compressedSize = uint32(val); + }, -var isArray = Array.isArray || function (xs) { - return Object.prototype.toString.call(xs) === '[object Array]'; -}; + get size() { + return _size; + }, + set size(val) { + _size = uint32(val); + }, + get fileNameLength() { + return _fnameLen; + }, + set fileNameLength(val) { + _fnameLen = val; + }, -/***/ }), + get extraLength() { + return _extraLen; + }, + set extraLength(val) { + _extraLen = val; + }, -/***/ 8932: -/***/ ((__unused_webpack_module, exports) => { + get extraLocalLength() { + return _localHeader.extraLen; + }, + set extraLocalLength(val) { + _localHeader.extraLen = val; + }, -"use strict"; + get commentLength() { + return _comLen; + }, + set commentLength(val) { + _comLen = val; + }, + get diskNumStart() { + return _diskStart; + }, + set diskNumStart(val) { + _diskStart = uint32(val); + }, -Object.defineProperty(exports, "__esModule", ({ value: true })); + get inAttr() { + return _inattr; + }, + set inAttr(val) { + _inattr = uint32(val); + }, -class Deprecation extends Error { - constructor(message) { - super(message); // Maintains proper stack trace (only available on V8) + get attr() { + return _attr; + }, + set attr(val) { + _attr = 
uint32(val); + }, - /* istanbul ignore next */ + // get Unix file permissions + get fileAttr() { + return (_attr || 0) >> 16 & 0xfff; + }, - if (Error.captureStackTrace) { - Error.captureStackTrace(this, this.constructor); - } + get offset() { + return _offset; + }, + set offset(val) { + _offset = uint32(val); + }, - this.name = 'Deprecation'; - } + get encrypted() { + return (_flags & Constants.FLG_ENC) === Constants.FLG_ENC; + }, -} + get centralHeaderSize() { + return Constants.CENHDR + _fnameLen + _extraLen + _comLen; + }, -exports.Deprecation = Deprecation; + get realDataOffset() { + return _offset + Constants.LOCHDR + _localHeader.fnameLen + _localHeader.extraLen; + }, + get localHeader() { + return _localHeader; + }, -/***/ }), + loadLocalHeaderFromBinary: function (/*Buffer*/ input) { + var data = input.slice(_offset, _offset + Constants.LOCHDR); + // 30 bytes and should start with "PK\003\004" + if (data.readUInt32LE(0) !== Constants.LOCSIG) { + throw Utils.Errors.INVALID_LOC(); + } -/***/ 6863: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // version needed to extract + _localHeader.version = data.readUInt16LE(Constants.LOCVER); + // general purpose bit flag + _localHeader.flags = data.readUInt16LE(Constants.LOCFLG); + // compression method + _localHeader.method = data.readUInt16LE(Constants.LOCHOW); + // modification time (2 bytes time, 2 bytes date) + _localHeader.time = data.readUInt32LE(Constants.LOCTIM); + // uncompressed file crc-32 valu + _localHeader.crc = data.readUInt32LE(Constants.LOCCRC); + // compressed size + _localHeader.compressedSize = data.readUInt32LE(Constants.LOCSIZ); + // uncompressed size + _localHeader.size = data.readUInt32LE(Constants.LOCLEN); + // filename length + _localHeader.fnameLen = data.readUInt16LE(Constants.LOCNAM); + // extra field length + _localHeader.extraLen = data.readUInt16LE(Constants.LOCEXT); -module.exports = realpath -realpath.realpath = realpath -realpath.sync = realpathSync -realpath.realpathSync = realpathSync -realpath.monkeypatch = monkeypatch -realpath.unmonkeypatch = unmonkeypatch - -var fs = __nccwpck_require__(7147) -var origRealpath = fs.realpath -var origRealpathSync = fs.realpathSync - -var version = process.version -var ok = /^v[0-5]\./.test(version) -var old = __nccwpck_require__(1734) - -function newError (er) { - return er && er.syscall === 'realpath' && ( - er.code === 'ELOOP' || - er.code === 'ENOMEM' || - er.code === 'ENAMETOOLONG' - ) -} + // read extra data + const extraStart = _offset + Constants.LOCHDR + _localHeader.fnameLen; + const extraEnd = extraStart + _localHeader.extraLen; + return input.slice(extraStart, extraEnd); + }, -function realpath (p, cache, cb) { - if (ok) { - return origRealpath(p, cache, cb) - } + loadFromBinary: function (/*Buffer*/ data) { + // data should be 46 bytes and start with "PK 01 02" + if (data.length !== Constants.CENHDR || data.readUInt32LE(0) !== Constants.CENSIG) { + throw Utils.Errors.INVALID_CEN(); + } + // version made by + _verMade = data.readUInt16LE(Constants.CENVEM); + // version needed to extract + _version = data.readUInt16LE(Constants.CENVER); + // encrypt, decrypt flags + _flags = data.readUInt16LE(Constants.CENFLG); + // compression method + _method = data.readUInt16LE(Constants.CENHOW); + // modification time (2 bytes time, 2 bytes date) + _time = data.readUInt32LE(Constants.CENTIM); + // uncompressed file crc-32 value + _crc = data.readUInt32LE(Constants.CENCRC); + // compressed size + _compressedSize = 
data.readUInt32LE(Constants.CENSIZ); + // uncompressed size + _size = data.readUInt32LE(Constants.CENLEN); + // filename length + _fnameLen = data.readUInt16LE(Constants.CENNAM); + // extra field length + _extraLen = data.readUInt16LE(Constants.CENEXT); + // file comment length + _comLen = data.readUInt16LE(Constants.CENCOM); + // volume number start + _diskStart = data.readUInt16LE(Constants.CENDSK); + // internal file attributes + _inattr = data.readUInt16LE(Constants.CENATT); + // external file attributes + _attr = data.readUInt32LE(Constants.CENATX); + // LOC header offset + _offset = data.readUInt32LE(Constants.CENOFF); + }, - if (typeof cache === 'function') { - cb = cache - cache = null - } - origRealpath(p, cache, function (er, result) { - if (newError(er)) { - old.realpath(p, cache, cb) - } else { - cb(er, result) - } - }) -} + localHeaderToBinary: function () { + // LOC header size (30 bytes) + var data = Buffer.alloc(Constants.LOCHDR); + // "PK\003\004" + data.writeUInt32LE(Constants.LOCSIG, 0); + // version needed to extract + data.writeUInt16LE(_version, Constants.LOCVER); + // general purpose bit flag + data.writeUInt16LE(_flags, Constants.LOCFLG); + // compression method + data.writeUInt16LE(_method, Constants.LOCHOW); + // modification time (2 bytes time, 2 bytes date) + data.writeUInt32LE(_time, Constants.LOCTIM); + // uncompressed file crc-32 value + data.writeUInt32LE(_crc, Constants.LOCCRC); + // compressed size + data.writeUInt32LE(_compressedSize, Constants.LOCSIZ); + // uncompressed size + data.writeUInt32LE(_size, Constants.LOCLEN); + // filename length + data.writeUInt16LE(_fnameLen, Constants.LOCNAM); + // extra field length + data.writeUInt16LE(_localHeader.extraLen, Constants.LOCEXT); + return data; + }, -function realpathSync (p, cache) { - if (ok) { - return origRealpathSync(p, cache) - } + centralHeaderToBinary: function () { + // CEN header size (46 bytes) + var data = Buffer.alloc(Constants.CENHDR + _fnameLen + _extraLen + _comLen); + // "PK\001\002" + data.writeUInt32LE(Constants.CENSIG, 0); + // version made by + data.writeUInt16LE(_verMade, Constants.CENVEM); + // version needed to extract + data.writeUInt16LE(_version, Constants.CENVER); + // encrypt, decrypt flags + data.writeUInt16LE(_flags, Constants.CENFLG); + // compression method + data.writeUInt16LE(_method, Constants.CENHOW); + // modification time (2 bytes time, 2 bytes date) + data.writeUInt32LE(_time, Constants.CENTIM); + // uncompressed file crc-32 value + data.writeUInt32LE(_crc, Constants.CENCRC); + // compressed size + data.writeUInt32LE(_compressedSize, Constants.CENSIZ); + // uncompressed size + data.writeUInt32LE(_size, Constants.CENLEN); + // filename length + data.writeUInt16LE(_fnameLen, Constants.CENNAM); + // extra field length + data.writeUInt16LE(_extraLen, Constants.CENEXT); + // file comment length + data.writeUInt16LE(_comLen, Constants.CENCOM); + // volume number start + data.writeUInt16LE(_diskStart, Constants.CENDSK); + // internal file attributes + data.writeUInt16LE(_inattr, Constants.CENATT); + // external file attributes + data.writeUInt32LE(_attr, Constants.CENATX); + // LOC header offset + data.writeUInt32LE(_offset, Constants.CENOFF); + return data; + }, - try { - return origRealpathSync(p, cache) - } catch (er) { - if (newError(er)) { - return old.realpathSync(p, cache) - } else { - throw er - } - } -} + toJSON: function () { + const bytes = function (nr) { + return nr + " bytes"; + }; -function monkeypatch () { - fs.realpath = realpath - fs.realpathSync = 
realpathSync -} + return { + made: _verMade, + version: _version, + flags: _flags, + method: Utils.methodToString(_method), + time: this.time, + crc: "0x" + _crc.toString(16).toUpperCase(), + compressedSize: bytes(_compressedSize), + size: bytes(_size), + fileNameLength: bytes(_fnameLen), + extraLength: bytes(_extraLen), + commentLength: bytes(_comLen), + diskNumStart: _diskStart, + inAttr: _inattr, + attr: _attr, + offset: _offset, + centralHeaderSize: bytes(Constants.CENHDR + _fnameLen + _extraLen + _comLen) + }; + }, -function unmonkeypatch () { - fs.realpath = origRealpath - fs.realpathSync = origRealpathSync -} + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; /***/ }), -/***/ 1734: +/***/ 4958: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { -// Copyright Joyent, Inc. and other Node contributors. -// -// Permission is hereby granted, free of charge, to any person obtaining a -// copy of this software and associated documentation files (the -// "Software"), to deal in the Software without restriction, including -// without limitation the rights to use, copy, modify, merge, publish, -// distribute, sublicense, and/or sell copies of the Software, and to permit -// persons to whom the Software is furnished to do so, subject to the -// following conditions: -// -// The above copyright notice and this permission notice shall be included -// in all copies or substantial portions of the Software. -// -// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS -// OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF -// MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN -// NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, -// DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR -// OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE -// USE OR OTHER DEALINGS IN THE SOFTWARE. - -var pathModule = __nccwpck_require__(1017); -var isWindows = process.platform === 'win32'; -var fs = __nccwpck_require__(7147); - -// JavaScript implementation of realpath, ported from node pre-v6 - -var DEBUG = process.env.NODE_DEBUG && /fs/.test(process.env.NODE_DEBUG); - -function rethrow() { - // Only enable in debug mode. A backtrace uses ~1000 bytes of heap space and - // is fairly slow to generate. - var callback; - if (DEBUG) { - var backtrace = new Error; - callback = debugCallback; - } else - callback = missingCallback; - - return callback; - - function debugCallback(err) { - if (err) { - backtrace.message = err.message; - err = backtrace; - missingCallback(err); - } - } - - function missingCallback(err) { - if (err) { - if (process.throwDeprecation) - throw err; // Forgot a callback but don't know where? Use NODE_DEBUG=fs - else if (!process.noDeprecation) { - var msg = 'fs: missing callback ' + (err.stack || err.message); - if (process.traceDeprecation) - console.trace(msg); - else - console.error(msg); - } - } - } -} - -function maybeCallback(cb) { - return typeof cb === 'function' ? cb : rethrow(); -} - -var normalize = pathModule.normalize; +exports.EntryHeader = __nccwpck_require__(9032); +exports.MainHeader = __nccwpck_require__(8952); -// Regexp that finds the next partion of a (partial) path -// result is [base_with_slash, base], e.g. ['somedir/', 'somedir'] -if (isWindows) { - var nextPartRe = /(.*?)(?:[\/\\]+|$)/g; -} else { - var nextPartRe = /(.*?)(?:[\/]+|$)/g; -} -// Regex to find the device root, including trailing slash. E.g. 
'c:\\'. -if (isWindows) { - var splitRootRe = /^(?:[a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/][^\\\/]+)?[\\\/]*/; -} else { - var splitRootRe = /^[\/]*/; -} +/***/ }), -exports.realpathSync = function realpathSync(p, cache) { - // make p is absolute - p = pathModule.resolve(p); +/***/ 8952: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return cache[p]; - } +var Utils = __nccwpck_require__(5182), + Constants = Utils.Constants; - var original = p, - seenLinks = {}, - knownHard = {}; +/* The entries in the end of central directory */ +module.exports = function () { + var _volumeEntries = 0, + _totalEntries = 0, + _size = 0, + _offset = 0, + _commentLength = 0; - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; + return { + get diskEntries() { + return _volumeEntries; + }, + set diskEntries(/*Number*/ val) { + _volumeEntries = _totalEntries = val; + }, - start(); + get totalEntries() { + return _totalEntries; + }, + set totalEntries(/*Number*/ val) { + _totalEntries = _volumeEntries = val; + }, - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; + get size() { + return _size; + }, + set size(/*Number*/ val) { + _size = val; + }, - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstatSync(base); - knownHard[base] = true; - } - } + get offset() { + return _offset; + }, + set offset(/*Number*/ val) { + _offset = val; + }, - // walk down the path, swapping out linked pathparts for their real - // values - // NB: p.length changes. - while (pos < p.length) { - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; + get commentLength() { + return _commentLength; + }, + set commentLength(/*Number*/ val) { + _commentLength = val; + }, - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - continue; - } + get mainHeaderSize() { + return Constants.ENDHDR + _commentLength; + }, - var resolvedLink; - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // some known symbolic link. no need to stat again. - resolvedLink = cache[base]; - } else { - var stat = fs.lstatSync(base); - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - continue; - } + loadFromBinary: function (/*Buffer*/ data) { + // data should be 22 bytes and start with "PK 05 06" + // or be 56+ bytes and start with "PK 06 06" for Zip64 + if ( + (data.length !== Constants.ENDHDR || data.readUInt32LE(0) !== Constants.ENDSIG) && + (data.length < Constants.ZIP64HDR || data.readUInt32LE(0) !== Constants.ZIP64SIG) + ) { + throw Utils.Errors.INVALID_END(); + } - // read the link if it wasn't read before - // dev/ino always return 0 on windows, so skip the check. 
- var linkTarget = null; - if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - linkTarget = seenLinks[id]; - } - } - if (linkTarget === null) { - fs.statSync(base); - linkTarget = fs.readlinkSync(base); - } - resolvedLink = pathModule.resolve(previous, linkTarget); - // track this, if given a cache. - if (cache) cache[base] = resolvedLink; - if (!isWindows) seenLinks[id] = linkTarget; - } + if (data.readUInt32LE(0) === Constants.ENDSIG) { + // number of entries on this volume + _volumeEntries = data.readUInt16LE(Constants.ENDSUB); + // total number of entries + _totalEntries = data.readUInt16LE(Constants.ENDTOT); + // central directory size in bytes + _size = data.readUInt32LE(Constants.ENDSIZ); + // offset of first CEN header + _offset = data.readUInt32LE(Constants.ENDOFF); + // zip file comment length + _commentLength = data.readUInt16LE(Constants.ENDCOM); + } else { + // number of entries on this volume + _volumeEntries = Utils.readBigUInt64LE(data, Constants.ZIP64SUB); + // total number of entries + _totalEntries = Utils.readBigUInt64LE(data, Constants.ZIP64TOT); + // central directory size in bytes + _size = Utils.readBigUInt64LE(data, Constants.ZIP64SIZE); + // offset of first CEN header + _offset = Utils.readBigUInt64LE(data, Constants.ZIP64OFF); - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } + _commentLength = 0; + } + }, - if (cache) cache[original] = p; + toBinary: function () { + var b = Buffer.alloc(Constants.ENDHDR + _commentLength); + // "PK 05 06" signature + b.writeUInt32LE(Constants.ENDSIG, 0); + b.writeUInt32LE(0, 4); + // number of entries on this volume + b.writeUInt16LE(_volumeEntries, Constants.ENDSUB); + // total number of entries + b.writeUInt16LE(_totalEntries, Constants.ENDTOT); + // central directory size in bytes + b.writeUInt32LE(_size, Constants.ENDSIZ); + // offset of first CEN header + b.writeUInt32LE(_offset, Constants.ENDOFF); + // zip file comment length + b.writeUInt16LE(_commentLength, Constants.ENDCOM); + // fill comment memory with spaces so no garbage is left there + b.fill(" ", Constants.ENDHDR); - return p; -}; + return b; + }, + toJSON: function () { + // creates 0x0000 style output + const offset = function (nr, len) { + let offs = nr.toString(16).toUpperCase(); + while (offs.length < len) offs = "0" + offs; + return "0x" + offs; + }; -exports.realpath = function realpath(p, cache, cb) { - if (typeof cb !== 'function') { - cb = maybeCallback(cache); - cache = null; - } + return { + diskEntries: _volumeEntries, + totalEntries: _totalEntries, + size: _size + " bytes", + offset: offset(_offset, 4), + commentLength: _commentLength + }; + }, - // make p is absolute - p = pathModule.resolve(p); + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; +// Misspelled - if (cache && Object.prototype.hasOwnProperty.call(cache, p)) { - return process.nextTick(cb.bind(null, null, cache[p])); - } - var original = p, - seenLinks = {}, - knownHard = {}; +/***/ }), - // current character position in p - var pos; - // the partial path so far, including a trailing slash if any - var current; - // the partial path without a trailing slash (except when pointing at a root) - var base; - // the partial path scanned in the previous round, with slash - var previous; +/***/ 7686: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - start(); +module.exports = function (/*Buffer*/ inbuf) { + 
var zlib = __nccwpck_require__(9796); - function start() { - // Skip over roots - var m = splitRootRe.exec(p); - pos = m[0].length; - current = m[0]; - base = m[0]; - previous = ''; + var opts = { chunkSize: (parseInt(inbuf.length / 1024) + 1) * 1024 }; - // On windows, check that the root exists. On unix there is no need. - if (isWindows && !knownHard[base]) { - fs.lstat(base, function(err) { - if (err) return cb(err); - knownHard[base] = true; - LOOP(); - }); - } else { - process.nextTick(LOOP); - } - } + return { + deflate: function () { + return zlib.deflateRawSync(inbuf, opts); + }, - // walk down the path, swapping out linked pathparts for their real - // values - function LOOP() { - // stop if scanned past end of path - if (pos >= p.length) { - if (cache) cache[original] = p; - return cb(null, p); - } + deflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createDeflateRaw(opts), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; +}; - // find the next part - nextPartRe.lastIndex = pos; - var result = nextPartRe.exec(p); - previous = current; - current += result[0]; - base = previous + result[1]; - pos = nextPartRe.lastIndex; - // continue if not a symlink - if (knownHard[base] || (cache && cache[base] === base)) { - return process.nextTick(LOOP); - } +/***/ }), - if (cache && Object.prototype.hasOwnProperty.call(cache, base)) { - // known symbolic link. no need to stat again. - return gotResolvedLink(cache[base]); - } +/***/ 3928: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - return fs.lstat(base, gotStat); - } +exports.Deflater = __nccwpck_require__(7686); +exports.Inflater = __nccwpck_require__(2153); +exports.ZipCrypto = __nccwpck_require__(3228); - function gotStat(err, stat) { - if (err) return cb(err); - // if not a symlink, skip to the next path part - if (!stat.isSymbolicLink()) { - knownHard[base] = true; - if (cache) cache[base] = base; - return process.nextTick(LOOP); - } +/***/ }), - // stat & read the link if not read before - // call gotTarget as soon as the link target is known - // dev/ino always return 0 on windows, so skip the check. - if (!isWindows) { - var id = stat.dev.toString(32) + ':' + stat.ino.toString(32); - if (seenLinks.hasOwnProperty(id)) { - return gotTarget(null, seenLinks[id], base); - } - } - fs.stat(base, function(err) { - if (err) return cb(err); +/***/ 2153: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - fs.readlink(base, function(err, target) { - if (!isWindows) seenLinks[id] = target; - gotTarget(err, target); - }); - }); - } +const version = +(process.versions ? process.versions.node : "").split(".")[0] || 0; - function gotTarget(err, target, base) { - if (err) return cb(err); +module.exports = function (/*Buffer*/ inbuf, /*number*/ expectedLength) { + var zlib = __nccwpck_require__(9796); + const option = version >= 15 && expectedLength > 0 ? 
{ maxOutputLength: expectedLength } : {}; - var resolvedLink = pathModule.resolve(previous, target); - if (cache) cache[base] = resolvedLink; - gotResolvedLink(resolvedLink); - } + return { + inflate: function () { + return zlib.inflateRawSync(inbuf, option); + }, - function gotResolvedLink(resolvedLink) { - // resolve the link, then start over - p = pathModule.resolve(resolvedLink, p.slice(pos)); - start(); - } + inflateAsync: function (/*Function*/ callback) { + var tmp = zlib.createInflateRaw(option), + parts = [], + total = 0; + tmp.on("data", function (data) { + parts.push(data); + total += data.length; + }); + tmp.on("end", function () { + var buf = Buffer.alloc(total), + written = 0; + buf.fill(0); + for (var i = 0; i < parts.length; i++) { + var part = parts[i]; + part.copy(buf, written); + written += part.length; + } + callback && callback(buf); + }); + tmp.end(inbuf); + } + }; }; /***/ }), -/***/ 7625: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { - -exports.setopts = setopts -exports.ownProp = ownProp -exports.makeAbs = makeAbs -exports.finish = finish -exports.mark = mark -exports.isIgnored = isIgnored -exports.childrenIgnored = childrenIgnored - -function ownProp (obj, field) { - return Object.prototype.hasOwnProperty.call(obj, field) -} - -var fs = __nccwpck_require__(7147) -var path = __nccwpck_require__(1017) -var minimatch = __nccwpck_require__(3973) -var isAbsolute = __nccwpck_require__(8714) -var Minimatch = minimatch.Minimatch - -function alphasort (a, b) { - return a.localeCompare(b, 'en') -} - -function setupIgnores (self, options) { - self.ignore = options.ignore || [] +/***/ 3228: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (!Array.isArray(self.ignore)) - self.ignore = [self.ignore] +"use strict"; - if (self.ignore.length) { - self.ignore = self.ignore.map(ignoreMap) - } -} -// ignore patterns are always in dot:true mode. -function ignoreMap (pattern) { - var gmatcher = null - if (pattern.slice(-3) === '/**') { - var gpattern = pattern.replace(/(\/\*\*)+$/, '') - gmatcher = new Minimatch(gpattern, { dot: true }) - } +// node crypt, we use it for generate salt +// eslint-disable-next-line node/no-unsupported-features/node-builtins +const { randomFillSync } = __nccwpck_require__(6113); +const Errors = __nccwpck_require__(1255); - return { - matcher: new Minimatch(pattern, { dot: true }), - gmatcher: gmatcher - } -} - -function setopts (self, pattern, options) { - if (!options) - options = {} - - // base-matching: just use globstar for that. 
- if (options.matchBase && -1 === pattern.indexOf("/")) { - if (options.noglobstar) { - throw new Error("base matching requires globstar") - } - pattern = "**/" + pattern - } - - self.silent = !!options.silent - self.pattern = pattern - self.strict = options.strict !== false - self.realpath = !!options.realpath - self.realpathCache = options.realpathCache || Object.create(null) - self.follow = !!options.follow - self.dot = !!options.dot - self.mark = !!options.mark - self.nodir = !!options.nodir - if (self.nodir) - self.mark = true - self.sync = !!options.sync - self.nounique = !!options.nounique - self.nonull = !!options.nonull - self.nosort = !!options.nosort - self.nocase = !!options.nocase - self.stat = !!options.stat - self.noprocess = !!options.noprocess - self.absolute = !!options.absolute - self.fs = options.fs || fs - - self.maxLength = options.maxLength || Infinity - self.cache = options.cache || Object.create(null) - self.statCache = options.statCache || Object.create(null) - self.symlinks = options.symlinks || Object.create(null) - - setupIgnores(self, options) - - self.changedCwd = false - var cwd = process.cwd() - if (!ownProp(options, "cwd")) - self.cwd = cwd - else { - self.cwd = path.resolve(options.cwd) - self.changedCwd = self.cwd !== cwd - } - - self.root = options.root || path.resolve(self.cwd, "/") - self.root = path.resolve(self.root) - if (process.platform === "win32") - self.root = self.root.replace(/\\/g, "/") - - // TODO: is an absolute `cwd` supposed to be resolved against `root`? - // e.g. { cwd: '/test', root: __dirname } === path.join(__dirname, '/test') - self.cwdAbs = isAbsolute(self.cwd) ? self.cwd : makeAbs(self, self.cwd) - if (process.platform === "win32") - self.cwdAbs = self.cwdAbs.replace(/\\/g, "/") - self.nomount = !!options.nomount - - // disable comments and negation in Minimatch. - // Note that they are not supported in Glob itself anyway. - options.nonegate = true - options.nocomment = true - // always treat \ in patterns as escapes, not path separators - options.allowWindowsEscape = false - - self.minimatch = new Minimatch(pattern, options) - self.options = self.minimatch.options -} - -function finish (self) { - var nou = self.nounique - var all = nou ? 
[] : Object.create(null) - - for (var i = 0, l = self.matches.length; i < l; i ++) { - var matches = self.matches[i] - if (!matches || Object.keys(matches).length === 0) { - if (self.nonull) { - // do like the shell, and spit out the literal glob - var literal = self.minimatch.globSet[i] - if (nou) - all.push(literal) - else - all[literal] = true - } - } else { - // had matches - var m = Object.keys(matches) - if (nou) - all.push.apply(all, m) - else - m.forEach(function (m) { - all[m] = true - }) +// generate CRC32 lookup table +const crctable = new Uint32Array(256).map((t, crc) => { + for (let j = 0; j < 8; j++) { + if (0 !== (crc & 1)) { + crc = (crc >>> 1) ^ 0xedb88320; + } else { + crc >>>= 1; + } } - } + return crc >>> 0; +}); - if (!nou) - all = Object.keys(all) +// C-style uInt32 Multiply (discards higher bits, when JS multiply discards lower bits) +const uMul = (a, b) => Math.imul(a, b) >>> 0; - if (!self.nosort) - all = all.sort(alphasort) +// crc32 byte single update (actually same function is part of utils.crc32 function :) ) +const crc32update = (pCrc32, bval) => { + return crctable[(pCrc32 ^ bval) & 0xff] ^ (pCrc32 >>> 8); +}; - // at *some* point we statted all of these - if (self.mark) { - for (var i = 0; i < all.length; i++) { - all[i] = self._mark(all[i]) - } - if (self.nodir) { - all = all.filter(function (e) { - var notDir = !(/\/$/.test(e)) - var c = self.cache[e] || self.cache[makeAbs(self, e)] - if (notDir && c) - notDir = c !== 'DIR' && !Array.isArray(c) - return notDir - }) +// function for generating salt for encrytion header +const genSalt = () => { + if ("function" === typeof randomFillSync) { + return randomFillSync(Buffer.alloc(12)); + } else { + // fallback if function is not defined + return genSalt.node(); } - } - - if (self.ignore.length) - all = all.filter(function(m) { - return !isIgnored(self, m) - }) - - self.found = all -} +}; -function mark (self, p) { - var abs = makeAbs(self, p) - var c = self.cache[abs] - var m = p - if (c) { - var isDir = c === 'DIR' || Array.isArray(c) - var slash = p.slice(-1) === '/' +// salt generation with node random function (mainly as fallback) +genSalt.node = () => { + const salt = Buffer.alloc(12); + const len = salt.length; + for (let i = 0; i < len; i++) salt[i] = (Math.random() * 256) & 0xff; + return salt; +}; - if (isDir && !slash) - m += '/' - else if (!isDir && slash) - m = m.slice(0, -1) +// general config +const config = { + genSalt +}; - if (m !== p) { - var mabs = makeAbs(self, m) - self.statCache[mabs] = self.statCache[abs] - self.cache[mabs] = self.cache[abs] +// Class Initkeys handles same basic ops with keys +function Initkeys(pw) { + const pass = Buffer.isBuffer(pw) ? pw : Buffer.from(pw); + this.keys = new Uint32Array([0x12345678, 0x23456789, 0x34567890]); + for (let i = 0; i < pass.length; i++) { + this.updateKeys(pass[i]); } - } - - return m } -// lotta situps... 
-function makeAbs (self, f) { - var abs = f - if (f.charAt(0) === '/') { - abs = path.join(self.root, f) - } else if (isAbsolute(f) || f === '') { - abs = f - } else if (self.changedCwd) { - abs = path.resolve(self.cwd, f) - } else { - abs = path.resolve(f) - } - - if (process.platform === 'win32') - abs = abs.replace(/\\/g, '/') - - return abs -} +Initkeys.prototype.updateKeys = function (byteValue) { + const keys = this.keys; + keys[0] = crc32update(keys[0], byteValue); + keys[1] += keys[0] & 0xff; + keys[1] = uMul(keys[1], 134775813) + 1; + keys[2] = crc32update(keys[2], keys[1] >>> 24); + return byteValue; +}; +Initkeys.prototype.next = function () { + const k = (this.keys[2] | 2) >>> 0; // key + return (uMul(k, k ^ 1) >> 8) & 0xff; // decode +}; -// Return true, if pattern ends with globstar '**', for the accompanying parent directory. -// Ex:- If node_modules/** is the pattern, add 'node_modules' to ignore list along with it's contents -function isIgnored (self, path) { - if (!self.ignore.length) - return false +function make_decrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); - return self.ignore.some(function(item) { - return item.matcher.match(path) || !!(item.gmatcher && item.gmatcher.match(path)) - }) + // return decrypter function + return function (/*Buffer*/ data) { + // result - we create new Buffer for results + const result = Buffer.alloc(data.length); + let pos = 0; + // process input data + for (let c of data) { + //c ^= keys.next(); + //result[pos++] = c; // decode & Save Value + result[pos++] = keys.updateKeys(c ^ keys.next()); // update keys with decoded byte + } + return result; + }; } -function childrenIgnored (self, path) { - if (!self.ignore.length) - return false +function make_encrypter(/*Buffer*/ pwd) { + // 1. Stage initialize key + const keys = new Initkeys(pwd); - return self.ignore.some(function(item) { - return !!(item.gmatcher && item.gmatcher.match(path)) - }) + // return encrypting function, result and pos is here so we dont have to merge buffers later + return function (/*Buffer*/ data, /*Buffer*/ result, /* Number */ pos = 0) { + // result - we create new Buffer for results + if (!result) result = Buffer.alloc(data.length); + // process input data + for (let c of data) { + const k = keys.next(); // save key byte + result[pos++] = c ^ k; // save val + keys.updateKeys(c); // update keys with decoded byte + } + return result; + }; } +function decrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd) { + if (!data || !Buffer.isBuffer(data) || data.length < 12) { + return Buffer.alloc(0); + } -/***/ }), - -/***/ 1957: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -// Approach: -// -// 1. Get the minimatch set -// 2. For each pattern in the set, PROCESS(pattern, false) -// 3. Store matches per-set, then uniq them -// -// PROCESS(pattern, inGlobStar) -// Get the first [n] items from pattern that are all strings -// Join these together. This is PREFIX. -// If there is no more remaining, then stat(PREFIX) and -// add to matches if it succeeds. END. -// -// If inGlobStar and PREFIX is symlink and points to dir -// set ENTRIES = [] -// else readdir(PREFIX) as ENTRIES -// If fail, END -// -// with ENTRIES -// If pattern[n] is GLOBSTAR -// // handle the case where the globstar match is empty -// // by pruning it out, and testing the resulting pattern -// PROCESS(pattern[0..n] + pattern[n+1 .. $], false) -// // handle other cases. 
-// for ENTRY in ENTRIES (not dotfiles) -// // attach globstar + tail onto the entry -// // Mark that this entry is a globstar match -// PROCESS(pattern[0..n] + ENTRY + pattern[n .. $], true) -// -// else // not globstar -// for ENTRY in ENTRIES (not dotfiles, unless pattern[n] is dot) -// Test ENTRY against pattern[n] -// If fails, continue -// If passes, PROCESS(pattern[0..n] + item + pattern[n+1 .. $]) -// -// Caveat: -// Cache all stats and readdirs results to minimize syscall. Since all -// we ever care about is existence and directory-ness, we can just keep -// `true` for files, and [children,...] for directories, or `false` for -// things that don't exist. - -module.exports = glob - -var rp = __nccwpck_require__(6863) -var minimatch = __nccwpck_require__(3973) -var Minimatch = minimatch.Minimatch -var inherits = __nccwpck_require__(4124) -var EE = (__nccwpck_require__(2361).EventEmitter) -var path = __nccwpck_require__(1017) -var assert = __nccwpck_require__(9491) -var isAbsolute = __nccwpck_require__(8714) -var globSync = __nccwpck_require__(9010) -var common = __nccwpck_require__(7625) -var setopts = common.setopts -var ownProp = common.ownProp -var inflight = __nccwpck_require__(2492) -var util = __nccwpck_require__(3837) -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored - -var once = __nccwpck_require__(1223) - -function glob (pattern, options, cb) { - if (typeof options === 'function') cb = options, options = {} - if (!options) options = {} - - if (options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return globSync(pattern, options) - } - - return new Glob(pattern, options, cb) -} + // 1. We Initialize and generate decrypting function + const decrypter = make_decrypter(pwd); -glob.sync = globSync -var GlobSync = glob.GlobSync = globSync.GlobSync + // 2. decrypt salt what is always 12 bytes and is a part of file content + const salt = decrypter(data.slice(0, 12)); -// old api surface -glob.glob = glob + // if bit 3 (0x08) of the general-purpose flags field is set, check salt[11] with the high byte of the header time + // 2 byte data block (as per Info-Zip spec), otherwise check with the high byte of the header entry + const verifyByte = (header.flags & 0x8) === 0x8 ? header.timeHighByte : header.crc >>> 24; -function extend (origin, add) { - if (add === null || typeof add !== 'object') { - return origin - } + //3. does password meet expectations + if (salt[11] !== verifyByte) { + throw Errors.WRONG_PASSWORD(); + } - var keys = Object.keys(add) - var i = keys.length - while (i--) { - origin[keys[i]] = add[keys[i]] - } - return origin + // 4. 
decode content + return decrypter(data.slice(12)); } -glob.hasMagic = function (pattern, options_) { - var options = extend({}, options_) - options.noprocess = true - - var g = new Glob(pattern, options) - var set = g.minimatch.set - - if (!pattern) - return false - - if (set.length > 1) - return true - - for (var j = 0; j < set[0].length; j++) { - if (typeof set[0][j] !== 'string') - return true - } - - return false +// lets add way to populate salt, NOT RECOMMENDED for production but maybe useful for testing general functionality +function _salter(data) { + if (Buffer.isBuffer(data) && data.length >= 12) { + // be aware - currently salting buffer data is modified + config.genSalt = function () { + return data.slice(0, 12); + }; + } else if (data === "node") { + // test salt generation with node random function + config.genSalt = genSalt.node; + } else { + // if value is not acceptable config gets reset. + config.genSalt = genSalt; + } } -glob.Glob = Glob -inherits(Glob, EE) -function Glob (pattern, options, cb) { - if (typeof options === 'function') { - cb = options - options = null - } - - if (options && options.sync) { - if (cb) - throw new TypeError('callback provided to sync glob') - return new GlobSync(pattern, options) - } - - if (!(this instanceof Glob)) - return new Glob(pattern, options, cb) - - setopts(this, pattern, options) - this._didRealPath = false - - // process each pattern in the minimatch set - var n = this.minimatch.set.length - - // The matches are stored as {: true,...} so that - // duplicates are automagically pruned. - // Later, we do an Object.keys() on these. - // Keep them as a list so we can fill in when nonull is set. - this.matches = new Array(n) - - if (typeof cb === 'function') { - cb = once(cb) - this.on('error', cb) - this.on('end', function (matches) { - cb(null, matches) - }) - } - - var self = this - this._processing = 0 +function encrypt(/*Buffer*/ data, /*Object*/ header, /*String, Buffer*/ pwd, /*Boolean*/ oldlike = false) { + // 1. test data if data is not Buffer we make buffer from it + if (data == null) data = Buffer.alloc(0); + // if data is not buffer be make buffer from it + if (!Buffer.isBuffer(data)) data = Buffer.from(data.toString()); - this._emitQueue = [] - this._processQueue = [] - this.paused = false + // 2. We Initialize and generate encrypting function + const encrypter = make_encrypter(pwd); - if (this.noprocess) - return this + // 3. generate salt (12-bytes of random data) + const salt = config.genSalt(); + salt[11] = (header.crc >>> 24) & 0xff; - if (n === 0) - return done() + // old implementations (before PKZip 2.04g) used two byte check + if (oldlike) salt[10] = (header.crc >>> 16) & 0xff; - var sync = true - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false, done) - } - sync = false + // 4. 
create output + const result = Buffer.alloc(data.length + 12); + encrypter(salt, result); - function done () { - --self._processing - if (self._processing <= 0) { - if (sync) { - process.nextTick(function () { - self._finish() - }) - } else { - self._finish() - } - } - } + // finally encode content + return encrypter(data, result, 12); } -Glob.prototype._finish = function () { - assert(this instanceof Glob) - if (this.aborted) - return +module.exports = { decrypt, encrypt, _salter }; - if (this.realpath && !this._didRealpath) - return this._realpath() - common.finish(this) - this.emit('end', this.found) -} +/***/ }), -Glob.prototype._realpath = function () { - if (this._didRealpath) - return +/***/ 4522: +/***/ ((module) => { - this._didRealpath = true +module.exports = { + /* The local file header */ + LOCHDR : 30, // LOC header size + LOCSIG : 0x04034b50, // "PK\003\004" + LOCVER : 4, // version needed to extract + LOCFLG : 6, // general purpose bit flag + LOCHOW : 8, // compression method + LOCTIM : 10, // modification time (2 bytes time, 2 bytes date) + LOCCRC : 14, // uncompressed file crc-32 value + LOCSIZ : 18, // compressed size + LOCLEN : 22, // uncompressed size + LOCNAM : 26, // filename length + LOCEXT : 28, // extra field length - var n = this.matches.length - if (n === 0) - return this._finish() + /* The Data descriptor */ + EXTSIG : 0x08074b50, // "PK\007\008" + EXTHDR : 16, // EXT header size + EXTCRC : 4, // uncompressed file crc-32 value + EXTSIZ : 8, // compressed size + EXTLEN : 12, // uncompressed size - var self = this - for (var i = 0; i < this.matches.length; i++) - this._realpathSet(i, next) + /* The central directory file header */ + CENHDR : 46, // CEN header size + CENSIG : 0x02014b50, // "PK\001\002" + CENVEM : 4, // version made by + CENVER : 6, // version needed to extract + CENFLG : 8, // encrypt, decrypt flags + CENHOW : 10, // compression method + CENTIM : 12, // modification time (2 bytes time, 2 bytes date) + CENCRC : 16, // uncompressed file crc-32 value + CENSIZ : 20, // compressed size + CENLEN : 24, // uncompressed size + CENNAM : 28, // filename length + CENEXT : 30, // extra field length + CENCOM : 32, // file comment length + CENDSK : 34, // volume number start + CENATT : 36, // internal file attributes + CENATX : 38, // external file attributes (host system dependent) + CENOFF : 42, // LOC header offset - function next () { - if (--n === 0) - self._finish() - } -} + /* The entries in the end of central directory */ + ENDHDR : 22, // END header size + ENDSIG : 0x06054b50, // "PK\005\006" + ENDSUB : 8, // number of entries on this disk + ENDTOT : 10, // total number of entries + ENDSIZ : 12, // central directory size in bytes + ENDOFF : 16, // offset of first CEN header + ENDCOM : 20, // zip file comment length -Glob.prototype._realpathSet = function (index, cb) { - var matchset = this.matches[index] - if (!matchset) - return cb() + END64HDR : 20, // zip64 END header size + END64SIG : 0x07064b50, // zip64 Locator signature, "PK\006\007" + END64START : 4, // number of the disk with the start of the zip64 + END64OFF : 8, // relative offset of the zip64 end of central directory + END64NUMDISKS : 16, // total number of disks - var found = Object.keys(matchset) - var self = this - var n = found.length + ZIP64SIG : 0x06064b50, // zip64 signature, "PK\006\006" + ZIP64HDR : 56, // zip64 record minimum size + ZIP64LEAD : 12, // leading bytes at the start of the record, not counted by the value stored in ZIP64SIZE + ZIP64SIZE : 4, // zip64 size of the 
central directory record + ZIP64VEM : 12, // zip64 version made by + ZIP64VER : 14, // zip64 version needed to extract + ZIP64DSK : 16, // zip64 number of this disk + ZIP64DSKDIR : 20, // number of the disk with the start of the record directory + ZIP64SUB : 24, // number of entries on this disk + ZIP64TOT : 32, // total number of entries + ZIP64SIZB : 40, // zip64 central directory size in bytes + ZIP64OFF : 48, // offset of start of central directory with respect to the starting disk number + ZIP64EXTRA : 56, // extensible data sector - if (n === 0) - return cb() + /* Compression methods */ + STORED : 0, // no compression + SHRUNK : 1, // shrunk + REDUCED1 : 2, // reduced with compression factor 1 + REDUCED2 : 3, // reduced with compression factor 2 + REDUCED3 : 4, // reduced with compression factor 3 + REDUCED4 : 5, // reduced with compression factor 4 + IMPLODED : 6, // imploded + // 7 reserved for Tokenizing compression algorithm + DEFLATED : 8, // deflated + ENHANCED_DEFLATED: 9, // enhanced deflated + PKWARE : 10,// PKWare DCL imploded + // 11 reserved by PKWARE + BZIP2 : 12, // compressed using BZIP2 + // 13 reserved by PKWARE + LZMA : 14, // LZMA + // 15-17 reserved by PKWARE + IBM_TERSE : 18, // compressed using IBM TERSE + IBM_LZ77 : 19, // IBM LZ77 z + AES_ENCRYPT : 99, // WinZIP AES encryption method - var set = this.matches[index] = Object.create(null) - found.forEach(function (p, i) { - // If there's a problem with the stat, then it means that - // one or more of the links in the realpath couldn't be - // resolved. just return the abs value in that case. - p = self._makeAbs(p) - rp.realpath(p, self.realpathCache, function (er, real) { - if (!er) - set[real] = true - else if (er.syscall === 'stat') - set[p] = true - else - self.emit('error', er) // srsly wtf right here + /* General purpose bit flag */ + // values can obtained with expression 2**bitnr + FLG_ENC : 1, // Bit 0: encrypted file + FLG_COMP1 : 2, // Bit 1, compression option + FLG_COMP2 : 4, // Bit 2, compression option + FLG_DESC : 8, // Bit 3, data descriptor + FLG_ENH : 16, // Bit 4, enhanced deflating + FLG_PATCH : 32, // Bit 5, indicates that the file is compressed patched data. + FLG_STR : 64, // Bit 6, strong encryption (patented) + // Bits 7-10: Currently unused. + FLG_EFS : 2048, // Bit 11: Language encoding flag (EFS) + // Bit 12: Reserved by PKWARE for enhanced compression. + // Bit 13: encrypted the Central Directory (patented). + // Bits 14-15: Reserved by PKWARE. 
+ FLG_MSK : 4096, // mask header values - if (--n === 0) { - self.matches[index] = set - cb() - } - }) - }) -} + /* Load type */ + FILE : 2, + BUFFER : 1, + NONE : 0, -Glob.prototype._mark = function (p) { - return common.mark(this, p) -} + /* 4.5 Extensible data fields */ + EF_ID : 0, + EF_SIZE : 2, -Glob.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} + /* Header IDs */ + ID_ZIP64 : 0x0001, + ID_AVINFO : 0x0007, + ID_PFS : 0x0008, + ID_OS2 : 0x0009, + ID_NTFS : 0x000a, + ID_OPENVMS : 0x000c, + ID_UNIX : 0x000d, + ID_FORK : 0x000e, + ID_PATCH : 0x000f, + ID_X509_PKCS7 : 0x0014, + ID_X509_CERTID_F : 0x0015, + ID_X509_CERTID_C : 0x0016, + ID_STRONGENC : 0x0017, + ID_RECORD_MGT : 0x0018, + ID_X509_PKCS7_RL : 0x0019, + ID_IBM1 : 0x0065, + ID_IBM2 : 0x0066, + ID_POSZIP : 0x4690, -Glob.prototype.abort = function () { - this.aborted = true - this.emit('abort') -} + EF_ZIP64_OR_32 : 0xffffffff, + EF_ZIP64_OR_16 : 0xffff, + EF_ZIP64_SUNCOMP : 0, + EF_ZIP64_SCOMP : 8, + EF_ZIP64_RHO : 16, + EF_ZIP64_DSN : 24 +}; -Glob.prototype.pause = function () { - if (!this.paused) { - this.paused = true - this.emit('pause') - } -} -Glob.prototype.resume = function () { - if (this.paused) { - this.emit('resume') - this.paused = false - if (this._emitQueue.length) { - var eq = this._emitQueue.slice(0) - this._emitQueue.length = 0 - for (var i = 0; i < eq.length; i ++) { - var e = eq[i] - this._emitMatch(e[0], e[1]) - } - } - if (this._processQueue.length) { - var pq = this._processQueue.slice(0) - this._processQueue.length = 0 - for (var i = 0; i < pq.length; i ++) { - var p = pq[i] - this._processing-- - this._process(p[0], p[1], p[2], p[3]) - } - } - } -} +/***/ }), -Glob.prototype._process = function (pattern, index, inGlobStar, cb) { - assert(this instanceof Glob) - assert(typeof cb === 'function') +/***/ 7396: +/***/ ((module) => { - if (this.aborted) - return +module.exports = { + efs: true, + encode: (data) => Buffer.from(data, "utf8"), + decode: (data) => data.toString("utf8") +}; - this._processing++ - if (this.paused) { - this._processQueue.push([pattern, index, inGlobStar, cb]) - return - } - //console.error('PROCESS %d', this._processing, pattern) +/***/ }), - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. +/***/ 1255: +/***/ ((__unused_webpack_module, exports) => { - // see if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index, cb) - return +const errors = { + /* Header error messages */ + INVALID_LOC: "Invalid LOC header (bad signature)", + INVALID_CEN: "Invalid CEN header (bad signature)", + INVALID_END: "Invalid END header (bad signature)", - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break + /* Descriptor */ + DESCRIPTOR_NOT_EXIST: "No descriptor present", + DESCRIPTOR_UNKNOWN: "Unknown descriptor format", + DESCRIPTOR_FAULTY: "Descriptor data is malformed", - default: - // pattern has some string bits in the front. 
- // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } + /* ZipEntry error messages*/ + NO_DATA: "Nothing to decompress", + BAD_CRC: "CRC32 checksum failed {0}", + FILE_IN_THE_WAY: "There is a file in the way: {0}", + UNKNOWN_METHOD: "Invalid/unsupported compression method", - var remain = pattern.slice(n) + /* Inflater error messages */ + AVAIL_DATA: "inflate::Available inflate data did not terminate", + INVALID_DISTANCE: "inflate::Invalid literal/length or distance code in fixed or dynamic block", + TO_MANY_CODES: "inflate::Dynamic block code description: too many length or distance codes", + INVALID_REPEAT_LEN: "inflate::Dynamic block code description: repeat more than specified lengths", + INVALID_REPEAT_FIRST: "inflate::Dynamic block code description: repeat lengths with no first length", + INCOMPLETE_CODES: "inflate::Dynamic block code description: code lengths codes incomplete", + INVALID_DYN_DISTANCE: "inflate::Dynamic block code description: invalid distance code lengths", + INVALID_CODES_LEN: "inflate::Dynamic block code description: invalid literal/length code lengths", + INVALID_STORE_BLOCK: "inflate::Stored block length did not match one's complement", + INVALID_BLOCK_TYPE: "inflate::Invalid block type (type == 3)", - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix + /* ADM-ZIP error messages */ + CANT_EXTRACT_FILE: "Could not extract the file", + CANT_OVERRIDE: "Target file already exists", + DISK_ENTRY_TOO_LARGE: "Number of disk entries is too large", + NO_ZIP: "No zip file was loaded", + NO_ENTRY: "Entry doesn't exist", + DIRECTORY_CONTENT_ERROR: "A directory cannot have content", + FILE_NOT_FOUND: 'File not found: "{0}"', + NOT_IMPLEMENTED: "Not implemented", + INVALID_FILENAME: "Invalid filename", + INVALID_FORMAT: "Invalid or unsupported zip format. No END header found", + INVALID_PASS_PARAM: "Incompatible password parameter", + WRONG_PASSWORD: "Wrong Password", - var abs = this._makeAbs(read) + /* ADM-ZIP */ + COMMENT_TOO_LONG: "Comment is too long", // Comment can be max 65535 bytes long (NOTE: some non-US characters may take more space) + EXTRA_FIELD_PARSE_ERROR: "Extra field parsing error" +}; - //if ignored, skip _processing - if (childrenIgnored(this, read)) - return cb() +// template +function E(message) { + return function (...args) { + if (args.length) { // Allow {0} .. 
{9} arguments in error message, based on argument number + message = message.replace(/\{(\d)\}/g, (_, n) => args[n] || ''); + } - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar, cb) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar, cb) + return new Error('ADM-ZIP: ' + message); + }; } -Glob.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - return self._processReaddir2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) +// Init errors with template +for (const msg of Object.keys(errors)) { + exports[msg] = E(errors[msg]); } -Glob.prototype._processReaddir2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - - // if the abs isn't a dir, then nothing can match! - if (!entries) - return cb() - - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } - - //console.error('prd2', prefix, entries, remain[0]._glob, matchedEntries) - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return cb() +/***/ }), - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. +/***/ 8321: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) +const pth = __nccwpck_require__(1017); - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e - } +module.exports = function (/*String*/ path, /*Utils object*/ { fs }) { + var _path = path || "", + _obj = newAttr(), + _stat = null; - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) + function newAttr() { + return { + directory: false, + readonly: false, + hidden: false, + executable: false, + mtime: 0, + atime: 0 + }; } - // This was the last one, and no stats were needed - return cb() - } - // now test all matched entries as stand-ins for that part - // of the pattern. 
- remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) { - if (prefix !== '/') - e = prefix + '/' + e - else - e = prefix + e + if (_path && fs.existsSync(_path)) { + _stat = fs.statSync(_path); + _obj.directory = _stat.isDirectory(); + _obj.mtime = _stat.mtime; + _obj.atime = _stat.atime; + _obj.executable = (0o111 & _stat.mode) !== 0; // file is executable who ever har right not just owner + _obj.readonly = (0o200 & _stat.mode) === 0; // readonly if owner has no write right + _obj.hidden = pth.basename(_path)[0] === "."; + } else { + console.warn("Invalid path: " + _path); } - this._process([e].concat(remain), index, inGlobStar, cb) - } - cb() -} - -Glob.prototype._emitMatch = function (index, e) { - if (this.aborted) - return - if (isIgnored(this, e)) - return + return { + get directory() { + return _obj.directory; + }, - if (this.paused) { - this._emitQueue.push([index, e]) - return - } + get readOnly() { + return _obj.readonly; + }, - var abs = isAbsolute(e) ? e : this._makeAbs(e) + get hidden() { + return _obj.hidden; + }, - if (this.mark) - e = this._mark(e) + get mtime() { + return _obj.mtime; + }, - if (this.absolute) - e = abs + get atime() { + return _obj.atime; + }, - if (this.matches[index][e]) - return + get executable() { + return _obj.executable; + }, - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } + decodeAttributes: function () {}, - this.matches[index][e] = true + encodeAttributes: function () {}, - var st = this.statCache[abs] - if (st) - this.emit('stat', e, st) + toJSON: function () { + return { + path: _path, + isDirectory: _obj.directory, + isReadOnly: _obj.readonly, + isHidden: _obj.hidden, + isExecutable: _obj.executable, + mTime: _obj.mtime, + aTime: _obj.atime + }; + }, - this.emit('match', e) -} + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; -Glob.prototype._readdirInGlobStar = function (abs, cb) { - if (this.aborted) - return - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false, cb) +/***/ }), - var lstatkey = 'lstat\0' + abs - var self = this - var lstatcb = inflight(lstatkey, lstatcb_) +/***/ 5182: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - if (lstatcb) - self.fs.lstat(abs, lstatcb) +module.exports = __nccwpck_require__(1291); +module.exports.Constants = __nccwpck_require__(4522); +module.exports.Errors = __nccwpck_require__(1255); +module.exports.FileAttr = __nccwpck_require__(8321); +module.exports.decoder = __nccwpck_require__(7396); - function lstatcb_ (er, lstat) { - if (er && er.code === 'ENOENT') - return cb() - var isSym = lstat && lstat.isSymbolicLink() - self.symlinks[abs] = isSym +/***/ }), - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. 
- if (!isSym && lstat && !lstat.isDirectory()) { - self.cache[abs] = 'FILE' - cb() - } else - self._readdir(abs, false, cb) - } -} +/***/ 1291: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -Glob.prototype._readdir = function (abs, inGlobStar, cb) { - if (this.aborted) - return +const fsystem = __nccwpck_require__(7147); +const pth = __nccwpck_require__(1017); +const Constants = __nccwpck_require__(4522); +const Errors = __nccwpck_require__(1255); +const isWin = typeof process === "object" && "win32" === process.platform; - cb = inflight('readdir\0'+abs+'\0'+inGlobStar, cb) - if (!cb) - return +const is_Obj = (obj) => typeof obj === "object" && obj !== null; - //console.error('RD %j %j', +inGlobStar, abs) - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs, cb) +// generate CRC32 lookup table +const crcTable = new Uint32Array(256).map((t, c) => { + for (let k = 0; k < 8; k++) { + if ((c & 1) !== 0) { + c = 0xedb88320 ^ (c >>> 1); + } else { + c >>>= 1; + } + } + return c >>> 0; +}); - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return cb() +// UTILS functions - if (Array.isArray(c)) - return cb(null, c) - } +function Utils(opts) { + this.sep = pth.sep; + this.fs = fsystem; - var self = this - self.fs.readdir(abs, readdirCb(this, abs, cb)) + if (is_Obj(opts)) { + // custom filesystem + if (is_Obj(opts.fs) && typeof opts.fs.statSync === "function") { + this.fs = opts.fs; + } + } } -function readdirCb (self, abs, cb) { - return function (er, entries) { - if (er) - self._readdirError(abs, er, cb) - else - self._readdirEntries(abs, entries, cb) - } -} +module.exports = Utils; -Glob.prototype._readdirEntries = function (abs, entries, cb) { - if (this.aborted) - return +// INSTANTIABLE functions - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true +Utils.prototype.makeDir = function (/*String*/ folder) { + const self = this; + + // Sync - make directories tree + function mkdirSync(/*String*/ fpath) { + let resolvedPath = fpath.split(self.sep)[0]; + fpath.split(self.sep).forEach(function (name) { + if (!name || name.substr(-1, 1) === ":") return; + resolvedPath += self.sep + name; + var stat; + try { + stat = self.fs.statSync(resolvedPath); + } catch (e) { + self.fs.mkdirSync(resolvedPath); + } + if (stat && stat.isFile()) throw Errors.FILE_IN_THE_WAY(`"${resolvedPath}"`); + }); } - } - this.cache[abs] = entries - return cb(null, entries) -} + mkdirSync(folder); +}; -Glob.prototype._readdirError = function (f, er, cb) { - if (this.aborted) - return +Utils.prototype.writeFileTo = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr) { + const self = this; + if (self.fs.existsSync(path)) { + if (!overwrite) return false; // cannot overwrite - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. 
- var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - this.emit('error', error) - this.abort() - } - break + var stat = self.fs.statSync(path); + if (stat.isDirectory()) { + return false; + } + } + var folder = pth.dirname(path); + if (!self.fs.existsSync(folder)) { + self.makeDir(folder); + } - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break + var fd; + try { + fd = self.fs.openSync(path, "w", 0o666); // 0666 + } catch (e) { + self.fs.chmodSync(path, 0o666); + fd = self.fs.openSync(path, "w", 0o666); + } + if (fd) { + try { + self.fs.writeSync(fd, content, 0, content.length, 0); + } finally { + self.fs.closeSync(fd); + } + } + self.fs.chmodSync(path, attr || 0o666); + return true; +}; - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) { - this.emit('error', er) - // If the error is handled, then we abort - // if not, we threw out of here - this.abort() - } - if (!this.silent) - console.error('glob error', er) - break - } +Utils.prototype.writeFileToAsync = function (/*String*/ path, /*Buffer*/ content, /*Boolean*/ overwrite, /*Number*/ attr, /*Function*/ callback) { + if (typeof attr === "function") { + callback = attr; + attr = undefined; + } - return cb() -} + const self = this; -Glob.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar, cb) { - var self = this - this._readdir(abs, inGlobStar, function (er, entries) { - self._processGlobStar2(prefix, read, abs, remain, index, inGlobStar, entries, cb) - }) -} + self.fs.exists(path, function (exist) { + if (exist && !overwrite) return callback(false); + self.fs.stat(path, function (err, stat) { + if (exist && stat.isDirectory()) { + return callback(false); + } -Glob.prototype._processGlobStar2 = function (prefix, read, abs, remain, index, inGlobStar, entries, cb) { - //console.error('pgs2', prefix, remain[0], entries) + var folder = pth.dirname(path); + self.fs.exists(folder, function (exists) { + if (!exists) self.makeDir(folder); - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return cb() + self.fs.open(path, "w", 0o666, function (err, fd) { + if (err) { + self.fs.chmod(path, 0o666, function () { + self.fs.open(path, "w", 0o666, function (err, fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 0o666, function () { + callback(true); + }); + }); + }); + }); + }); + } else if (fd) { + self.fs.write(fd, content, 0, content.length, 0, function () { + self.fs.close(fd, function () { + self.fs.chmod(path, attr || 0o666, function () { + callback(true); + }); + }); + }); + } else { + self.fs.chmod(path, attr || 0o666, function () { + callback(true); + }); + } + }); + }); + }); + }); +}; - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? 
[ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) +Utils.prototype.findFiles = function (/*String*/ path) { + const self = this; - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false, cb) + function findSync(/*String*/ dir, /*RegExp*/ pattern, /*Boolean*/ recursive) { + if (typeof pattern === "boolean") { + recursive = pattern; + pattern = undefined; + } + let files = []; + self.fs.readdirSync(dir).forEach(function (file) { + const path = pth.join(dir, file); + const stat = self.fs.statSync(path); - var isSym = this.symlinks[abs] - var len = entries.length + if (!pattern || pattern.test(path)) { + files.push(pth.normalize(path) + (stat.isDirectory() ? self.sep : "")); + } - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return cb() + if (stat.isDirectory() && recursive) files = files.concat(findSync(path, pattern, recursive)); + }); + return files; + } - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' && !this.dot) - continue + return findSync(path, undefined, true); +}; - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true, cb) +/** + * Callback for showing if everything was done. + * + * @callback filelistCallback + * @param {Error} err - Error object + * @param {string[]} list - was request fully completed + */ - var below = gspref.concat(entries[i], remain) - this._process(below, index, true, cb) - } +/** + * + * @param {string} dir + * @param {filelistCallback} cb + */ +Utils.prototype.findFilesAsync = function (dir, cb) { + const self = this; + let results = []; + self.fs.readdir(dir, function (err, list) { + if (err) return cb(err); + let list_length = list.length; + if (!list_length) return cb(null, results); + list.forEach(function (file) { + file = pth.join(dir, file); + self.fs.stat(file, function (err, stat) { + if (err) return cb(err); + if (stat) { + results.push(pth.normalize(file) + (stat.isDirectory() ? self.sep : "")); + if (stat.isDirectory()) { + self.findFilesAsync(file, function (err, res) { + if (err) return cb(err); + results = results.concat(res); + if (!--list_length) cb(null, results); + }); + } else { + if (!--list_length) cb(null, results); + } + } + }); + }); + }); +}; - cb() -} +Utils.prototype.getAttributes = function () {}; -Glob.prototype._processSimple = function (prefix, index, cb) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? 
- var self = this - this._stat(prefix, function (er, exists) { - self._processSimple2(prefix, index, er, exists, cb) - }) -} -Glob.prototype._processSimple2 = function (prefix, index, er, exists, cb) { +Utils.prototype.setAttributes = function () {}; - //console.error('ps2', prefix, exists) +// STATIC functions + +// crc32 single update (it is part of crc32) +Utils.crc32update = function (crc, byte) { + return crcTable[(crc ^ byte) & 0xff] ^ (crc >>> 8); +}; - if (!this.matches[index]) - this.matches[index] = Object.create(null) +Utils.crc32 = function (buf) { + if (typeof buf === "string") { + buf = Buffer.from(buf, "utf8"); + } - // If it doesn't exist, then just mark the lack of results - if (!exists) - return cb() + let len = buf.length; + let crc = ~0; + for (let off = 0; off < len; ) crc = Utils.crc32update(crc, buf[off++]); + // xor and cast as uint32 number + return ~crc >>> 0; +}; - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' +Utils.methodToString = function (/*Number*/ method) { + switch (method) { + case Constants.STORED: + return "STORED (" + method + ")"; + case Constants.DEFLATED: + return "DEFLATED (" + method + ")"; + default: + return "UNSUPPORTED (" + method + ")"; } - } +}; - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') +/** + * removes ".." style path elements + * @param {string} path - fixable path + * @returns string - fixed filepath + */ +Utils.canonical = function (/*string*/ path) { + if (!path) return ""; + // trick normalize think path is absolute + const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); + return pth.join(".", safeSuffix); +}; - // Mark this as a match - this._emitMatch(index, prefix) - cb() -} +/** + * fix file names in achive + * @param {string} path - fixable path + * @returns string - fixed filepath + */ -// Returns either 'DIR', 'FILE', or false -Glob.prototype._stat = function (f, cb) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' +Utils.zipnamefix = function (path) { + if (!path) return ""; + // trick normalize think path is absolute + const safeSuffix = pth.posix.normalize("/" + path.split("\\").join("/")); + return pth.posix.join(".", safeSuffix); +}; - if (f.length > this.maxLength) - return cb() +/** + * + * @param {Array} arr + * @param {function} callback + * @returns + */ +Utils.findLast = function (arr, callback) { + if (!Array.isArray(arr)) throw new TypeError("arr is not array"); + + const len = arr.length >>> 0; + for (let i = len - 1; i >= 0; i--) { + if (callback(arr[i], i, arr)) { + return arr[i]; + } + } + return void 0; +}; - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] +// make abolute paths taking prefix as root folder +Utils.sanitize = function (/*string*/ prefix, /*string*/ name) { + prefix = pth.resolve(pth.normalize(prefix)); + var parts = name.split("/"); + for (var i = 0, l = parts.length; i < l; i++) { + var path = pth.normalize(pth.join(prefix, parts.slice(i, l).join(pth.sep))); + if (path.indexOf(prefix) === 0) { + return path; + } + } + return pth.normalize(pth.join(prefix, pth.basename(name))); +}; - if (Array.isArray(c)) - c = 'DIR' +// converts buffer, Uint8Array, string types to buffer +Utils.toBuffer = function toBuffer(/*buffer, Uint8Array, string*/ input, /* function */ encoder) { + if (Buffer.isBuffer(input)) { + 
return input; + } else if (input instanceof Uint8Array) { + return Buffer.from(input); + } else { + // expect string all other values are invalid and return empty buffer + return typeof input === "string" ? encoder(input) : Buffer.alloc(0); + } +}; - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return cb(null, c) +Utils.readBigUInt64LE = function (/*Buffer*/ buffer, /*int*/ index) { + var slice = Buffer.from(buffer.slice(index, index + 8)); + slice.swap64(); - if (needDir && c === 'FILE') - return cb() + return parseInt(`0x${slice.toString("hex")}`); +}; - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } +Utils.fromDOS2Date = function (val) { + return new Date(((val >> 25) & 0x7f) + 1980, Math.max(((val >> 21) & 0x0f) - 1, 0), Math.max((val >> 16) & 0x1f, 1), (val >> 11) & 0x1f, (val >> 5) & 0x3f, (val & 0x1f) << 1); +}; - var exists - var stat = this.statCache[abs] - if (stat !== undefined) { - if (stat === false) - return cb(null, stat) - else { - var type = stat.isDirectory() ? 'DIR' : 'FILE' - if (needDir && type === 'FILE') - return cb() - else - return cb(null, type, stat) +Utils.fromDate2DOS = function (val) { + let date = 0; + let time = 0; + if (val.getFullYear() > 1979) { + date = (((val.getFullYear() - 1980) & 0x7f) << 9) | ((val.getMonth() + 1) << 5) | val.getDate(); + time = (val.getHours() << 11) | (val.getMinutes() << 5) | (val.getSeconds() >> 1); } - } + return (date << 16) | time; +}; + +Utils.isWin = isWin; // Do we have windows system +Utils.crcTable = crcTable; - var self = this - var statcb = inflight('stat\0' + abs, lstatcb_) - if (statcb) - self.fs.lstat(abs, statcb) - function lstatcb_ (er, lstat) { - if (lstat && lstat.isSymbolicLink()) { - // If it's a symlink, then treat it as the target, unless - // the target does not exist, then treat it as a file. - return self.fs.stat(abs, function (er, stat) { - if (er) - self._stat2(f, abs, null, lstat, cb) - else - self._stat2(f, abs, er, stat, cb) - }) - } else { - self._stat2(f, abs, er, lstat, cb) - } - } -} +/***/ }), -Glob.prototype._stat2 = function (f, abs, er, stat, cb) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return cb() - } +/***/ 4057: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + +var Utils = __nccwpck_require__(5182), + Headers = __nccwpck_require__(4958), + Constants = Utils.Constants, + Methods = __nccwpck_require__(3928); - var needDir = f.slice(-1) === '/' - this.statCache[abs] = stat +module.exports = function (/** object */ options, /*Buffer*/ input) { + var _centralHeader = new Headers.EntryHeader(), + _entryName = Buffer.alloc(0), + _comment = Buffer.alloc(0), + _isDirectory = false, + uncompressedData = null, + _extra = Buffer.alloc(0), + _extralocal = Buffer.alloc(0), + _efs = true; - if (abs.slice(-1) === '/' && stat && !stat.isDirectory()) - return cb(null, false, stat) + // assign options + const opts = options; - var c = true - if (stat) - c = stat.isDirectory() ? 'DIR' : 'FILE' - this.cache[abs] = this.cache[abs] || c + const decoder = typeof opts.decoder === "object" ? opts.decoder : Utils.decoder; + _efs = decoder.hasOwnProperty("efs") ? 
decoder.efs : false; - if (needDir && c === 'FILE') - return cb() + function getCompressedDataFromZip() { + //if (!input || !Buffer.isBuffer(input)) { + if (!input || !(input instanceof Uint8Array)) { + return Buffer.alloc(0); + } + _extralocal = _centralHeader.loadLocalHeaderFromBinary(input); + return input.slice(_centralHeader.realDataOffset, _centralHeader.realDataOffset + _centralHeader.compressedSize); + } - return cb(null, c, stat) -} + function crc32OK(data) { + // if bit 3 (0x08) of the general-purpose flags field is set, then the CRC-32 and file sizes are not known when the local header is written + if (!_centralHeader.flags_desc) { + if (Utils.crc32(data) !== _centralHeader.localHeader.crc) { + return false; + } + } else { + const descriptor = {}; + const dataEndOffset = _centralHeader.realDataOffset + _centralHeader.compressedSize; + // no descriptor after compressed data, instead new local header + if (input.readUInt32LE(dataEndOffset) == Constants.LOCSIG || input.readUInt32LE(dataEndOffset) == Constants.CENSIG) { + throw Utils.Errors.DESCRIPTOR_NOT_EXIST(); + } + // get decriptor data + if (input.readUInt32LE(dataEndOffset) == Constants.EXTSIG) { + // descriptor with signature + descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC); + descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ); + descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN); + } else if (input.readUInt16LE(dataEndOffset + 12) === 0x4b50) { + // descriptor without signature (we check is new header starting where we expect) + descriptor.crc = input.readUInt32LE(dataEndOffset + Constants.EXTCRC - 4); + descriptor.compressedSize = input.readUInt32LE(dataEndOffset + Constants.EXTSIZ - 4); + descriptor.size = input.readUInt32LE(dataEndOffset + Constants.EXTLEN - 4); + } else { + throw Utils.Errors.DESCRIPTOR_UNKNOWN(); + } -/***/ }), + // check data integrity + if (descriptor.compressedSize !== _centralHeader.compressedSize || descriptor.size !== _centralHeader.size || descriptor.crc !== _centralHeader.crc) { + throw Utils.Errors.DESCRIPTOR_FAULTY(); + } + if (Utils.crc32(data) !== descriptor.crc) { + return false; + } -/***/ 9010: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // @TODO: zip64 bit descriptor fields + // if bit 3 is set and any value in local header "zip64 Extended information" extra field are set 0 (place holder) + // then 64-bit descriptor format is used instead of 32-bit + // central header - "zip64 Extended information" extra field should store real values and not place holders + } + return true; + } + + function decompress(/*Boolean*/ async, /*Function*/ callback, /*String, Buffer*/ pass) { + if (typeof callback === "undefined" && typeof async === "string") { + pass = async; + async = void 0; + } + if (_isDirectory) { + if (async && callback) { + callback(Buffer.alloc(0), Utils.Errors.DIRECTORY_CONTENT_ERROR()); //si added error. 
+ } + return Buffer.alloc(0); + } -module.exports = globSync -globSync.GlobSync = GlobSync + var compressedData = getCompressedDataFromZip(); -var rp = __nccwpck_require__(6863) -var minimatch = __nccwpck_require__(3973) -var Minimatch = minimatch.Minimatch -var Glob = (__nccwpck_require__(1957).Glob) -var util = __nccwpck_require__(3837) -var path = __nccwpck_require__(1017) -var assert = __nccwpck_require__(9491) -var isAbsolute = __nccwpck_require__(8714) -var common = __nccwpck_require__(7625) -var setopts = common.setopts -var ownProp = common.ownProp -var childrenIgnored = common.childrenIgnored -var isIgnored = common.isIgnored + if (compressedData.length === 0) { + // File is empty, nothing to decompress. + if (async && callback) callback(compressedData); + return compressedData; + } -function globSync (pattern, options) { - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') + if (_centralHeader.encrypted) { + if ("string" !== typeof pass && !Buffer.isBuffer(pass)) { + throw Utils.Errors.INVALID_PASS_PARAM(); + } + compressedData = Methods.ZipCrypto.decrypt(compressedData, _centralHeader, pass); + } - return new GlobSync(pattern, options).found -} + var data = Buffer.alloc(_centralHeader.size); -function GlobSync (pattern, options) { - if (!pattern) - throw new Error('must provide pattern') + switch (_centralHeader.method) { + case Utils.Constants.STORED: + compressedData.copy(data); + if (!crc32OK(data)) { + if (async && callback) callback(data, Utils.Errors.BAD_CRC()); //si added error + throw Utils.Errors.BAD_CRC(); + } else { + //si added otherwise did not seem to return data. + if (async && callback) callback(data); + return data; + } + case Utils.Constants.DEFLATED: + var inflater = new Methods.Inflater(compressedData, _centralHeader.size); + if (!async) { + const result = inflater.inflate(data); + result.copy(data, 0); + if (!crc32OK(data)) { + throw Utils.Errors.BAD_CRC(`"${decoder.decode(_entryName)}"`); + } + return data; + } else { + inflater.inflateAsync(function (result) { + result.copy(result, 0); + if (callback) { + if (!crc32OK(result)) { + callback(result, Utils.Errors.BAD_CRC()); //si added error + } else { + callback(result); + } + } + }); + } + break; + default: + if (async && callback) callback(Buffer.alloc(0), Utils.Errors.UNKNOWN_METHOD()); + throw Utils.Errors.UNKNOWN_METHOD(); + } + } - if (typeof options === 'function' || arguments.length === 3) - throw new TypeError('callback provided to sync glob\n'+ - 'See: https://github.com/isaacs/node-glob/issues/167') + function compress(/*Boolean*/ async, /*Function*/ callback) { + if ((!uncompressedData || !uncompressedData.length) && Buffer.isBuffer(input)) { + // no data set or the data wasn't changed to require recompression + if (async && callback) callback(getCompressedDataFromZip()); + return getCompressedDataFromZip(); + } - if (!(this instanceof GlobSync)) - return new GlobSync(pattern, options) + if (uncompressedData.length && !_isDirectory) { + var compressedData; + // Local file header + switch (_centralHeader.method) { + case Utils.Constants.STORED: + _centralHeader.compressedSize = _centralHeader.size; - setopts(this, pattern, options) + compressedData = Buffer.alloc(uncompressedData.length); + uncompressedData.copy(compressedData); - if (this.noprocess) - return this + if (async && callback) callback(compressedData); + return compressedData; + default: + case 
Utils.Constants.DEFLATED: + var deflater = new Methods.Deflater(uncompressedData); + if (!async) { + var deflated = deflater.deflate(); + _centralHeader.compressedSize = deflated.length; + return deflated; + } else { + deflater.deflateAsync(function (data) { + compressedData = Buffer.alloc(data.length); + _centralHeader.compressedSize = data.length; + data.copy(compressedData); + callback && callback(compressedData); + }); + } + deflater = null; + break; + } + } else if (async && callback) { + callback(Buffer.alloc(0)); + } else { + return Buffer.alloc(0); + } + } - var n = this.minimatch.set.length - this.matches = new Array(n) - for (var i = 0; i < n; i ++) { - this._process(this.minimatch.set[i], i, false) - } - this._finish() -} + function readUInt64LE(buffer, offset) { + return (buffer.readUInt32LE(offset + 4) << 4) + buffer.readUInt32LE(offset); + } -GlobSync.prototype._finish = function () { - assert.ok(this instanceof GlobSync) - if (this.realpath) { - var self = this - this.matches.forEach(function (matchset, index) { - var set = self.matches[index] = Object.create(null) - for (var p in matchset) { + function parseExtra(data) { try { - p = self._makeAbs(p) - var real = rp.realpathSync(p, self.realpathCache) - set[real] = true - } catch (er) { - if (er.syscall === 'stat') - set[self._makeAbs(p)] = true - else - throw er + var offset = 0; + var signature, size, part; + while (offset + 4 < data.length) { + signature = data.readUInt16LE(offset); + offset += 2; + size = data.readUInt16LE(offset); + offset += 2; + part = data.slice(offset, offset + size); + offset += size; + if (Constants.ID_ZIP64 === signature) { + parseZip64ExtendedInformation(part); + } + } + } catch (error) { + throw Utils.Errors.EXTRA_FIELD_PARSE_ERROR(); } - } - }) - } - common.finish(this) -} - - -GlobSync.prototype._process = function (pattern, index, inGlobStar) { - assert.ok(this instanceof GlobSync) - - // Get the first [n] parts of pattern that are all strings. - var n = 0 - while (typeof pattern[n] === 'string') { - n ++ - } - // now n is the index of the first one that is *not* a string. - - // See if there's anything else - var prefix - switch (n) { - // if not, then this is rather simple - case pattern.length: - this._processSimple(pattern.join('/'), index) - return + } - case 0: - // pattern *starts* with some non-trivial item. - // going to readdir(cwd), but not include the prefix in matches. - prefix = null - break + //Override header field values with values from the ZIP64 extra field + function parseZip64ExtendedInformation(data) { + var size, compressedSize, offset, diskNumStart; - default: - // pattern has some string bits in the front. 
- // whatever it starts with, whether that's 'absolute' like /foo/bar, - // or 'relative' like '../baz' - prefix = pattern.slice(0, n).join('/') - break - } + if (data.length >= Constants.EF_ZIP64_SCOMP) { + size = readUInt64LE(data, Constants.EF_ZIP64_SUNCOMP); + if (_centralHeader.size === Constants.EF_ZIP64_OR_32) { + _centralHeader.size = size; + } + } + if (data.length >= Constants.EF_ZIP64_RHO) { + compressedSize = readUInt64LE(data, Constants.EF_ZIP64_SCOMP); + if (_centralHeader.compressedSize === Constants.EF_ZIP64_OR_32) { + _centralHeader.compressedSize = compressedSize; + } + } + if (data.length >= Constants.EF_ZIP64_DSN) { + offset = readUInt64LE(data, Constants.EF_ZIP64_RHO); + if (_centralHeader.offset === Constants.EF_ZIP64_OR_32) { + _centralHeader.offset = offset; + } + } + if (data.length >= Constants.EF_ZIP64_DSN + 4) { + diskNumStart = data.readUInt32LE(Constants.EF_ZIP64_DSN); + if (_centralHeader.diskNumStart === Constants.EF_ZIP64_OR_16) { + _centralHeader.diskNumStart = diskNumStart; + } + } + } - var remain = pattern.slice(n) + return { + get entryName() { + return decoder.decode(_entryName); + }, + get rawEntryName() { + return _entryName; + }, + set entryName(val) { + _entryName = Utils.toBuffer(val, decoder.encode); + var lastChar = _entryName[_entryName.length - 1]; + _isDirectory = lastChar === 47 || lastChar === 92; + _centralHeader.fileNameLength = _entryName.length; + }, - // get the list of entries. - var read - if (prefix === null) - read = '.' - else if (isAbsolute(prefix) || - isAbsolute(pattern.map(function (p) { - return typeof p === 'string' ? p : '[*]' - }).join('/'))) { - if (!prefix || !isAbsolute(prefix)) - prefix = '/' + prefix - read = prefix - } else - read = prefix + get efs() { + if (typeof _efs === "function") { + return _efs(this.entryName); + } else { + return _efs; + } + }, - var abs = this._makeAbs(read) + get extra() { + return _extra; + }, + set extra(val) { + _extra = val; + _centralHeader.extraLength = val.length; + parseExtra(val); + }, - //if ignored, skip processing - if (childrenIgnored(this, read)) - return + get comment() { + return decoder.decode(_comment); + }, + set comment(val) { + _comment = Utils.toBuffer(val, decoder.encode); + _centralHeader.commentLength = _comment.length; + if (_comment.length > 0xffff) throw Utils.Errors.COMMENT_TOO_LONG(); + }, - var isGlobStar = remain[0] === minimatch.GLOBSTAR - if (isGlobStar) - this._processGlobStar(prefix, read, abs, remain, index, inGlobStar) - else - this._processReaddir(prefix, read, abs, remain, index, inGlobStar) -} + get name() { + var n = decoder.decode(_entryName); + return _isDirectory + ? n + .substr(n.length - 1) + .split("/") + .pop() + : n.split("/").pop(); + }, + get isDirectory() { + return _isDirectory; + }, + getCompressedData: function () { + return compress(false, null); + }, -GlobSync.prototype._processReaddir = function (prefix, read, abs, remain, index, inGlobStar) { - var entries = this._readdir(abs, inGlobStar) + getCompressedDataAsync: function (/*Function*/ callback) { + compress(true, callback); + }, - // if the abs isn't a dir, then nothing can match! 
- if (!entries) - return + setData: function (value) { + uncompressedData = Utils.toBuffer(value, Utils.decoder.encode); + if (!_isDirectory && uncompressedData.length) { + _centralHeader.size = uncompressedData.length; + _centralHeader.method = Utils.Constants.DEFLATED; + _centralHeader.crc = Utils.crc32(value); + _centralHeader.changed = true; + } else { + // folders and blank files should be stored + _centralHeader.method = Utils.Constants.STORED; + } + }, - // It will only match dot entries if it starts with a dot, or if - // dot is set. Stuff like @(.foo|.bar) isn't allowed. - var pn = remain[0] - var negate = !!this.minimatch.negate - var rawGlob = pn._glob - var dotOk = this.dot || rawGlob.charAt(0) === '.' - - var matchedEntries = [] - for (var i = 0; i < entries.length; i++) { - var e = entries[i] - if (e.charAt(0) !== '.' || dotOk) { - var m - if (negate && !prefix) { - m = !e.match(pn) - } else { - m = e.match(pn) - } - if (m) - matchedEntries.push(e) - } - } + getData: function (pass) { + if (_centralHeader.changed) { + return uncompressedData; + } else { + return decompress(false, null, pass); + } + }, - var len = matchedEntries.length - // If there are no matched entries, then nothing matches. - if (len === 0) - return + getDataAsync: function (/*Function*/ callback, pass) { + if (_centralHeader.changed) { + callback(uncompressedData); + } else { + decompress(true, callback, pass); + } + }, - // if this is the last remaining pattern bit, then no need for - // an additional stat *unless* the user has specified mark or - // stat explicitly. We know they exist, since readdir returned - // them. + set attr(attr) { + _centralHeader.attr = attr; + }, + get attr() { + return _centralHeader.attr; + }, - if (remain.length === 1 && !this.mark && !this.stat) { - if (!this.matches[index]) - this.matches[index] = Object.create(null) + set header(/*Buffer*/ data) { + _centralHeader.loadFromBinary(data); + }, - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - if (prefix) { - if (prefix.slice(-1) !== '/') - e = prefix + '/' + e - else - e = prefix + e - } + get header() { + return _centralHeader; + }, - if (e.charAt(0) === '/' && !this.nomount) { - e = path.join(this.root, e) - } - this._emitMatch(index, e) - } - // This was the last one, and no stats were needed - return - } + packCentralHeader: function () { + _centralHeader.flags_efs = this.efs; + _centralHeader.extraLength = _extra.length; + // 1. create header (buffer) + var header = _centralHeader.centralHeaderToBinary(); + var addpos = Utils.Constants.CENHDR; + // 2. add file name + _entryName.copy(header, addpos); + addpos += _entryName.length; + // 3. add extra data + _extra.copy(header, addpos); + addpos += _centralHeader.extraLength; + // 4. add file comment + _comment.copy(header, addpos); + return header; + }, - // now test all matched entries as stand-ins for that part - // of the pattern. - remain.shift() - for (var i = 0; i < len; i ++) { - var e = matchedEntries[i] - var newPattern - if (prefix) - newPattern = [prefix, e] - else - newPattern = [e] - this._process(newPattern.concat(remain), index, inGlobStar) - } -} + packLocalHeader: function () { + let addpos = 0; + _centralHeader.flags_efs = this.efs; + _centralHeader.extraLocalLength = _extralocal.length; + // 1. construct local header Buffer + const localHeaderBuf = _centralHeader.localHeaderToBinary(); + // 2. 
localHeader - crate header buffer + const localHeader = Buffer.alloc(localHeaderBuf.length + _entryName.length + _centralHeader.extraLocalLength); + // 2.1 add localheader + localHeaderBuf.copy(localHeader, addpos); + addpos += localHeaderBuf.length; + // 2.2 add file name + _entryName.copy(localHeader, addpos); + addpos += _entryName.length; + // 2.3 add extra field + _extralocal.copy(localHeader, addpos); + addpos += _extralocal.length; + return localHeader; + }, -GlobSync.prototype._emitMatch = function (index, e) { - if (isIgnored(this, e)) - return + toJSON: function () { + const bytes = function (nr) { + return "<" + ((nr && nr.length + " bytes buffer") || "null") + ">"; + }; - var abs = this._makeAbs(e) + return { + entryName: this.entryName, + name: this.name, + comment: this.comment, + isDirectory: this.isDirectory, + header: _centralHeader.toJSON(), + compressedData: bytes(input), + data: bytes(uncompressedData) + }; + }, - if (this.mark) - e = this._mark(e) + toString: function () { + return JSON.stringify(this.toJSON(), null, "\t"); + } + }; +}; - if (this.absolute) { - e = abs - } - if (this.matches[index][e]) - return +/***/ }), - if (this.nodir) { - var c = this.cache[abs] - if (c === 'DIR' || Array.isArray(c)) - return - } +/***/ 7744: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.matches[index][e] = true +const ZipEntry = __nccwpck_require__(4057); +const Headers = __nccwpck_require__(4958); +const Utils = __nccwpck_require__(5182); - if (this.stat) - this._stat(e) -} +module.exports = function (/*Buffer|null*/ inBuffer, /** object */ options) { + var entryList = [], + entryTable = {}, + _comment = Buffer.alloc(0), + mainHeader = new Headers.MainHeader(), + loadedEntries = false; + var password = null; + const temporary = new Set(); + // assign options + const opts = options; -GlobSync.prototype._readdirInGlobStar = function (abs) { - // follow all symlinked directories forever - // just proceed as if this is a non-globstar situation - if (this.follow) - return this._readdir(abs, false) + const { noSort, decoder } = opts; - var entries - var lstat - var stat - try { - lstat = this.fs.lstatSync(abs) - } catch (er) { - if (er.code === 'ENOENT') { - // lstat failed, doesn't exist - return null + if (inBuffer) { + // is a memory buffer + readMainHeader(opts.readEntries); + } else { + // none. is a new file + loadedEntries = true; } - } - var isSym = lstat && lstat.isSymbolicLink() - this.symlinks[abs] = isSym + function makeTemporaryFolders() { + const foldersList = new Set(); - // If it's not a symlink or a dir, then it's definitely a regular file. - // don't bother doing a readdir in that case. 
- if (!isSym && lstat && !lstat.isDirectory()) - this.cache[abs] = 'FILE' - else - entries = this._readdir(abs, false) + // Make list of all folders in file + for (const elem of Object.keys(entryTable)) { + const elements = elem.split("/"); + elements.pop(); // filename + if (!elements.length) continue; // no folders + for (let i = 0; i < elements.length; i++) { + const sub = elements.slice(0, i + 1).join("/") + "/"; + foldersList.add(sub); + } + } - return entries -} + // create missing folders as temporary + for (const elem of foldersList) { + if (!(elem in entryTable)) { + const tempfolder = new ZipEntry(opts); + tempfolder.entryName = elem; + tempfolder.attr = 0x10; + tempfolder.temporary = true; + entryList.push(tempfolder); + entryTable[tempfolder.entryName] = tempfolder; + temporary.add(tempfolder); + } + } + } -GlobSync.prototype._readdir = function (abs, inGlobStar) { - var entries + function readEntries() { + loadedEntries = true; + entryTable = {}; + if (mainHeader.diskEntries > (inBuffer.length - mainHeader.offset) / Utils.Constants.CENHDR) { + throw Utils.Errors.DISK_ENTRY_TOO_LARGE(); + } + entryList = new Array(mainHeader.diskEntries); // total number of entries + var index = mainHeader.offset; // offset of first CEN header + for (var i = 0; i < entryList.length; i++) { + var tmp = index, + entry = new ZipEntry(opts, inBuffer); + entry.header = inBuffer.slice(tmp, (tmp += Utils.Constants.CENHDR)); - if (inGlobStar && !ownProp(this.symlinks, abs)) - return this._readdirInGlobStar(abs) + entry.entryName = inBuffer.slice(tmp, (tmp += entry.header.fileNameLength)); - if (ownProp(this.cache, abs)) { - var c = this.cache[abs] - if (!c || c === 'FILE') - return null + if (entry.header.extraLength) { + entry.extra = inBuffer.slice(tmp, (tmp += entry.header.extraLength)); + } - if (Array.isArray(c)) - return c - } + if (entry.header.commentLength) entry.comment = inBuffer.slice(tmp, tmp + entry.header.commentLength); - try { - return this._readdirEntries(abs, this.fs.readdirSync(abs)) - } catch (er) { - this._readdirError(abs, er) - return null - } -} + index += entry.header.centralHeaderSize; -GlobSync.prototype._readdirEntries = function (abs, entries) { - // if we haven't asked to stat everything, then just - // assume that everything in there exists, so we can avoid - // having to stat it a second time. - if (!this.mark && !this.stat) { - for (var i = 0; i < entries.length; i ++) { - var e = entries[i] - if (abs === '/') - e = abs + e - else - e = abs + '/' + e - this.cache[e] = true + entryList[i] = entry; + entryTable[entry.entryName] = entry; + } + temporary.clear(); + makeTemporaryFolders(); } - } - this.cache[abs] = entries + function readMainHeader(/*Boolean*/ readNow) { + var i = inBuffer.length - Utils.Constants.ENDHDR, // END header size + max = Math.max(0, i - 0xffff), // 0xFFFF is the max zip file comment length + n = max, + endStart = inBuffer.length, + endOffset = -1, // Start offset of the END header + commentEnd = 0; - // mark and cache dir-ness - return entries -} + // option to search header form entire file + const trailingSpace = typeof opts.trailingSpace === "boolean" ? opts.trailingSpace : false; + if (trailingSpace) max = 0; -GlobSync.prototype._readdirError = function (f, er) { - // handle errors, and cache the information - switch (er.code) { - case 'ENOTSUP': // https://github.com/isaacs/node-glob/issues/205 - case 'ENOTDIR': // totally normal. means it *does* exist. 
- var abs = this._makeAbs(f) - this.cache[abs] = 'FILE' - if (abs === this.cwdAbs) { - var error = new Error(er.code + ' invalid cwd ' + this.cwd) - error.path = this.cwd - error.code = er.code - throw error - } - break + for (i; i >= n; i--) { + if (inBuffer[i] !== 0x50) continue; // quick check that the byte is 'P' + if (inBuffer.readUInt32LE(i) === Utils.Constants.ENDSIG) { + // "PK\005\006" + endOffset = i; + commentEnd = i; + endStart = i + Utils.Constants.ENDHDR; + // We already found a regular signature, let's look just a bit further to check if there's any zip64 signature + n = i - Utils.Constants.END64HDR; + continue; + } - case 'ENOENT': // not terribly unusual - case 'ELOOP': - case 'ENAMETOOLONG': - case 'UNKNOWN': - this.cache[this._makeAbs(f)] = false - break + if (inBuffer.readUInt32LE(i) === Utils.Constants.END64SIG) { + // Found a zip64 signature, let's continue reading the whole zip64 record + n = max; + continue; + } - default: // some unusual error. Treat as failure. - this.cache[this._makeAbs(f)] = false - if (this.strict) - throw er - if (!this.silent) - console.error('glob error', er) - break - } -} + if (inBuffer.readUInt32LE(i) === Utils.Constants.ZIP64SIG) { + // Found the zip64 record, let's determine it's size + endOffset = i; + endStart = i + Utils.readBigUInt64LE(inBuffer, i + Utils.Constants.ZIP64SIZE) + Utils.Constants.ZIP64LEAD; + break; + } + } -GlobSync.prototype._processGlobStar = function (prefix, read, abs, remain, index, inGlobStar) { + if (endOffset == -1) throw Utils.Errors.INVALID_FORMAT(); - var entries = this._readdir(abs, inGlobStar) + mainHeader.loadFromBinary(inBuffer.slice(endOffset, endStart)); + if (mainHeader.commentLength) { + _comment = inBuffer.slice(commentEnd + Utils.Constants.ENDHDR); + } + if (readNow) readEntries(); + } - // no entries means not a dir, so it can never have matches - // foo.txt/** doesn't match foo.txt - if (!entries) - return + function sortEntries() { + if (entryList.length > 1 && !noSort) { + entryList.sort((a, b) => a.entryName.toLowerCase().localeCompare(b.entryName.toLowerCase())); + } + } - // test without the globstar, and with every child both below - // and replacing the globstar. - var remainWithoutGlobStar = remain.slice(1) - var gspref = prefix ? [ prefix ] : [] - var noGlobStar = gspref.concat(remainWithoutGlobStar) + return { + /** + * Returns an array of ZipEntry objects existent in the current opened archive + * @return Array + */ + get entries() { + if (!loadedEntries) { + readEntries(); + } + return entryList.filter((e) => !temporary.has(e)); + }, - // the noGlobStar pattern exits the inGlobStar state - this._process(noGlobStar, index, false) + /** + * Archive comment + * @return {String} + */ + get comment() { + return decoder.decode(_comment); + }, + set comment(val) { + _comment = Utils.toBuffer(val, decoder.encode); + mainHeader.commentLength = _comment.length; + }, - var len = entries.length - var isSym = this.symlinks[abs] + getEntryCount: function () { + if (!loadedEntries) { + return mainHeader.diskEntries; + } - // If it's a symlink, and we're in a globstar, then stop - if (isSym && inGlobStar) - return + return entryList.length; + }, - for (var i = 0; i < len; i++) { - var e = entries[i] - if (e.charAt(0) === '.' 
&& !this.dot) - continue + forEach: function (callback) { + this.entries.forEach(callback); + }, - // these two cases enter the inGlobStar state - var instead = gspref.concat(entries[i], remainWithoutGlobStar) - this._process(instead, index, true) + /** + * Returns a reference to the entry with the given name or null if entry is inexistent + * + * @param entryName + * @return ZipEntry + */ + getEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + return entryTable[entryName] || null; + }, - var below = gspref.concat(entries[i], remain) - this._process(below, index, true) - } -} + /** + * Adds the given entry to the entry list + * + * @param entry + */ + setEntry: function (/*ZipEntry*/ entry) { + if (!loadedEntries) { + readEntries(); + } + entryList.push(entry); + entryTable[entry.entryName] = entry; + mainHeader.totalEntries = entryList.length; + }, -GlobSync.prototype._processSimple = function (prefix, index) { - // XXX review this. Shouldn't it be doing the mounting etc - // before doing stat? kinda weird? - var exists = this._stat(prefix) + /** + * Removes the file with the given name from the entry list. + * + * If the entry is a directory, then all nested files and directories will be removed + * @param entryName + * @returns {void} + */ + deleteFile: function (/*String*/ entryName, withsubfolders = true) { + if (!loadedEntries) { + readEntries(); + } + const entry = entryTable[entryName]; + const list = this.getEntryChildren(entry, withsubfolders).map((child) => child.entryName); - if (!this.matches[index]) - this.matches[index] = Object.create(null) + list.forEach(this.deleteEntry); + }, - // If it doesn't exist, then just mark the lack of results - if (!exists) - return + /** + * Removes the entry with the given name from the entry list. + * + * @param {string} entryName + * @returns {void} + */ + deleteEntry: function (/*String*/ entryName) { + if (!loadedEntries) { + readEntries(); + } + const entry = entryTable[entryName]; + const index = entryList.indexOf(entry); + if (index >= 0) { + entryList.splice(index, 1); + delete entryTable[entryName]; + mainHeader.totalEntries = entryList.length; + } + }, - if (prefix && isAbsolute(prefix) && !this.nomount) { - var trail = /[\/\\]$/.test(prefix) - if (prefix.charAt(0) === '/') { - prefix = path.join(this.root, prefix) - } else { - prefix = path.resolve(this.root, prefix) - if (trail) - prefix += '/' - } - } + /** + * Iterates and returns all nested files and directories of the given entry + * + * @param entry + * @return Array + */ + getEntryChildren: function (/*ZipEntry*/ entry, subfolders = true) { + if (!loadedEntries) { + readEntries(); + } + if (typeof entry === "object") { + if (entry.isDirectory && subfolders) { + const list = []; + const name = entry.entryName; + + for (const zipEntry of entryList) { + if (zipEntry.entryName.startsWith(name)) { + list.push(zipEntry); + } + } + return list; + } else { + return [entry]; + } + } + return []; + }, - if (process.platform === 'win32') - prefix = prefix.replace(/\\/g, '/') + /** + * How many child elements entry has + * + * @param {ZipEntry} entry + * @return {integer} + */ + getChildCount: function (entry) { + if (entry && entry.isDirectory) { + const list = this.getEntryChildren(entry); + return list.includes(entry) ? 
list.length - 1 : list.length; + } + return 0; + }, - // Mark this as a match - this._emitMatch(index, prefix) -} + /** + * Returns the zip file + * + * @return Buffer + */ + compressToBuffer: function () { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); -// Returns either 'DIR', 'FILE', or false -GlobSync.prototype._stat = function (f) { - var abs = this._makeAbs(f) - var needDir = f.slice(-1) === '/' + const dataBlock = []; + const headerBlocks = []; + let totalSize = 0; + let dindex = 0; - if (f.length > this.maxLength) - return false + mainHeader.size = 0; + mainHeader.offset = 0; + let totalEntries = 0; - if (!this.stat && ownProp(this.cache, abs)) { - var c = this.cache[abs] + for (const entry of this.entries) { + // compress data and set local and entry header accordingly. Reason why is called first + const compressedData = entry.getCompressedData(); + entry.header.offset = dindex; - if (Array.isArray(c)) - c = 'DIR' + // 1. construct local header + const localHeader = entry.packLocalHeader(); - // It exists, but maybe not how we need it - if (!needDir || c === 'DIR') - return c + // 2. offsets + const dataLength = localHeader.length + compressedData.length; + dindex += dataLength; - if (needDir && c === 'FILE') - return false + // 3. store values in sequence + dataBlock.push(localHeader); + dataBlock.push(compressedData); - // otherwise we have to stat, because maybe c=true - // if we know it exists, but not what it is. - } + // 4. construct central header + const centralHeader = entry.packCentralHeader(); + headerBlocks.push(centralHeader); + // 5. update main header + mainHeader.size += centralHeader.length; + totalSize += dataLength + centralHeader.length; + totalEntries++; + } - var exists - var stat = this.statCache[abs] - if (!stat) { - var lstat - try { - lstat = this.fs.lstatSync(abs) - } catch (er) { - if (er && (er.code === 'ENOENT' || er.code === 'ENOTDIR')) { - this.statCache[abs] = false - return false - } - } + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + mainHeader.totalEntries = totalEntries; - if (lstat && lstat.isSymbolicLink()) { - try { - stat = this.fs.statSync(abs) - } catch (er) { - stat = lstat - } - } else { - stat = lstat - } - } + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + // write data blocks + for (const content of dataBlock) { + content.copy(outBuffer, dindex); + dindex += content.length; + } - this.statCache[abs] = stat + // write central directory entries + for (const content of headerBlocks) { + content.copy(outBuffer, dindex); + dindex += content.length; + } - var c = true - if (stat) - c = stat.isDirectory() ? 
'DIR' : 'FILE' + // write main header + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } + mh.copy(outBuffer, dindex); - this.cache[abs] = this.cache[abs] || c + // Since we update entry and main header offsets, + // they are no longer valid and we have to reset content + // (Issue 64) - if (needDir && c === 'FILE') - return false + inBuffer = outBuffer; + loadedEntries = false; - return c -} + return outBuffer; + }, -GlobSync.prototype._mark = function (p) { - return common.mark(this, p) -} + toAsyncBuffer: function (/*Function*/ onSuccess, /*Function*/ onFail, /*Function*/ onItemStart, /*Function*/ onItemEnd) { + try { + if (!loadedEntries) { + readEntries(); + } + sortEntries(); -GlobSync.prototype._makeAbs = function (f) { - return common.makeAbs(this, f) -} + const dataBlock = []; + const centralHeaders = []; + let totalSize = 0; + let dindex = 0; + let totalEntries = 0; + mainHeader.size = 0; + mainHeader.offset = 0; -/***/ }), + const compress2Buffer = function (entryLists) { + if (entryLists.length > 0) { + const entry = entryLists.shift(); + const name = entry.entryName + entry.extra.toString(); + if (onItemStart) onItemStart(name); + entry.getCompressedDataAsync(function (compressedData) { + if (onItemEnd) onItemEnd(name); + entry.header.offset = dindex; -/***/ 2492: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + // 1. construct local header + const localHeader = entry.packLocalHeader(); -var wrappy = __nccwpck_require__(2940) -var reqs = Object.create(null) -var once = __nccwpck_require__(1223) + // 2. offsets + const dataLength = localHeader.length + compressedData.length; + dindex += dataLength; -module.exports = wrappy(inflight) + // 3. store values in sequence + dataBlock.push(localHeader); + dataBlock.push(compressedData); -function inflight (key, cb) { - if (reqs[key]) { - reqs[key].push(cb) - return null - } else { - reqs[key] = [cb] - return makeres(key) - } -} + // central header + const centalHeader = entry.packCentralHeader(); + centralHeaders.push(centalHeader); + mainHeader.size += centalHeader.length; + totalSize += dataLength + centalHeader.length; + totalEntries++; -function makeres (key) { - return once(function RES () { - var cbs = reqs[key] - var len = cbs.length - var args = slice(arguments) + compress2Buffer(entryLists); + }); + } else { + totalSize += mainHeader.mainHeaderSize; // also includes zip file comment length + // point to end of data and beginning of central directory first record + mainHeader.offset = dindex; + mainHeader.totalEntries = totalEntries; - // XXX It's somewhat ambiguous whether a new callback added in this - // pass should be queued for later execution if something in the - // list of callbacks throws, or if it should just be discarded. - // However, it's such an edge case that it hardly matters, and either - // choice is likely as surprising as the other. - // As it happens, we do go ahead and schedule it for later execution. - try { - for (var i = 0; i < len; i++) { - cbs[i].apply(null, args) - } - } finally { - if (cbs.length > len) { - // added more in the interim. - // de-zalgo, just in case, but don't call again. 
- cbs.splice(0, len) - process.nextTick(function () { - RES.apply(null, args) - }) - } else { - delete reqs[key] - } - } - }) -} + dindex = 0; + const outBuffer = Buffer.alloc(totalSize); + dataBlock.forEach(function (content) { + content.copy(outBuffer, dindex); // write data blocks + dindex += content.length; + }); + centralHeaders.forEach(function (content) { + content.copy(outBuffer, dindex); // write central directory entries + dindex += content.length; + }); -function slice (args) { - var length = args.length - var array = [] + const mh = mainHeader.toBinary(); + if (_comment) { + _comment.copy(mh, Utils.Constants.ENDHDR); // add zip file comment + } - for (var i = 0; i < length; i++) array[i] = args[i] - return array -} + mh.copy(outBuffer, dindex); // write main header + // Since we update entry and main header offsets, they are no + // longer valid and we have to reset content using our new buffer + // (Issue 64) -/***/ }), + inBuffer = outBuffer; + loadedEntries = false; -/***/ 4124: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + onSuccess(outBuffer); + } + }; -try { - var util = __nccwpck_require__(3837); - /* istanbul ignore next */ - if (typeof util.inherits !== 'function') throw ''; - module.exports = util.inherits; -} catch (e) { - /* istanbul ignore next */ - module.exports = __nccwpck_require__(8544); -} + compress2Buffer(Array.from(this.entries)); + } catch (e) { + onFail(e); + } + } + }; +}; /***/ }), -/***/ 8544: -/***/ ((module) => { +/***/ 1174: +/***/ (function(module) { -if (typeof Object.create === 'function') { - // implementation from standard node.js 'util' module - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - ctor.prototype = Object.create(superCtor.prototype, { - constructor: { - value: ctor, - enumerable: false, - writable: true, - configurable: true - } - }) - } - }; -} else { - // old school shim for old browsers - module.exports = function inherits(ctor, superCtor) { - if (superCtor) { - ctor.super_ = superCtor - var TempCtor = function () {} - TempCtor.prototype = superCtor.prototype - ctor.prototype = new TempCtor() - ctor.prototype.constructor = ctor - } - } -} +/** + * This file contains the Bottleneck library (MIT), compiled to ES2017, and without Clustering support. + * https://github.com/SGrondin/bottleneck + */ +(function (global, factory) { + true ? module.exports = factory() : + 0; +}(this, (function () { 'use strict'; + + var commonjsGlobal = typeof globalThis !== 'undefined' ? globalThis : typeof window !== 'undefined' ? window : typeof global !== 'undefined' ? global : typeof self !== 'undefined' ? self : {}; + + function getCjsExportFromNamespace (n) { + return n && n['default'] || n; + } + + var load = function(received, defaults, onto = {}) { + var k, ref, v; + for (k in defaults) { + v = defaults[k]; + onto[k] = (ref = received[k]) != null ? 
ref : v; + } + return onto; + }; + + var overwrite = function(received, defaults, onto = {}) { + var k, v; + for (k in received) { + v = received[k]; + if (defaults[k] !== void 0) { + onto[k] = v; + } + } + return onto; + }; + + var parser = { + load: load, + overwrite: overwrite + }; + + var DLList; + + DLList = class DLList { + constructor(incr, decr) { + this.incr = incr; + this.decr = decr; + this._first = null; + this._last = null; + this.length = 0; + } + + push(value) { + var node; + this.length++; + if (typeof this.incr === "function") { + this.incr(); + } + node = { + value, + prev: this._last, + next: null + }; + if (this._last != null) { + this._last.next = node; + this._last = node; + } else { + this._first = this._last = node; + } + return void 0; + } + + shift() { + var value; + if (this._first == null) { + return; + } else { + this.length--; + if (typeof this.decr === "function") { + this.decr(); + } + } + value = this._first.value; + if ((this._first = this._first.next) != null) { + this._first.prev = null; + } else { + this._last = null; + } + return value; + } + + first() { + if (this._first != null) { + return this._first.value; + } + } + + getArray() { + var node, ref, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, ref.value)); + } + return results; + } + + forEachShift(cb) { + var node; + node = this.shift(); + while (node != null) { + (cb(node), node = this.shift()); + } + return void 0; + } + + debug() { + var node, ref, ref1, ref2, results; + node = this._first; + results = []; + while (node != null) { + results.push((ref = node, node = node.next, { + value: ref.value, + prev: (ref1 = ref.prev) != null ? ref1.value : void 0, + next: (ref2 = ref.next) != null ? ref2.value : void 0 + })); + } + return results; + } + + }; + + var DLList_1 = DLList; + + var Events; + + Events = class Events { + constructor(instance) { + this.instance = instance; + this._events = {}; + if ((this.instance.on != null) || (this.instance.once != null) || (this.instance.removeAllListeners != null)) { + throw new Error("An Emitter already exists for this object"); + } + this.instance.on = (name, cb) => { + return this._addListener(name, "many", cb); + }; + this.instance.once = (name, cb) => { + return this._addListener(name, "once", cb); + }; + this.instance.removeAllListeners = (name = null) => { + if (name != null) { + return delete this._events[name]; + } else { + return this._events = {}; + } + }; + } + + _addListener(name, status, cb) { + var base; + if ((base = this._events)[name] == null) { + base[name] = []; + } + this._events[name].push({cb, status}); + return this.instance; + } + + listenerCount(name) { + if (this._events[name] != null) { + return this._events[name].length; + } else { + return 0; + } + } + + async trigger(name, ...args) { + var e, promises; + try { + if (name !== "debug") { + this.trigger("debug", `Event triggered: ${name}`, args); + } + if (this._events[name] == null) { + return; + } + this._events[name] = this._events[name].filter(function(listener) { + return listener.status !== "none"; + }); + promises = this._events[name].map(async(listener) => { + var e, returned; + if (listener.status === "none") { + return; + } + if (listener.status === "once") { + listener.status = "none"; + } + try { + returned = typeof listener.cb === "function" ? listener.cb(...args) : void 0; + if (typeof (returned != null ? 
returned.then : void 0) === "function") { + return (await returned); + } else { + return returned; + } + } catch (error) { + e = error; + { + this.trigger("error", e); + } + return null; + } + }); + return ((await Promise.all(promises))).find(function(x) { + return x != null; + }); + } catch (error) { + e = error; + { + this.trigger("error", e); + } + return null; + } + } + + }; + + var Events_1 = Events; + + var DLList$1, Events$1, Queues; + + DLList$1 = DLList_1; + + Events$1 = Events_1; + + Queues = class Queues { + constructor(num_priorities) { + var i; + this.Events = new Events$1(this); + this._length = 0; + this._lists = (function() { + var j, ref, results; + results = []; + for (i = j = 1, ref = num_priorities; (1 <= ref ? j <= ref : j >= ref); i = 1 <= ref ? ++j : --j) { + results.push(new DLList$1((() => { + return this.incr(); + }), (() => { + return this.decr(); + }))); + } + return results; + }).call(this); + } + + incr() { + if (this._length++ === 0) { + return this.Events.trigger("leftzero"); + } + } + + decr() { + if (--this._length === 0) { + return this.Events.trigger("zero"); + } + } + + push(job) { + return this._lists[job.options.priority].push(job); + } + + queued(priority) { + if (priority != null) { + return this._lists[priority].length; + } else { + return this._length; + } + } + + shiftAll(fn) { + return this._lists.forEach(function(list) { + return list.forEachShift(fn); + }); + } + + getFirst(arr = this._lists) { + var j, len, list; + for (j = 0, len = arr.length; j < len; j++) { + list = arr[j]; + if (list.length > 0) { + return list; + } + } + return []; + } + + shiftLastFrom(priority) { + return this.getFirst(this._lists.slice(priority).reverse()).shift(); + } + + }; + + var Queues_1 = Queues; + + var BottleneckError; + + BottleneckError = class BottleneckError extends Error {}; + + var BottleneckError_1 = BottleneckError; + + var BottleneckError$1, DEFAULT_PRIORITY, Job, NUM_PRIORITIES, parser$1; + + NUM_PRIORITIES = 10; + + DEFAULT_PRIORITY = 5; + + parser$1 = parser; + + BottleneckError$1 = BottleneckError_1; + + Job = class Job { + constructor(task, args, options, jobDefaults, rejectOnDrop, Events, _states, Promise) { + this.task = task; + this.args = args; + this.rejectOnDrop = rejectOnDrop; + this.Events = Events; + this._states = _states; + this.Promise = Promise; + this.options = parser$1.load(options, jobDefaults); + this.options.priority = this._sanitizePriority(this.options.priority); + if (this.options.id === jobDefaults.id) { + this.options.id = `${this.options.id}-${this._randomIndex()}`; + } + this.promise = new this.Promise((_resolve, _reject) => { + this._resolve = _resolve; + this._reject = _reject; + }); + this.retryCount = 0; + } + + _sanitizePriority(priority) { + var sProperty; + sProperty = ~~priority !== priority ? DEFAULT_PRIORITY : priority; + if (sProperty < 0) { + return 0; + } else if (sProperty > NUM_PRIORITIES - 1) { + return NUM_PRIORITIES - 1; + } else { + return sProperty; + } + } + + _randomIndex() { + return Math.random().toString(36).slice(2); + } + + doDrop({error, message = "This job has been dropped by Bottleneck"} = {}) { + if (this._states.remove(this.options.id)) { + if (this.rejectOnDrop) { + this._reject(error != null ? 
error : new BottleneckError$1(message)); + } + this.Events.trigger("dropped", {args: this.args, options: this.options, task: this.task, promise: this.promise}); + return true; + } else { + return false; + } + } + + _assertStatus(expected) { + var status; + status = this._states.jobStatus(this.options.id); + if (!(status === expected || (expected === "DONE" && status === null))) { + throw new BottleneckError$1(`Invalid job status ${status}, expected ${expected}. Please open an issue at https://github.com/SGrondin/bottleneck/issues`); + } + } + + doReceive() { + this._states.start(this.options.id); + return this.Events.trigger("received", {args: this.args, options: this.options}); + } + + doQueue(reachedHWM, blocked) { + this._assertStatus("RECEIVED"); + this._states.next(this.options.id); + return this.Events.trigger("queued", {args: this.args, options: this.options, reachedHWM, blocked}); + } + + doRun() { + if (this.retryCount === 0) { + this._assertStatus("QUEUED"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + return this.Events.trigger("scheduled", {args: this.args, options: this.options}); + } + + async doExecute(chained, clearGlobalState, run, free) { + var error, eventInfo, passed; + if (this.retryCount === 0) { + this._assertStatus("RUNNING"); + this._states.next(this.options.id); + } else { + this._assertStatus("EXECUTING"); + } + eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount}; + this.Events.trigger("executing", eventInfo); + try { + passed = (await (chained != null ? chained.schedule(this.options, this.task, ...this.args) : this.task(...this.args))); + if (clearGlobalState()) { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._resolve(passed); + } + } catch (error1) { + error = error1; + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + } + + doExpire(clearGlobalState, run, free) { + var error, eventInfo; + if (this._states.jobStatus(this.options.id === "RUNNING")) { + this._states.next(this.options.id); + } + this._assertStatus("EXECUTING"); + eventInfo = {args: this.args, options: this.options, retryCount: this.retryCount}; + error = new BottleneckError$1(`This job timed out after ${this.options.expiration} ms.`); + return this._onFailure(error, eventInfo, clearGlobalState, run, free); + } + + async _onFailure(error, eventInfo, clearGlobalState, run, free) { + var retry, retryAfter; + if (clearGlobalState()) { + retry = (await this.Events.trigger("failed", error, eventInfo)); + if (retry != null) { + retryAfter = ~~retry; + this.Events.trigger("retry", `Retrying ${this.options.id} after ${retryAfter} ms`, eventInfo); + this.retryCount++; + return run(retryAfter); + } else { + this.doDone(eventInfo); + await free(this.options, eventInfo); + this._assertStatus("DONE"); + return this._reject(error); + } + } + } + + doDone(eventInfo) { + this._assertStatus("EXECUTING"); + this._states.next(this.options.id); + return this.Events.trigger("done", eventInfo); + } + + }; + + var Job_1 = Job; + + var BottleneckError$2, LocalDatastore, parser$2; + + parser$2 = parser; + + BottleneckError$2 = BottleneckError_1; + + LocalDatastore = class LocalDatastore { + constructor(instance, storeOptions, storeInstanceOptions) { + this.instance = instance; + this.storeOptions = storeOptions; + this.clientId = this.instance._randomIndex(); + parser$2.load(storeInstanceOptions, storeInstanceOptions, this); + this._nextRequest = 
this._lastReservoirRefresh = this._lastReservoirIncrease = Date.now(); + this._running = 0; + this._done = 0; + this._unblockTime = 0; + this.ready = this.Promise.resolve(); + this.clients = {}; + this._startHeartbeat(); + } + + _startHeartbeat() { + var base; + if ((this.heartbeat == null) && (((this.storeOptions.reservoirRefreshInterval != null) && (this.storeOptions.reservoirRefreshAmount != null)) || ((this.storeOptions.reservoirIncreaseInterval != null) && (this.storeOptions.reservoirIncreaseAmount != null)))) { + return typeof (base = (this.heartbeat = setInterval(() => { + var amount, incr, maximum, now, reservoir; + now = Date.now(); + if ((this.storeOptions.reservoirRefreshInterval != null) && now >= this._lastReservoirRefresh + this.storeOptions.reservoirRefreshInterval) { + this._lastReservoirRefresh = now; + this.storeOptions.reservoir = this.storeOptions.reservoirRefreshAmount; + this.instance._drainAll(this.computeCapacity()); + } + if ((this.storeOptions.reservoirIncreaseInterval != null) && now >= this._lastReservoirIncrease + this.storeOptions.reservoirIncreaseInterval) { + ({ + reservoirIncreaseAmount: amount, + reservoirIncreaseMaximum: maximum, + reservoir + } = this.storeOptions); + this._lastReservoirIncrease = now; + incr = maximum != null ? Math.min(amount, maximum - reservoir) : amount; + if (incr > 0) { + this.storeOptions.reservoir += incr; + return this.instance._drainAll(this.computeCapacity()); + } + } + }, this.heartbeatInterval))).unref === "function" ? base.unref() : void 0; + } else { + return clearInterval(this.heartbeat); + } + } + + async __publish__(message) { + await this.yieldLoop(); + return this.instance.Events.trigger("message", message.toString()); + } + + async __disconnect__(flush) { + await this.yieldLoop(); + clearInterval(this.heartbeat); + return this.Promise.resolve(); + } + + yieldLoop(t = 0) { + return new this.Promise(function(resolve, reject) { + return setTimeout(resolve, t); + }); + } + + computePenalty() { + var ref; + return (ref = this.storeOptions.penalty) != null ? 
ref : (15 * this.storeOptions.minTime) || 5000; + } + + async __updateSettings__(options) { + await this.yieldLoop(); + parser$2.overwrite(options, options, this.storeOptions); + this._startHeartbeat(); + this.instance._drainAll(this.computeCapacity()); + return true; + } + + async __running__() { + await this.yieldLoop(); + return this._running; + } + + async __queued__() { + await this.yieldLoop(); + return this.instance.queued(); + } + + async __done__() { + await this.yieldLoop(); + return this._done; + } + + async __groupCheck__(time) { + await this.yieldLoop(); + return (this._nextRequest + this.timeout) < time; + } + + computeCapacity() { + var maxConcurrent, reservoir; + ({maxConcurrent, reservoir} = this.storeOptions); + if ((maxConcurrent != null) && (reservoir != null)) { + return Math.min(maxConcurrent - this._running, reservoir); + } else if (maxConcurrent != null) { + return maxConcurrent - this._running; + } else if (reservoir != null) { + return reservoir; + } else { + return null; + } + } + + conditionsCheck(weight) { + var capacity; + capacity = this.computeCapacity(); + return (capacity == null) || weight <= capacity; + } + + async __incrementReservoir__(incr) { + var reservoir; + await this.yieldLoop(); + reservoir = this.storeOptions.reservoir += incr; + this.instance._drainAll(this.computeCapacity()); + return reservoir; + } + + async __currentReservoir__() { + await this.yieldLoop(); + return this.storeOptions.reservoir; + } + + isBlocked(now) { + return this._unblockTime >= now; + } + + check(weight, now) { + return this.conditionsCheck(weight) && (this._nextRequest - now) <= 0; + } + + async __check__(weight) { + var now; + await this.yieldLoop(); + now = Date.now(); + return this.check(weight, now); + } + + async __register__(index, weight, expiration) { + var now, wait; + await this.yieldLoop(); + now = Date.now(); + if (this.conditionsCheck(weight)) { + this._running += weight; + if (this.storeOptions.reservoir != null) { + this.storeOptions.reservoir -= weight; + } + wait = Math.max(this._nextRequest - now, 0); + this._nextRequest = now + wait + this.storeOptions.minTime; + return { + success: true, + wait, + reservoir: this.storeOptions.reservoir + }; + } else { + return { + success: false + }; + } + } + + strategyIsBlock() { + return this.storeOptions.strategy === 3; + } + + async __submit__(queueLength, weight) { + var blocked, now, reachedHWM; + await this.yieldLoop(); + if ((this.storeOptions.maxConcurrent != null) && weight > this.storeOptions.maxConcurrent) { + throw new BottleneckError$2(`Impossible to add a job having a weight of ${weight} to a limiter having a maxConcurrent setting of ${this.storeOptions.maxConcurrent}`); + } + now = Date.now(); + reachedHWM = (this.storeOptions.highWater != null) && queueLength === this.storeOptions.highWater && !this.check(weight, now); + blocked = this.strategyIsBlock() && (reachedHWM || this.isBlocked(now)); + if (blocked) { + this._unblockTime = now + this.computePenalty(); + this._nextRequest = this._unblockTime + this.storeOptions.minTime; + this.instance._dropAllQueued(); + } + return { + reachedHWM, + blocked, + strategy: this.storeOptions.strategy + }; + } + + async __free__(index, weight) { + await this.yieldLoop(); + this._running -= weight; + this._done += weight; + this.instance._drainAll(this.computeCapacity()); + return { + running: this._running + }; + } + + }; + + var LocalDatastore_1 = LocalDatastore; + + var BottleneckError$3, States; + + BottleneckError$3 = BottleneckError_1; + + States = class 
States { + constructor(status1) { + this.status = status1; + this._jobs = {}; + this.counts = this.status.map(function() { + return 0; + }); + } + + next(id) { + var current, next; + current = this._jobs[id]; + next = current + 1; + if ((current != null) && next < this.status.length) { + this.counts[current]--; + this.counts[next]++; + return this._jobs[id]++; + } else if (current != null) { + this.counts[current]--; + return delete this._jobs[id]; + } + } + + start(id) { + var initial; + initial = 0; + this._jobs[id] = initial; + return this.counts[initial]++; + } + + remove(id) { + var current; + current = this._jobs[id]; + if (current != null) { + this.counts[current]--; + delete this._jobs[id]; + } + return current != null; + } + + jobStatus(id) { + var ref; + return (ref = this.status[this._jobs[id]]) != null ? ref : null; + } + + statusJobs(status) { + var k, pos, ref, results, v; + if (status != null) { + pos = this.status.indexOf(status); + if (pos < 0) { + throw new BottleneckError$3(`status must be one of ${this.status.join(', ')}`); + } + ref = this._jobs; + results = []; + for (k in ref) { + v = ref[k]; + if (v === pos) { + results.push(k); + } + } + return results; + } else { + return Object.keys(this._jobs); + } + } + + statusCounts() { + return this.counts.reduce(((acc, v, i) => { + acc[this.status[i]] = v; + return acc; + }), {}); + } + + }; + + var States_1 = States; + + var DLList$2, Sync; + + DLList$2 = DLList_1; + + Sync = class Sync { + constructor(name, Promise) { + this.schedule = this.schedule.bind(this); + this.name = name; + this.Promise = Promise; + this._running = 0; + this._queue = new DLList$2(); + } + + isEmpty() { + return this._queue.length === 0; + } + + async _tryToRun() { + var args, cb, error, reject, resolve, returned, task; + if ((this._running < 1) && this._queue.length > 0) { + this._running++; + ({task, args, resolve, reject} = this._queue.shift()); + cb = (await (async function() { + try { + returned = (await task(...args)); + return function() { + return resolve(returned); + }; + } catch (error1) { + error = error1; + return function() { + return reject(error); + }; + } + })()); + this._running--; + this._tryToRun(); + return cb(); + } + } + + schedule(task, ...args) { + var promise, reject, resolve; + resolve = reject = null; + promise = new this.Promise(function(_resolve, _reject) { + resolve = _resolve; + return reject = _reject; + }); + this._queue.push({task, args, resolve, reject}); + this._tryToRun(); + return promise; + } + + }; + + var Sync_1 = Sync; + + var version = "2.19.5"; + var version$1 = { + version: version + }; + + var version$2 = /*#__PURE__*/Object.freeze({ + version: version, + default: version$1 + }); + + var require$$2 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var require$$3 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var require$$4 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var Events$2, Group, IORedisConnection$1, RedisConnection$1, Scripts$1, parser$3; + + parser$3 = parser; + + Events$2 = Events_1; + + RedisConnection$1 = require$$2; + + IORedisConnection$1 = require$$3; + + Scripts$1 = require$$4; + + Group = (function() { + class Group { + constructor(limiterOptions = {}) { + this.deleteKey = this.deleteKey.bind(this); + this.limiterOptions = limiterOptions; + parser$3.load(this.limiterOptions, this.defaults, this); + this.Events = 
new Events$2(this); + this.instances = {}; + this.Bottleneck = Bottleneck_1; + this._startAutoCleanup(); + this.sharedConnection = this.connection != null; + if (this.connection == null) { + if (this.limiterOptions.datastore === "redis") { + this.connection = new RedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events})); + } else if (this.limiterOptions.datastore === "ioredis") { + this.connection = new IORedisConnection$1(Object.assign({}, this.limiterOptions, {Events: this.Events})); + } + } + } + + key(key = "") { + var ref; + return (ref = this.instances[key]) != null ? ref : (() => { + var limiter; + limiter = this.instances[key] = new this.Bottleneck(Object.assign(this.limiterOptions, { + id: `${this.id}-${key}`, + timeout: this.timeout, + connection: this.connection + })); + this.Events.trigger("created", limiter, key); + return limiter; + })(); + } + + async deleteKey(key = "") { + var deleted, instance; + instance = this.instances[key]; + if (this.connection) { + deleted = (await this.connection.__runCommand__(['del', ...Scripts$1.allKeys(`${this.id}-${key}`)])); + } + if (instance != null) { + delete this.instances[key]; + await instance.disconnect(); + } + return (instance != null) || deleted > 0; + } + + limiters() { + var k, ref, results, v; + ref = this.instances; + results = []; + for (k in ref) { + v = ref[k]; + results.push({ + key: k, + limiter: v + }); + } + return results; + } + + keys() { + return Object.keys(this.instances); + } + + async clusterKeys() { + var cursor, end, found, i, k, keys, len, next, start; + if (this.connection == null) { + return this.Promise.resolve(this.keys()); + } + keys = []; + cursor = null; + start = `b_${this.id}-`.length; + end = "_settings".length; + while (cursor !== 0) { + [next, found] = (await this.connection.__runCommand__(["scan", cursor != null ? cursor : 0, "match", `b_${this.id}-*_settings`, "count", 10000])); + cursor = ~~next; + for (i = 0, len = found.length; i < len; i++) { + k = found[i]; + keys.push(k.slice(start, -end)); + } + } + return keys; + } + + _startAutoCleanup() { + var base; + clearInterval(this.interval); + return typeof (base = (this.interval = setInterval(async() => { + var e, k, ref, results, time, v; + time = Date.now(); + ref = this.instances; + results = []; + for (k in ref) { + v = ref[k]; + try { + if ((await v._store.__groupCheck__(time))) { + results.push(this.deleteKey(k)); + } else { + results.push(void 0); + } + } catch (error) { + e = error; + results.push(v.Events.trigger("error", e)); + } + } + return results; + }, this.timeout / 2))).unref === "function" ? base.unref() : void 0; + } + + updateSettings(options = {}) { + parser$3.overwrite(options, this.defaults, this); + parser$3.overwrite(options, options, this.limiterOptions); + if (options.timeout != null) { + return this._startAutoCleanup(); + } + } + + disconnect(flush = true) { + var ref; + if (!this.sharedConnection) { + return (ref = this.connection) != null ? 
ref.disconnect(flush) : void 0; + } + } + + } + Group.prototype.defaults = { + timeout: 1000 * 60 * 5, + connection: null, + Promise: Promise, + id: "group-key" + }; + + return Group; + + }).call(commonjsGlobal); + + var Group_1 = Group; + + var Batcher, Events$3, parser$4; + + parser$4 = parser; + + Events$3 = Events_1; + + Batcher = (function() { + class Batcher { + constructor(options = {}) { + this.options = options; + parser$4.load(this.options, this.defaults, this); + this.Events = new Events$3(this); + this._arr = []; + this._resetPromise(); + this._lastFlush = Date.now(); + } + + _resetPromise() { + return this._promise = new this.Promise((res, rej) => { + return this._resolve = res; + }); + } + + _flush() { + clearTimeout(this._timeout); + this._lastFlush = Date.now(); + this._resolve(); + this.Events.trigger("batch", this._arr); + this._arr = []; + return this._resetPromise(); + } + + add(data) { + var ret; + this._arr.push(data); + ret = this._promise; + if (this._arr.length === this.maxSize) { + this._flush(); + } else if ((this.maxTime != null) && this._arr.length === 1) { + this._timeout = setTimeout(() => { + return this._flush(); + }, this.maxTime); + } + return ret; + } + + } + Batcher.prototype.defaults = { + maxTime: null, + maxSize: null, + Promise: Promise + }; + + return Batcher; + + }).call(commonjsGlobal); + + var Batcher_1 = Batcher; + + var require$$4$1 = () => console.log('You must import the full version of Bottleneck in order to use this feature.'); + + var require$$8 = getCjsExportFromNamespace(version$2); + + var Bottleneck, DEFAULT_PRIORITY$1, Events$4, Job$1, LocalDatastore$1, NUM_PRIORITIES$1, Queues$1, RedisDatastore$1, States$1, Sync$1, parser$5, + splice = [].splice; + + NUM_PRIORITIES$1 = 10; + + DEFAULT_PRIORITY$1 = 5; + + parser$5 = parser; + + Queues$1 = Queues_1; + + Job$1 = Job_1; + + LocalDatastore$1 = LocalDatastore_1; + + RedisDatastore$1 = require$$4$1; + + Events$4 = Events_1; + + States$1 = States_1; + + Sync$1 = Sync_1; + + Bottleneck = (function() { + class Bottleneck { + constructor(options = {}, ...invalid) { + var storeInstanceOptions, storeOptions; + this._addToQueue = this._addToQueue.bind(this); + this._validateOptions(options, invalid); + parser$5.load(options, this.instanceDefaults, this); + this._queues = new Queues$1(NUM_PRIORITIES$1); + this._scheduled = {}; + this._states = new States$1(["RECEIVED", "QUEUED", "RUNNING", "EXECUTING"].concat(this.trackDoneStatus ? ["DONE"] : [])); + this._limiter = null; + this.Events = new Events$4(this); + this._submitLock = new Sync$1("submit", this.Promise); + this._registerLock = new Sync$1("register", this.Promise); + storeOptions = parser$5.load(options, this.storeDefaults, {}); + this._store = (function() { + if (this.datastore === "redis" || this.datastore === "ioredis" || (this.connection != null)) { + storeInstanceOptions = parser$5.load(options, this.redisStoreDefaults, {}); + return new RedisDatastore$1(this, storeOptions, storeInstanceOptions); + } else if (this.datastore === "local") { + storeInstanceOptions = parser$5.load(options, this.localStoreDefaults, {}); + return new LocalDatastore$1(this, storeOptions, storeInstanceOptions); + } else { + throw new Bottleneck.prototype.BottleneckError(`Invalid datastore type: ${this.datastore}`); + } + }).call(this); + this._queues.on("leftzero", () => { + var ref; + return (ref = this._store.heartbeat) != null ? typeof ref.ref === "function" ? 
ref.ref() : void 0 : void 0; + }); + this._queues.on("zero", () => { + var ref; + return (ref = this._store.heartbeat) != null ? typeof ref.unref === "function" ? ref.unref() : void 0 : void 0; + }); + } + + _validateOptions(options, invalid) { + if (!((options != null) && typeof options === "object" && invalid.length === 0)) { + throw new Bottleneck.prototype.BottleneckError("Bottleneck v2 takes a single object argument. Refer to https://github.com/SGrondin/bottleneck#upgrading-to-v2 if you're upgrading from Bottleneck v1."); + } + } + + ready() { + return this._store.ready; + } + + clients() { + return this._store.clients; + } + + channel() { + return `b_${this.id}`; + } + + channel_client() { + return `b_${this.id}_${this._store.clientId}`; + } + + publish(message) { + return this._store.__publish__(message); + } + + disconnect(flush = true) { + return this._store.__disconnect__(flush); + } + + chain(_limiter) { + this._limiter = _limiter; + return this; + } + + queued(priority) { + return this._queues.queued(priority); + } + + clusterQueued() { + return this._store.__queued__(); + } + + empty() { + return this.queued() === 0 && this._submitLock.isEmpty(); + } + + running() { + return this._store.__running__(); + } + + done() { + return this._store.__done__(); + } + + jobStatus(id) { + return this._states.jobStatus(id); + } + + jobs(status) { + return this._states.statusJobs(status); + } + + counts() { + return this._states.statusCounts(); + } + + _randomIndex() { + return Math.random().toString(36).slice(2); + } + + check(weight = 1) { + return this._store.__check__(weight); + } + + _clearGlobalState(index) { + if (this._scheduled[index] != null) { + clearTimeout(this._scheduled[index].expiration); + delete this._scheduled[index]; + return true; + } else { + return false; + } + } + + async _free(index, job, options, eventInfo) { + var e, running; + try { + ({running} = (await this._store.__free__(index, options.weight))); + this.Events.trigger("debug", `Freed ${options.id}`, eventInfo); + if (running === 0 && this.empty()) { + return this.Events.trigger("idle"); + } + } catch (error1) { + e = error1; + return this.Events.trigger("error", e); + } + } + + _run(index, job, wait) { + var clearGlobalState, free, run; + job.doRun(); + clearGlobalState = this._clearGlobalState.bind(this, index); + run = this._run.bind(this, index, job); + free = this._free.bind(this, index, job); + return this._scheduled[index] = { + timeout: setTimeout(() => { + return job.doExecute(this._limiter, clearGlobalState, run, free); + }, wait), + expiration: job.options.expiration != null ? 
setTimeout(function() { + return job.doExpire(clearGlobalState, run, free); + }, wait + job.options.expiration) : void 0, + job: job + }; + } + + _drainOne(capacity) { + return this._registerLock.schedule(() => { + var args, index, next, options, queue; + if (this.queued() === 0) { + return this.Promise.resolve(null); + } + queue = this._queues.getFirst(); + ({options, args} = next = queue.first()); + if ((capacity != null) && options.weight > capacity) { + return this.Promise.resolve(null); + } + this.Events.trigger("debug", `Draining ${options.id}`, {args, options}); + index = this._randomIndex(); + return this._store.__register__(index, options.weight, options.expiration).then(({success, wait, reservoir}) => { + var empty; + this.Events.trigger("debug", `Drained ${options.id}`, {success, args, options}); + if (success) { + queue.shift(); + empty = this.empty(); + if (empty) { + this.Events.trigger("empty"); + } + if (reservoir === 0) { + this.Events.trigger("depleted", empty); + } + this._run(index, next, wait); + return this.Promise.resolve(options.weight); + } else { + return this.Promise.resolve(null); + } + }); + }); + } + + _drainAll(capacity, total = 0) { + return this._drainOne(capacity).then((drained) => { + var newCapacity; + if (drained != null) { + newCapacity = capacity != null ? capacity - drained : capacity; + return this._drainAll(newCapacity, total + drained); + } else { + return this.Promise.resolve(total); + } + }).catch((e) => { + return this.Events.trigger("error", e); + }); + } + + _dropAllQueued(message) { + return this._queues.shiftAll(function(job) { + return job.doDrop({message}); + }); + } + + stop(options = {}) { + var done, waitForExecuting; + options = parser$5.load(options, this.stopDefaults); + waitForExecuting = (at) => { + var finished; + finished = () => { + var counts; + counts = this._states.counts; + return (counts[0] + counts[1] + counts[2] + counts[3]) === at; + }; + return new this.Promise((resolve, reject) => { + if (finished()) { + return resolve(); + } else { + return this.on("done", () => { + if (finished()) { + this.removeAllListeners("done"); + return resolve(); + } + }); + } + }); + }; + done = options.dropWaitingJobs ? 
(this._run = function(index, next) { + return next.doDrop({ + message: options.dropErrorMessage + }); + }, this._drainOne = () => { + return this.Promise.resolve(null); + }, this._registerLock.schedule(() => { + return this._submitLock.schedule(() => { + var k, ref, v; + ref = this._scheduled; + for (k in ref) { + v = ref[k]; + if (this.jobStatus(v.job.options.id) === "RUNNING") { + clearTimeout(v.timeout); + clearTimeout(v.expiration); + v.job.doDrop({ + message: options.dropErrorMessage + }); + } + } + this._dropAllQueued(options.dropErrorMessage); + return waitForExecuting(0); + }); + })) : this.schedule({ + priority: NUM_PRIORITIES$1 - 1, + weight: 0 + }, () => { + return waitForExecuting(1); + }); + this._receive = function(job) { + return job._reject(new Bottleneck.prototype.BottleneckError(options.enqueueErrorMessage)); + }; + this.stop = () => { + return this.Promise.reject(new Bottleneck.prototype.BottleneckError("stop() has already been called")); + }; + return done; + } + + async _addToQueue(job) { + var args, blocked, error, options, reachedHWM, shifted, strategy; + ({args, options} = job); + try { + ({reachedHWM, blocked, strategy} = (await this._store.__submit__(this.queued(), options.weight))); + } catch (error1) { + error = error1; + this.Events.trigger("debug", `Could not queue ${options.id}`, {args, options, error}); + job.doDrop({error}); + return false; + } + if (blocked) { + job.doDrop(); + return true; + } else if (reachedHWM) { + shifted = strategy === Bottleneck.prototype.strategy.LEAK ? this._queues.shiftLastFrom(options.priority) : strategy === Bottleneck.prototype.strategy.OVERFLOW_PRIORITY ? this._queues.shiftLastFrom(options.priority + 1) : strategy === Bottleneck.prototype.strategy.OVERFLOW ? job : void 0; + if (shifted != null) { + shifted.doDrop(); + } + if ((shifted == null) || strategy === Bottleneck.prototype.strategy.OVERFLOW) { + if (shifted == null) { + job.doDrop(); + } + return reachedHWM; + } + } + job.doQueue(reachedHWM, blocked); + this._queues.push(job); + await this._drainAll(); + return reachedHWM; + } + + _receive(job) { + if (this._states.jobStatus(job.options.id) != null) { + job._reject(new Bottleneck.prototype.BottleneckError(`A job with the same id already exists (id=${job.options.id})`)); + return false; + } else { + job.doReceive(); + return this._submitLock.schedule(this._addToQueue, job); + } + } + + submit(...args) { + var cb, fn, job, options, ref, ref1, task; + if (typeof args[0] === "function") { + ref = args, [fn, ...args] = ref, [cb] = splice.call(args, -1); + options = parser$5.load({}, this.jobDefaults); + } else { + ref1 = args, [options, fn, ...args] = ref1, [cb] = splice.call(args, -1); + options = parser$5.load(options, this.jobDefaults); + } + task = (...args) => { + return new this.Promise(function(resolve, reject) { + return fn(...args, function(...args) { + return (args[0] != null ? reject : resolve)(args); + }); + }); + }; + job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); + job.promise.then(function(args) { + return typeof cb === "function" ? cb(...args) : void 0; + }).catch(function(args) { + if (Array.isArray(args)) { + return typeof cb === "function" ? cb(...args) : void 0; + } else { + return typeof cb === "function" ? 
cb(args) : void 0; + } + }); + return this._receive(job); + } + + schedule(...args) { + var job, options, task; + if (typeof args[0] === "function") { + [task, ...args] = args; + options = {}; + } else { + [options, task, ...args] = args; + } + job = new Job$1(task, args, options, this.jobDefaults, this.rejectOnDrop, this.Events, this._states, this.Promise); + this._receive(job); + return job.promise; + } + + wrap(fn) { + var schedule, wrapped; + schedule = this.schedule.bind(this); + wrapped = function(...args) { + return schedule(fn.bind(this), ...args); + }; + wrapped.withOptions = function(options, ...args) { + return schedule(options, fn, ...args); + }; + return wrapped; + } + + async updateSettings(options = {}) { + await this._store.__updateSettings__(parser$5.overwrite(options, this.storeDefaults)); + parser$5.overwrite(options, this.instanceDefaults, this); + return this; + } + + currentReservoir() { + return this._store.__currentReservoir__(); + } + + incrementReservoir(incr = 0) { + return this._store.__incrementReservoir__(incr); + } + + } + Bottleneck.default = Bottleneck; + + Bottleneck.Events = Events$4; + + Bottleneck.version = Bottleneck.prototype.version = require$$8.version; + + Bottleneck.strategy = Bottleneck.prototype.strategy = { + LEAK: 1, + OVERFLOW: 2, + OVERFLOW_PRIORITY: 4, + BLOCK: 3 + }; + + Bottleneck.BottleneckError = Bottleneck.prototype.BottleneckError = BottleneckError_1; + + Bottleneck.Group = Bottleneck.prototype.Group = Group_1; + + Bottleneck.RedisConnection = Bottleneck.prototype.RedisConnection = require$$2; + + Bottleneck.IORedisConnection = Bottleneck.prototype.IORedisConnection = require$$3; + + Bottleneck.Batcher = Bottleneck.prototype.Batcher = Batcher_1; + + Bottleneck.prototype.jobDefaults = { + priority: DEFAULT_PRIORITY$1, + weight: 1, + expiration: null, + id: "" + }; + + Bottleneck.prototype.storeDefaults = { + maxConcurrent: null, + minTime: 0, + highWater: null, + strategy: Bottleneck.prototype.strategy.LEAK, + penalty: null, + reservoir: null, + reservoirRefreshInterval: null, + reservoirRefreshAmount: null, + reservoirIncreaseInterval: null, + reservoirIncreaseAmount: null, + reservoirIncreaseMaximum: null + }; + + Bottleneck.prototype.localStoreDefaults = { + Promise: Promise, + timeout: null, + heartbeatInterval: 250 + }; + + Bottleneck.prototype.redisStoreDefaults = { + Promise: Promise, + timeout: null, + heartbeatInterval: 5000, + clientTimeout: 10000, + Redis: null, + clientOptions: {}, + clusterNodes: null, + clearDatastore: false, + connection: null + }; + + Bottleneck.prototype.instanceDefaults = { + datastore: "local", + connection: null, + id: "", + rejectOnDrop: true, + trackDoneStatus: false, + Promise: Promise + }; + + Bottleneck.prototype.stopDefaults = { + enqueueErrorMessage: "This limiter has been stopped and cannot accept new jobs.", + dropWaitingJobs: true, + dropErrorMessage: "This limiter has been stopped." 
+ }; + + return Bottleneck; + + }).call(commonjsGlobal); + + var Bottleneck_1 = Bottleneck; + + var lib = Bottleneck_1; + + return lib; + +}))); /***/ }), @@ -32258,1404 +27359,6 @@ if (typeof Object.create === 'function') { }.call(this)); -/***/ }), - -/***/ 3973: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -module.exports = minimatch -minimatch.Minimatch = Minimatch - -var path = (function () { try { return __nccwpck_require__(1017) } catch (e) {}}()) || { - sep: '/' -} -minimatch.sep = path.sep - -var GLOBSTAR = minimatch.GLOBSTAR = Minimatch.GLOBSTAR = {} -var expand = __nccwpck_require__(3717) - -var plTypes = { - '!': { open: '(?:(?!(?:', close: '))[^/]*?)'}, - '?': { open: '(?:', close: ')?' }, - '+': { open: '(?:', close: ')+' }, - '*': { open: '(?:', close: ')*' }, - '@': { open: '(?:', close: ')' } -} - -// any single thing other than / -// don't need to escape / when using new RegExp() -var qmark = '[^/]' - -// * => any number of characters -var star = qmark + '*?' - -// ** when dots are allowed. Anything goes, except .. and . -// not (^ or / followed by one or two dots followed by $ or /), -// followed by anything, any number of times. -var twoStarDot = '(?:(?!(?:\\\/|^)(?:\\.{1,2})($|\\\/)).)*?' - -// not a ^ or / followed by a dot, -// followed by anything, any number of times. -var twoStarNoDot = '(?:(?!(?:\\\/|^)\\.).)*?' - -// characters that need to be escaped in RegExp. -var reSpecials = charSet('().*{}+?[]^$\\!') - -// "abc" -> { a:true, b:true, c:true } -function charSet (s) { - return s.split('').reduce(function (set, c) { - set[c] = true - return set - }, {}) -} - -// normalizes slashes. -var slashSplit = /\/+/ - -minimatch.filter = filter -function filter (pattern, options) { - options = options || {} - return function (p, i, list) { - return minimatch(p, pattern, options) - } -} - -function ext (a, b) { - b = b || {} - var t = {} - Object.keys(a).forEach(function (k) { - t[k] = a[k] - }) - Object.keys(b).forEach(function (k) { - t[k] = b[k] - }) - return t -} - -minimatch.defaults = function (def) { - if (!def || typeof def !== 'object' || !Object.keys(def).length) { - return minimatch - } - - var orig = minimatch - - var m = function minimatch (p, pattern, options) { - return orig(p, pattern, ext(def, options)) - } - - m.Minimatch = function Minimatch (pattern, options) { - return new orig.Minimatch(pattern, ext(def, options)) - } - m.Minimatch.defaults = function defaults (options) { - return orig.defaults(ext(def, options)).Minimatch - } - - m.filter = function filter (pattern, options) { - return orig.filter(pattern, ext(def, options)) - } - - m.defaults = function defaults (options) { - return orig.defaults(ext(def, options)) - } - - m.makeRe = function makeRe (pattern, options) { - return orig.makeRe(pattern, ext(def, options)) - } - - m.braceExpand = function braceExpand (pattern, options) { - return orig.braceExpand(pattern, ext(def, options)) - } - - m.match = function (list, pattern, options) { - return orig.match(list, pattern, ext(def, options)) - } - - return m -} - -Minimatch.defaults = function (def) { - return minimatch.defaults(def).Minimatch -} - -function minimatch (p, pattern, options) { - assertValidPattern(pattern) - - if (!options) options = {} - - // shortcut: comments match nothing. 
- if (!options.nocomment && pattern.charAt(0) === '#') { - return false - } - - return new Minimatch(pattern, options).match(p) -} - -function Minimatch (pattern, options) { - if (!(this instanceof Minimatch)) { - return new Minimatch(pattern, options) - } - - assertValidPattern(pattern) - - if (!options) options = {} - - pattern = pattern.trim() - - // windows support: need to use /, not \ - if (!options.allowWindowsEscape && path.sep !== '/') { - pattern = pattern.split(path.sep).join('/') - } - - this.options = options - this.set = [] - this.pattern = pattern - this.regexp = null - this.negate = false - this.comment = false - this.empty = false - this.partial = !!options.partial - - // make the set of regexps etc. - this.make() -} - -Minimatch.prototype.debug = function () {} - -Minimatch.prototype.make = make -function make () { - var pattern = this.pattern - var options = this.options - - // empty patterns and comments match nothing. - if (!options.nocomment && pattern.charAt(0) === '#') { - this.comment = true - return - } - if (!pattern) { - this.empty = true - return - } - - // step 1: figure out negation, etc. - this.parseNegate() - - // step 2: expand braces - var set = this.globSet = this.braceExpand() - - if (options.debug) this.debug = function debug() { console.error.apply(console, arguments) } - - this.debug(this.pattern, set) - - // step 3: now we have a set, so turn each one into a series of path-portion - // matching patterns. - // These will be regexps, except in the case of "**", which is - // set to the GLOBSTAR object for globstar behavior, - // and will not contain any / characters - set = this.globParts = set.map(function (s) { - return s.split(slashSplit) - }) - - this.debug(this.pattern, set) - - // glob --> regexps - set = set.map(function (s, si, set) { - return s.map(this.parse, this) - }, this) - - this.debug(this.pattern, set) - - // filter out everything that didn't compile properly. - set = set.filter(function (s) { - return s.indexOf(false) === -1 - }) - - this.debug(this.pattern, set) - - this.set = set -} - -Minimatch.prototype.parseNegate = parseNegate -function parseNegate () { - var pattern = this.pattern - var negate = false - var options = this.options - var negateOffset = 0 - - if (options.nonegate) return - - for (var i = 0, l = pattern.length - ; i < l && pattern.charAt(i) === '!' - ; i++) { - negate = !negate - negateOffset++ - } - - if (negateOffset) this.pattern = pattern.substr(negateOffset) - this.negate = negate -} - -// Brace expansion: -// a{b,c}d -> abd acd -// a{b,}c -> abc ac -// a{0..3}d -> a0d a1d a2d a3d -// a{b,c{d,e}f}g -> abg acdfg acefg -// a{b,c}d{e,f}g -> abdeg acdeg abdeg abdfg -// -// Invalid sets are not expanded. -// a{2..}b -> a{2..}b -// a{b}c -> a{b}c -minimatch.braceExpand = function (pattern, options) { - return braceExpand(pattern, options) -} - -Minimatch.prototype.braceExpand = braceExpand - -function braceExpand (pattern, options) { - if (!options) { - if (this instanceof Minimatch) { - options = this.options - } else { - options = {} - } - } - - pattern = typeof pattern === 'undefined' - ? this.pattern : pattern - - assertValidPattern(pattern) - - // Thanks to Yeting Li for - // improving this regexp to avoid a ReDOS vulnerability. - if (options.nobrace || !/\{(?:(?!\{).)*\}/.test(pattern)) { - // shortcut. no need to expand. 
- return [pattern] - } - - return expand(pattern) -} - -var MAX_PATTERN_LENGTH = 1024 * 64 -var assertValidPattern = function (pattern) { - if (typeof pattern !== 'string') { - throw new TypeError('invalid pattern') - } - - if (pattern.length > MAX_PATTERN_LENGTH) { - throw new TypeError('pattern is too long') - } -} - -// parse a component of the expanded set. -// At this point, no pattern may contain "/" in it -// so we're going to return a 2d array, where each entry is the full -// pattern, split on '/', and then turned into a regular expression. -// A regexp is made at the end which joins each array with an -// escaped /, and another full one which joins each regexp with |. -// -// Following the lead of Bash 4.1, note that "**" only has special meaning -// when it is the *only* thing in a path portion. Otherwise, any series -// of * is equivalent to a single *. Globstar behavior is enabled by -// default, and can be disabled by setting options.noglobstar. -Minimatch.prototype.parse = parse -var SUBPARSE = {} -function parse (pattern, isSub) { - assertValidPattern(pattern) - - var options = this.options - - // shortcuts - if (pattern === '**') { - if (!options.noglobstar) - return GLOBSTAR - else - pattern = '*' - } - if (pattern === '') return '' - - var re = '' - var hasMagic = !!options.nocase - var escaping = false - // ? => one single character - var patternListStack = [] - var negativeLists = [] - var stateChar - var inClass = false - var reClassStart = -1 - var classStart = -1 - // . and .. never match anything that doesn't start with ., - // even when options.dot is set. - var patternStart = pattern.charAt(0) === '.' ? '' // anything - // not (start or / followed by . or .. followed by / or end) - : options.dot ? '(?!(?:^|\\\/)\\.{1,2}(?:$|\\\/))' - : '(?!\\.)' - var self = this - - function clearStateChar () { - if (stateChar) { - // we had some state-tracking character - // that wasn't consumed by this pass. - switch (stateChar) { - case '*': - re += star - hasMagic = true - break - case '?': - re += qmark - hasMagic = true - break - default: - re += '\\' + stateChar - break - } - self.debug('clearStateChar %j %j', stateChar, re) - stateChar = false - } - } - - for (var i = 0, len = pattern.length, c - ; (i < len) && (c = pattern.charAt(i)) - ; i++) { - this.debug('%s\t%s %s %j', pattern, i, re, c) - - // skip over any that are escaped. - if (escaping && reSpecials[c]) { - re += '\\' + c - escaping = false - continue - } - - switch (c) { - /* istanbul ignore next */ - case '/': { - // completely not allowed, even escaped. - // Should already be path-split by now. - return false - } - - case '\\': - clearStateChar() - escaping = true - continue - - // the various stateChar values - // for the "extglob" stuff. - case '?': - case '*': - case '+': - case '@': - case '!': - this.debug('%s\t%s %s %j <-- stateChar', pattern, i, re, c) - - // all of those are literals inside a class, except that - // the glob [!a] means [^a] in regexp - if (inClass) { - this.debug(' in class') - if (c === '!' && i === classStart + 1) c = '^' - re += c - continue - } - - // if we already have a stateChar, then it means - // that there was something like ** or +? in there. - // Handle the stateChar, then proceed with this one. - self.debug('call clearStateChar %j', stateChar) - clearStateChar() - stateChar = c - // if extglob is disabled, then +(asdf|foo) isn't a thing. - // just clear the statechar *now*, rather than even diving into - // the patternList stuff. 
- if (options.noext) clearStateChar() - continue - - case '(': - if (inClass) { - re += '(' - continue - } - - if (!stateChar) { - re += '\\(' - continue - } - - patternListStack.push({ - type: stateChar, - start: i - 1, - reStart: re.length, - open: plTypes[stateChar].open, - close: plTypes[stateChar].close - }) - // negation is (?:(?!js)[^/]*) - re += stateChar === '!' ? '(?:(?!(?:' : '(?:' - this.debug('plType %j %j', stateChar, re) - stateChar = false - continue - - case ')': - if (inClass || !patternListStack.length) { - re += '\\)' - continue - } - - clearStateChar() - hasMagic = true - var pl = patternListStack.pop() - // negation is (?:(?!js)[^/]*) - // The others are (?:) - re += pl.close - if (pl.type === '!') { - negativeLists.push(pl) - } - pl.reEnd = re.length - continue - - case '|': - if (inClass || !patternListStack.length || escaping) { - re += '\\|' - escaping = false - continue - } - - clearStateChar() - re += '|' - continue - - // these are mostly the same in regexp and glob - case '[': - // swallow any state-tracking char before the [ - clearStateChar() - - if (inClass) { - re += '\\' + c - continue - } - - inClass = true - classStart = i - reClassStart = re.length - re += c - continue - - case ']': - // a right bracket shall lose its special - // meaning and represent itself in - // a bracket expression if it occurs - // first in the list. -- POSIX.2 2.8.3.2 - if (i === classStart + 1 || !inClass) { - re += '\\' + c - escaping = false - continue - } - - // handle the case where we left a class open. - // "[z-a]" is valid, equivalent to "\[z-a\]" - // split where the last [ was, make sure we don't have - // an invalid re. if so, re-walk the contents of the - // would-be class to re-translate any characters that - // were passed through as-is - // TODO: It would probably be faster to determine this - // without a try/catch and a new RegExp, but it's tricky - // to do safely. For now, this is safe and works. - var cs = pattern.substring(classStart + 1, i) - try { - RegExp('[' + cs + ']') - } catch (er) { - // not a valid class! - var sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] + '\\]' - hasMagic = hasMagic || sp[1] - inClass = false - continue - } - - // finish up the class. - hasMagic = true - inClass = false - re += c - continue - - default: - // swallow any state char that wasn't consumed - clearStateChar() - - if (escaping) { - // no need - escaping = false - } else if (reSpecials[c] - && !(c === '^' && inClass)) { - re += '\\' - } - - re += c - - } // switch - } // for - - // handle the case where we left a class open. - // "[abc" is valid, equivalent to "\[abc" - if (inClass) { - // split where the last [ was, and escape it - // this is a huge pita. We now have to re-walk - // the contents of the would-be class to re-translate - // any characters that were passed through as-is - cs = pattern.substr(classStart + 1) - sp = this.parse(cs, SUBPARSE) - re = re.substr(0, reClassStart) + '\\[' + sp[0] - hasMagic = hasMagic || sp[1] - } - - // handle the case where we had a +( thing at the *end* - // of the pattern. - // each pattern list stack adds 3 chars, and we need to go through - // and escape any | chars that were passed through as-is for the regexp. - // Go through and escape them, taking care not to double-escape any - // | chars that were already escaped. 
- for (pl = patternListStack.pop(); pl; pl = patternListStack.pop()) { - var tail = re.slice(pl.reStart + pl.open.length) - this.debug('setting tail', re, pl) - // maybe some even number of \, then maybe 1 \, followed by a | - tail = tail.replace(/((?:\\{2}){0,64})(\\?)\|/g, function (_, $1, $2) { - if (!$2) { - // the | isn't already escaped, so escape it. - $2 = '\\' - } - - // need to escape all those slashes *again*, without escaping the - // one that we need for escaping the | character. As it works out, - // escaping an even number of slashes can be done by simply repeating - // it exactly after itself. That's why this trick works. - // - // I am sorry that you have to see this. - return $1 + $1 + $2 + '|' - }) - - this.debug('tail=%j\n %s', tail, tail, pl, re) - var t = pl.type === '*' ? star - : pl.type === '?' ? qmark - : '\\' + pl.type - - hasMagic = true - re = re.slice(0, pl.reStart) + t + '\\(' + tail - } - - // handle trailing things that only matter at the very end. - clearStateChar() - if (escaping) { - // trailing \\ - re += '\\\\' - } - - // only need to apply the nodot start if the re starts with - // something that could conceivably capture a dot - var addPatternStart = false - switch (re.charAt(0)) { - case '[': case '.': case '(': addPatternStart = true - } - - // Hack to work around lack of negative lookbehind in JS - // A pattern like: *.!(x).!(y|z) needs to ensure that a name - // like 'a.xyz.yz' doesn't match. So, the first negative - // lookahead, has to look ALL the way ahead, to the end of - // the pattern. - for (var n = negativeLists.length - 1; n > -1; n--) { - var nl = negativeLists[n] - - var nlBefore = re.slice(0, nl.reStart) - var nlFirst = re.slice(nl.reStart, nl.reEnd - 8) - var nlLast = re.slice(nl.reEnd - 8, nl.reEnd) - var nlAfter = re.slice(nl.reEnd) - - nlLast += nlAfter - - // Handle nested stuff like *(*.js|!(*.json)), where open parens - // mean that we should *not* include the ) in the bit that is considered - // "after" the negated section. - var openParensBefore = nlBefore.split('(').length - 1 - var cleanAfter = nlAfter - for (i = 0; i < openParensBefore; i++) { - cleanAfter = cleanAfter.replace(/\)[+*?]?/, '') - } - nlAfter = cleanAfter - - var dollar = '' - if (nlAfter === '' && isSub !== SUBPARSE) { - dollar = '$' - } - var newRe = nlBefore + nlFirst + nlAfter + dollar + nlLast - re = newRe - } - - // if the re is not "" at this point, then we need to make sure - // it doesn't match against an empty path part. - // Otherwise a/* will match a/, which it should not. - if (re !== '' && hasMagic) { - re = '(?=.)' + re - } - - if (addPatternStart) { - re = patternStart + re - } - - // parsing just a piece of a larger pattern. - if (isSub === SUBPARSE) { - return [re, hasMagic] - } - - // skip the regexp for non-magical patterns - // unescape anything in it, though, so that it'll be - // an exact match against a file etc. - if (!hasMagic) { - return globUnescape(pattern) - } - - var flags = options.nocase ? 'i' : '' - try { - var regExp = new RegExp('^' + re + '$', flags) - } catch (er) /* istanbul ignore next - should be impossible */ { - // If it was an invalid regular expression, then it can't match - // anything. This trick looks for a character after the end of - // the string, which is of course impossible, except in multi-line - // mode, but it's not a /m regex. 
- return new RegExp('$.') - } - - regExp._glob = pattern - regExp._src = re - - return regExp -} - -minimatch.makeRe = function (pattern, options) { - return new Minimatch(pattern, options || {}).makeRe() -} - -Minimatch.prototype.makeRe = makeRe -function makeRe () { - if (this.regexp || this.regexp === false) return this.regexp - - // at this point, this.set is a 2d array of partial - // pattern strings, or "**". - // - // It's better to use .match(). This function shouldn't - // be used, really, but it's pretty convenient sometimes, - // when you just want to work with a regex. - var set = this.set - - if (!set.length) { - this.regexp = false - return this.regexp - } - var options = this.options - - var twoStar = options.noglobstar ? star - : options.dot ? twoStarDot - : twoStarNoDot - var flags = options.nocase ? 'i' : '' - - var re = set.map(function (pattern) { - return pattern.map(function (p) { - return (p === GLOBSTAR) ? twoStar - : (typeof p === 'string') ? regExpEscape(p) - : p._src - }).join('\\\/') - }).join('|') - - // must match entire pattern - // ending in a * or ** will make it less strict. - re = '^(?:' + re + ')$' - - // can match anything, as long as it's not this. - if (this.negate) re = '^(?!' + re + ').*$' - - try { - this.regexp = new RegExp(re, flags) - } catch (ex) /* istanbul ignore next - should be impossible */ { - this.regexp = false - } - return this.regexp -} - -minimatch.match = function (list, pattern, options) { - options = options || {} - var mm = new Minimatch(pattern, options) - list = list.filter(function (f) { - return mm.match(f) - }) - if (mm.options.nonull && !list.length) { - list.push(pattern) - } - return list -} - -Minimatch.prototype.match = function match (f, partial) { - if (typeof partial === 'undefined') partial = this.partial - this.debug('match', f, this.pattern) - // short-circuit in the case of busted things. - // comments, etc. - if (this.comment) return false - if (this.empty) return f === '' - - if (f === '/' && partial) return true - - var options = this.options - - // windows: need to use /, not \ - if (path.sep !== '/') { - f = f.split(path.sep).join('/') - } - - // treat the test path as a set of pathparts. - f = f.split(slashSplit) - this.debug(this.pattern, 'split', f) - - // just ONE of the pattern sets in this.set needs to match - // in order for it to be valid. If negating, then just one - // match means that we have failed. - // Either way, return on the first hit. - - var set = this.set - this.debug(this.pattern, 'set', set) - - // Find the basename of the path by looking for the last non-empty segment - var filename - var i - for (i = f.length - 1; i >= 0; i--) { - filename = f[i] - if (filename) break - } - - for (i = 0; i < set.length; i++) { - var pattern = set[i] - var file = f - if (options.matchBase && pattern.length === 1) { - file = [filename] - } - var hit = this.matchOne(file, pattern, partial) - if (hit) { - if (options.flipNegate) return true - return !this.negate - } - } - - // didn't get any hits. this is success if it's a negative - // pattern, failure otherwise. - if (options.flipNegate) return false - return this.negate -} - -// set partial to true to test if, for example, -// "/a/b" matches the start of "/*/b/*/d" -// Partial means, if you run out of file before you run -// out of pattern, then that's fine, as long as all -// the parts match. 
-Minimatch.prototype.matchOne = function (file, pattern, partial) { - var options = this.options - - this.debug('matchOne', - { 'this': this, file: file, pattern: pattern }) - - this.debug('matchOne', file.length, pattern.length) - - for (var fi = 0, - pi = 0, - fl = file.length, - pl = pattern.length - ; (fi < fl) && (pi < pl) - ; fi++, pi++) { - this.debug('matchOne loop') - var p = pattern[pi] - var f = file[fi] - - this.debug(pattern, p, f) - - // should be impossible. - // some invalid regexp stuff in the set. - /* istanbul ignore if */ - if (p === false) return false - - if (p === GLOBSTAR) { - this.debug('GLOBSTAR', [pattern, p, f]) - - // "**" - // a/**/b/**/c would match the following: - // a/b/x/y/z/c - // a/x/y/z/b/c - // a/b/x/b/x/c - // a/b/c - // To do this, take the rest of the pattern after - // the **, and see if it would match the file remainder. - // If so, return success. - // If not, the ** "swallows" a segment, and try again. - // This is recursively awful. - // - // a/**/b/**/c matching a/b/x/y/z/c - // - a matches a - // - doublestar - // - matchOne(b/x/y/z/c, b/**/c) - // - b matches b - // - doublestar - // - matchOne(x/y/z/c, c) -> no - // - matchOne(y/z/c, c) -> no - // - matchOne(z/c, c) -> no - // - matchOne(c, c) yes, hit - var fr = fi - var pr = pi + 1 - if (pr === pl) { - this.debug('** at the end') - // a ** at the end will just swallow the rest. - // We have found a match. - // however, it will not swallow /.x, unless - // options.dot is set. - // . and .. are *never* matched by **, for explosively - // exponential reasons. - for (; fi < fl; fi++) { - if (file[fi] === '.' || file[fi] === '..' || - (!options.dot && file[fi].charAt(0) === '.')) return false - } - return true - } - - // ok, let's see if we can swallow whatever we can. - while (fr < fl) { - var swallowee = file[fr] - - this.debug('\nglobstar while', file, fr, pattern, pr, swallowee) - - // XXX remove this slice. Just pass the start index. - if (this.matchOne(file.slice(fr), pattern.slice(pr), partial)) { - this.debug('globstar found match!', fr, fl, swallowee) - // found a match. - return true - } else { - // can't swallow "." or ".." ever. - // can only swallow ".foo" when explicitly asked. - if (swallowee === '.' || swallowee === '..' || - (!options.dot && swallowee.charAt(0) === '.')) { - this.debug('dot detected!', file, fr, pattern, pr) - break - } - - // ** swallows a segment, and continue. - this.debug('globstar swallow a segment, and continue') - fr++ - } - } - - // no match was found. - // However, in partial mode, we can't say this is necessarily over. - // If there's more *pattern* left, then - /* istanbul ignore if */ - if (partial) { - // ran out of file - this.debug('\n>>> no match, partial?', file, fr, pattern, pr) - if (fr === fl) return true - } - return false - } - - // something other than ** - // non-magic patterns just have to match exactly - // patterns with magic have been turned into regexps. - var hit - if (typeof p === 'string') { - hit = f === p - this.debug('string match', p, f, hit) - } else { - hit = f.match(p) - this.debug('pattern match', p, f, hit) - } - - if (!hit) return false - } - - // Note: ending in / means that we'll get a final "" - // at the end of the pattern. This can only match a - // corresponding "" at the end of the file. - // If the file ends in /, then it can only match a - // a pattern that ends in /, unless the pattern just - // doesn't have any more for it. 
But, a/b/ should *not* - // match "a/b/*", even though "" matches against the - // [^/]*? pattern, except in partial mode, where it might - // simply not be reached yet. - // However, a/b/ should still satisfy a/* - - // now either we fell off the end of the pattern, or we're done. - if (fi === fl && pi === pl) { - // ran out of pattern and filename at the same time. - // an exact hit! - return true - } else if (fi === fl) { - // ran out of file, but still had pattern left. - // this is ok if we're doing the match as part of - // a glob fs traversal. - return partial - } else /* istanbul ignore else */ if (pi === pl) { - // ran out of pattern, still have file left. - // this is only acceptable if we're on the very last - // empty segment of a file with a trailing slash. - // a/* should match a/b/ - return (fi === fl - 1) && (file[fi] === '') - } - - // should be unreachable. - /* istanbul ignore next */ - throw new Error('wtf?') -} - -// replace stuff like \* with * -function globUnescape (s) { - return s.replace(/\\(.)/g, '$1') -} - -function regExpEscape (s) { - return s.replace(/[-[\]{}()*+?.,\\^$|#\s]/g, '\\$&') -} - - -/***/ }), - -/***/ 1223: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -var wrappy = __nccwpck_require__(2940) -module.exports = wrappy(once) -module.exports.strict = wrappy(onceStrict) - -once.proto = once(function () { - Object.defineProperty(Function.prototype, 'once', { - value: function () { - return once(this) - }, - configurable: true - }) - - Object.defineProperty(Function.prototype, 'onceStrict', { - value: function () { - return onceStrict(this) - }, - configurable: true - }) -}) - -function once (fn) { - var f = function () { - if (f.called) return f.value - f.called = true - return f.value = fn.apply(this, arguments) - } - f.called = false - return f -} - -function onceStrict (fn) { - var f = function () { - if (f.called) - throw new Error(f.onceError) - f.called = true - return f.value = fn.apply(this, arguments) - } - var name = fn.name || 'Function wrapped with `once`' - f.onceError = name + " shouldn't be called more than once" - f.called = false - return f -} - - -/***/ }), - -/***/ 8714: -/***/ ((module) => { - -"use strict"; - - -function posix(path) { - return path.charAt(0) === '/'; -} - -function win32(path) { - // https://github.com/nodejs/node/blob/b3fcc245fb25539909ef1d5eaa01dbf92e168633/lib/path.js#L56 - var splitDeviceRe = /^([a-zA-Z]:|[\\\/]{2}[^\\\/]+[\\\/]+[^\\\/]+)?([\\\/])?([\s\S]*?)$/; - var result = splitDeviceRe.exec(path); - var device = result[1] || ''; - var isUnc = Boolean(device && device.charAt(1) !== ':'); - - // UNC paths are always absolute - return Boolean(result[2] || isUnc); -} - -module.exports = process.platform === 'win32' ? win32 : posix; -module.exports.posix = posix; -module.exports.win32 = win32; - - -/***/ }), - -/***/ 4959: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -const assert = __nccwpck_require__(9491) -const path = __nccwpck_require__(1017) -const fs = __nccwpck_require__(7147) -let glob = undefined -try { - glob = __nccwpck_require__(1957) -} catch (_err) { - // treat glob as optional. 
-} - -const defaultGlobOpts = { - nosort: true, - silent: true -} - -// for EMFILE handling -let timeout = 0 - -const isWindows = (process.platform === "win32") - -const defaults = options => { - const methods = [ - 'unlink', - 'chmod', - 'stat', - 'lstat', - 'rmdir', - 'readdir' - ] - methods.forEach(m => { - options[m] = options[m] || fs[m] - m = m + 'Sync' - options[m] = options[m] || fs[m] - }) - - options.maxBusyTries = options.maxBusyTries || 3 - options.emfileWait = options.emfileWait || 1000 - if (options.glob === false) { - options.disableGlob = true - } - if (options.disableGlob !== true && glob === undefined) { - throw Error('glob dependency not found, set `options.disableGlob = true` if intentional') - } - options.disableGlob = options.disableGlob || false - options.glob = options.glob || defaultGlobOpts -} - -const rimraf = (p, options, cb) => { - if (typeof options === 'function') { - cb = options - options = {} - } - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert.equal(typeof cb, 'function', 'rimraf: callback function required') - assert(options, 'rimraf: invalid options argument provided') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - defaults(options) - - let busyTries = 0 - let errState = null - let n = 0 - - const next = (er) => { - errState = errState || er - if (--n === 0) - cb(errState) - } - - const afterGlob = (er, results) => { - if (er) - return cb(er) - - n = results.length - if (n === 0) - return cb() - - results.forEach(p => { - const CB = (er) => { - if (er) { - if ((er.code === "EBUSY" || er.code === "ENOTEMPTY" || er.code === "EPERM") && - busyTries < options.maxBusyTries) { - busyTries ++ - // try again, with the same exact callback as this one. - return setTimeout(() => rimraf_(p, options, CB), busyTries * 100) - } - - // this one won't happen if graceful-fs is used. - if (er.code === "EMFILE" && timeout < options.emfileWait) { - return setTimeout(() => rimraf_(p, options, CB), timeout ++) - } - - // already gone - if (er.code === "ENOENT") er = null - } - - timeout = 0 - next(er) - } - rimraf_(p, options, CB) - }) - } - - if (options.disableGlob || !glob.hasMagic(p)) - return afterGlob(null, [p]) - - options.lstat(p, (er, stat) => { - if (!er) - return afterGlob(null, [p]) - - glob(p, options.glob, afterGlob) - }) - -} - -// Two possible strategies. -// 1. Assume it's a file. unlink it, then do the dir stuff on EPERM or EISDIR -// 2. Assume it's a directory. readdir, then do the file stuff on ENOTDIR -// -// Both result in an extra syscall when you guess wrong. However, there -// are likely far more normal files in the world than directories. This -// is based on the assumption that a the average number of files per -// directory is >= 1. -// -// If anyone ever complains about this, then I guess the strategy could -// be made configurable somehow. But until then, YAGNI. -const rimraf_ = (p, options, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // sunos lets the root user unlink directories, which is... weird. - // so we have to lstat here and make sure it's not a dir. - options.lstat(p, (er, st) => { - if (er && er.code === "ENOENT") - return cb(null) - - // Windows can EPERM on stat. Life is suffering. 
- if (er && er.code === "EPERM" && isWindows) - fixWinEPERM(p, options, er, cb) - - if (st && st.isDirectory()) - return rmdir(p, options, er, cb) - - options.unlink(p, er => { - if (er) { - if (er.code === "ENOENT") - return cb(null) - if (er.code === "EPERM") - return (isWindows) - ? fixWinEPERM(p, options, er, cb) - : rmdir(p, options, er, cb) - if (er.code === "EISDIR") - return rmdir(p, options, er, cb) - } - return cb(er) - }) - }) -} - -const fixWinEPERM = (p, options, er, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.chmod(p, 0o666, er2 => { - if (er2) - cb(er2.code === "ENOENT" ? null : er) - else - options.stat(p, (er3, stats) => { - if (er3) - cb(er3.code === "ENOENT" ? null : er) - else if (stats.isDirectory()) - rmdir(p, options, er, cb) - else - options.unlink(p, cb) - }) - }) -} - -const fixWinEPERMSync = (p, options, er) => { - assert(p) - assert(options) - - try { - options.chmodSync(p, 0o666) - } catch (er2) { - if (er2.code === "ENOENT") - return - else - throw er - } - - let stats - try { - stats = options.statSync(p) - } catch (er3) { - if (er3.code === "ENOENT") - return - else - throw er - } - - if (stats.isDirectory()) - rmdirSync(p, options, er) - else - options.unlinkSync(p) -} - -const rmdir = (p, options, originalEr, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - // try to rmdir first, and only readdir on ENOTEMPTY or EEXIST (SunOS) - // if we guessed wrong, and it's not a directory, then - // raise the original error. - options.rmdir(p, er => { - if (er && (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM")) - rmkids(p, options, cb) - else if (er && er.code === "ENOTDIR") - cb(originalEr) - else - cb(er) - }) -} - -const rmkids = (p, options, cb) => { - assert(p) - assert(options) - assert(typeof cb === 'function') - - options.readdir(p, (er, files) => { - if (er) - return cb(er) - let n = files.length - if (n === 0) - return options.rmdir(p, cb) - let errState - files.forEach(f => { - rimraf(path.join(p, f), options, er => { - if (errState) - return - if (er) - return cb(errState = er) - if (--n === 0) - options.rmdir(p, cb) - }) - }) - }) -} - -// this looks simpler, and is strictly *faster*, but will -// tie up the JavaScript thread and fail on excessively -// deep directory trees. -const rimrafSync = (p, options) => { - options = options || {} - defaults(options) - - assert(p, 'rimraf: missing path') - assert.equal(typeof p, 'string', 'rimraf: path should be a string') - assert(options, 'rimraf: missing options') - assert.equal(typeof options, 'object', 'rimraf: options should be object') - - let results - - if (options.disableGlob || !glob.hasMagic(p)) { - results = [p] - } else { - try { - options.lstatSync(p) - results = [p] - } catch (er) { - results = glob.sync(p, options.glob) - } - } - - if (!results.length) - return - - for (let i = 0; i < results.length; i++) { - const p = results[i] - - let st - try { - st = options.lstatSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - - // Windows can EPERM on stat. Life is suffering. - if (er.code === "EPERM" && isWindows) - fixWinEPERMSync(p, options, er) - } - - try { - // sunos lets the root user unlink directories, which is... weird. - if (st && st.isDirectory()) - rmdirSync(p, options, null) - else - options.unlinkSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "EPERM") - return isWindows ? 
fixWinEPERMSync(p, options, er) : rmdirSync(p, options, er) - if (er.code !== "EISDIR") - throw er - - rmdirSync(p, options, er) - } - } -} - -const rmdirSync = (p, options, originalEr) => { - assert(p) - assert(options) - - try { - options.rmdirSync(p) - } catch (er) { - if (er.code === "ENOENT") - return - if (er.code === "ENOTDIR") - throw originalEr - if (er.code === "ENOTEMPTY" || er.code === "EEXIST" || er.code === "EPERM") - rmkidsSync(p, options) - } -} - -const rmkidsSync = (p, options) => { - assert(p) - assert(options) - options.readdirSync(p).forEach(f => rimrafSync(path.join(p, f), options)) - - // We only end up here once we got ENOTEMPTY at least once, and - // at this point, we are guaranteed to have removed all the kids. - // So, we know that it won't be ENOENT or ENOTDIR or anything else. - // try really hard to delete stuff on windows, because it has a - // PROFOUNDLY annoying habit of not closing handles promptly when - // files are deleted, resulting in spurious ENOTEMPTY errors. - const retries = isWindows ? 100 : 1 - let i = 0 - do { - let threw = true - try { - const ret = options.rmdirSync(p, options) - threw = false - return ret - } finally { - if (++i < retries && threw) - continue - } - } while (true) -} - -module.exports = rimraf -rimraf.sync = rimrafSync - - /***/ }), /***/ 8065: @@ -33735,7 +27438,6 @@ const os = __nccwpck_require__(2037); const path = __nccwpck_require__(1017); const crypto = __nccwpck_require__(6113); const _c = { fs: fs.constants, os: os.constants }; -const rimraf = __nccwpck_require__(4959); /* * The working inner variables. @@ -33764,12 +27466,32 @@ const _removeObjects = [], // API change in fs.rmdirSync leads to error when passing in a second parameter, e.g. the callback - FN_RMDIR_SYNC = fs.rmdirSync.bind(fs), - FN_RIMRAF_SYNC = rimraf.sync; + FN_RMDIR_SYNC = fs.rmdirSync.bind(fs); let _gracefulCleanup = false; +/** + * Recursively remove a directory and its contents. + * + * @param {string} dirPath path of directory to remove + * @param {Function} callback + * @private + */ +function rimraf(dirPath, callback) { + return fs.rm(dirPath, { recursive: true }, callback); +} + +/** + * Recursively remove a directory and its contents, synchronously. + * + * @param {string} dirPath path of directory to remove + * @private + */ +function FN_RIMRAF_SYNC(dirPath) { + return fs.rmSync(dirPath, { recursive: true }); +} + /** * Gets a temporary file name. * @@ -34256,7 +27978,7 @@ function _assertAndSanitizeOptions(options) { options.template = _isBlank(options.template) ? undefined : path.relative(options.dir, options.template); // for completeness' sake only, also keep (multiple) blanks if the user, purportedly sane, requests us to - options.name = _isUndefined(options.name) ? undefined : _sanitizeName(options.name); + options.name = _isUndefined(options.name) ? undefined : options.name; options.prefix = _isUndefined(options.prefix) ? '' : options.prefix; options.postfix = _isUndefined(options.postfix) ? '' : options.postfix; } @@ -34273,26 +27995,11 @@ function _assertAndSanitizeOptions(options) { * @private */ function _resolvePath(name, tmpDir) { - const sanitizedName = _sanitizeName(name); - if (sanitizedName.startsWith(tmpDir)) { - return path.resolve(sanitizedName); + if (name.startsWith(tmpDir)) { + return path.resolve(name); } else { - return path.resolve(path.join(tmpDir, sanitizedName)); - } -} - -/** - * Sanitize the specified path name by removing all quote characters. 
- * - * @param name - * @returns {string} - * @private - */ -function _sanitizeName(name) { - if (_isBlank(name)) { - return name; + return path.resolve(path.join(tmpDir, name)); } - return name.replace(/["']/g, ''); } /** @@ -34384,7 +28091,7 @@ function setGracefulCleanup() { * @returns {string} the currently configured tmp dir */ function _getTmpDir(options) { - return path.resolve(_sanitizeName(options && options.tmpdir || os.tmpdir())); + return path.resolve(options && options.tmpdir || os.tmpdir()); } // Install process exit listener @@ -56941,32 +50648,6 @@ module.exports = { } -/***/ }), - -/***/ 5030: -/***/ ((__unused_webpack_module, exports) => { - -"use strict"; - - -Object.defineProperty(exports, "__esModule", ({ value: true })); - -function getUserAgent() { - if (typeof navigator === "object" && "userAgent" in navigator) { - return navigator.userAgent; - } - - if (typeof process === "object" && process.version !== undefined) { - return `Node.js/${process.version.substr(1)} (${process.platform}; ${process.arch})`; - } - - return ""; -} - -exports.getUserAgent = getUserAgent; -//# sourceMappingURL=index.js.map - - /***/ }), /***/ 5840: @@ -57035,27 +50716,512 @@ Object.defineProperty(exports, "parse", ({ var _v = _interopRequireDefault(__nccwpck_require__(8628)); -var _v2 = _interopRequireDefault(__nccwpck_require__(6409)); +var _v2 = _interopRequireDefault(__nccwpck_require__(6409)); + +var _v3 = _interopRequireDefault(__nccwpck_require__(5122)); + +var _v4 = _interopRequireDefault(__nccwpck_require__(9120)); + +var _nil = _interopRequireDefault(__nccwpck_require__(5332)); + +var _version = _interopRequireDefault(__nccwpck_require__(1595)); + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/***/ }), + +/***/ 4569: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function md5(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('md5').update(bytes).digest(); +} + +var _default = md5; +exports["default"] = _default; + +/***/ }), + +/***/ 5332: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = '00000000-0000-0000-0000-000000000000'; +exports["default"] = _default; + +/***/ }), + +/***/ 2746: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function parse(uuid) { + if (!(0, _validate.default)(uuid)) { + throw TypeError('Invalid UUID'); + } + + let v; + const arr = new Uint8Array(16); // Parse ########-....-....-....-............ 
+ + arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; + arr[1] = v >>> 16 & 0xff; + arr[2] = v >>> 8 & 0xff; + arr[3] = v & 0xff; // Parse ........-####-....-....-............ + + arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; + arr[5] = v & 0xff; // Parse ........-....-####-....-............ + + arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; + arr[7] = v & 0xff; // Parse ........-....-....-####-............ + + arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; + arr[9] = v & 0xff; // Parse ........-....-....-....-############ + // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) + + arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; + arr[11] = v / 0x100000000 & 0xff; + arr[12] = v >>> 24 & 0xff; + arr[13] = v >>> 16 & 0xff; + arr[14] = v >>> 8 & 0xff; + arr[15] = v & 0xff; + return arr; +} + +var _default = parse; +exports["default"] = _default; + +/***/ }), + +/***/ 814: +/***/ ((__unused_webpack_module, exports) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; +var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; +exports["default"] = _default; + +/***/ }), + +/***/ 807: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = rng; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate + +let poolPtr = rnds8Pool.length; + +function rng() { + if (poolPtr > rnds8Pool.length - 16) { + _crypto.default.randomFillSync(rnds8Pool); + + poolPtr = 0; + } + + return rnds8Pool.slice(poolPtr, poolPtr += 16); +} + +/***/ }), + +/***/ 5274: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function sha1(bytes) { + if (Array.isArray(bytes)) { + bytes = Buffer.from(bytes); + } else if (typeof bytes === 'string') { + bytes = Buffer.from(bytes, 'utf8'); + } + + return _crypto.default.createHash('sha1').update(bytes).digest(); +} + +var _default = sha1; +exports["default"] = _default; + +/***/ }), + +/***/ 8950: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +/** + * Convert array of 16 byte values to UUID string format of the form: + * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX + */ +const byteToHex = []; + +for (let i = 0; i < 256; ++i) { + byteToHex.push((i + 0x100).toString(16).substr(1)); +} + +function stringify(arr, offset = 0) { + // Note: Be careful editing this code! It's been tuned for performance + // and works in ways you may not expect. 
See https://github.com/uuidjs/uuid/pull/434 + const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one + // of the following: + // - One or more input array values don't map to a hex octet (leading to + // "undefined" in the uuid) + // - Invalid input values for the RFC `version` or `variant` fields + + if (!(0, _validate.default)(uuid)) { + throw TypeError('Stringified UUID is invalid'); + } + + return uuid; +} + +var _default = stringify; +exports["default"] = _default; + +/***/ }), + +/***/ 8628: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +// **`v1()` - Generate time-based UUID** +// +// Inspired by https://github.com/LiosK/UUID.js +// and http://docs.python.org/library/uuid.html +let _nodeId; + +let _clockseq; // Previous uuid creation time + + +let _lastMSecs = 0; +let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details + +function v1(options, buf, offset) { + let i = buf && offset || 0; + const b = buf || new Array(16); + options = options || {}; + let node = options.node || _nodeId; + let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not + // specified. We do this lazily to minimize issues related to insufficient + // system entropy. See #189 + + if (node == null || clockseq == null) { + const seedBytes = options.random || (options.rng || _rng.default)(); + + if (node == null) { + // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) + node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; + } + + if (clockseq == null) { + // Per 4.2.2, randomize (14 bit) clockseq + clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; + } + } // UUID timestamps are 100 nano-second units since the Gregorian epoch, + // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so + // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' + // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. + + + let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock + // cycle to simulate higher resolution clock + + let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) + + const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression + + if (dt < 0 && options.clockseq === undefined) { + clockseq = clockseq + 1 & 0x3fff; + } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new + // time interval + + + if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { + nsecs = 0; + } // Per 4.2.1.2 Throw error if too many uuids are requested + + + if (nsecs >= 10000) { + throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); + } + + _lastMSecs = msecs; + _lastNSecs = nsecs; + _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch + + msecs += 12219292800000; // `time_low` + + const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; + b[i++] = tl >>> 24 & 0xff; + b[i++] = tl >>> 16 & 0xff; + b[i++] = tl >>> 8 & 0xff; + b[i++] = tl & 0xff; // `time_mid` + + const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; + b[i++] = tmh >>> 8 & 0xff; + b[i++] = tmh & 0xff; // `time_high_and_version` + + b[i++] = tmh >>> 24 & 0xf | 0x10; // include version + + b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) + + b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` + + b[i++] = clockseq & 0xff; // `node` + + for (let n = 0; n < 6; ++n) { + b[i + n] = node[n]; + } + + return buf || (0, _stringify.default)(b); +} + +var _default = v1; +exports["default"] = _default; + +/***/ }), + +/***/ 6409: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _v = _interopRequireDefault(__nccwpck_require__(5998)); + +var _md = _interopRequireDefault(__nccwpck_require__(4569)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +const v3 = (0, _v.default)('v3', 0x30, _md.default); +var _default = v3; +exports["default"] = _default; + +/***/ }), + +/***/ 5998: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = _default; +exports.URL = exports.DNS = void 0; + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function stringToBytes(str) { + str = unescape(encodeURIComponent(str)); // UTF8 escape + + const bytes = []; + + for (let i = 0; i < str.length; ++i) { + bytes.push(str.charCodeAt(i)); + } + + return bytes; +} + +const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; +exports.DNS = DNS; +const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; +exports.URL = URL; + +function _default(name, version, hashfunc) { + function generateUUID(value, namespace, buf, offset) { + if (typeof value === 'string') { + value = stringToBytes(value); + } + + if (typeof namespace === 'string') { + namespace = (0, _parse.default)(namespace); + } + + if (namespace.length !== 16) { + throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); + } // Compute hash of namespace and value, Per 4.3 + // Future: Use spread syntax when supported on all platforms, e.g. `bytes = + // hashfunc([...namespace, ... 
value])` + + + let bytes = new Uint8Array(16 + value.length); + bytes.set(namespace); + bytes.set(value, namespace.length); + bytes = hashfunc(bytes); + bytes[6] = bytes[6] & 0x0f | version; + bytes[8] = bytes[8] & 0x3f | 0x80; + + if (buf) { + offset = offset || 0; + + for (let i = 0; i < 16; ++i) { + buf[offset + i] = bytes[i]; + } + + return buf; + } + + return (0, _stringify.default)(bytes); + } // Function#name is not settable on some platforms (#270) + + + try { + generateUUID.name = name; // eslint-disable-next-line no-empty + } catch (err) {} // For CommonJS default export support + + + generateUUID.DNS = DNS; + generateUUID.URL = URL; + return generateUUID; +} + +/***/ }), + +/***/ 5122: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { + +"use strict"; + + +Object.defineProperty(exports, "__esModule", ({ + value: true +})); +exports["default"] = void 0; + +var _rng = _interopRequireDefault(__nccwpck_require__(807)); + +var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function v4(options, buf, offset) { + options = options || {}; -var _v3 = _interopRequireDefault(__nccwpck_require__(5122)); + const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` -var _v4 = _interopRequireDefault(__nccwpck_require__(9120)); -var _nil = _interopRequireDefault(__nccwpck_require__(5332)); + rnds[6] = rnds[6] & 0x0f | 0x40; + rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided -var _version = _interopRequireDefault(__nccwpck_require__(1595)); + if (buf) { + offset = offset || 0; -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); + for (let i = 0; i < 16; ++i) { + buf[offset + i] = rnds[i]; + } -var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); + return buf; + } -var _parse = _interopRequireDefault(__nccwpck_require__(2746)); + return (0, _stringify.default)(rnds); +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +var _default = v4; +exports["default"] = _default; /***/ }), -/***/ 4569: +/***/ 9120: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -57066,27 +51232,20 @@ Object.defineProperty(exports, "__esModule", ({ })); exports["default"] = void 0; -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +var _v = _interopRequireDefault(__nccwpck_require__(5998)); -function md5(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } +var _sha = _interopRequireDefault(__nccwpck_require__(5274)); - return _crypto.default.createHash('md5').update(bytes).digest(); -} +function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } -var _default = md5; +const v5 = (0, _v.default)('v5', 0x50, _sha.default); +var _default = v5; exports["default"] = _default; /***/ }), -/***/ 5332: -/***/ ((__unused_webpack_module, exports) => { +/***/ 6900: +/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -57095,12 +51254,21 @@ Object.defineProperty(exports, "__esModule", ({ value: true })); exports["default"] = void 0; -var _default = '00000000-0000-0000-0000-000000000000'; + +var _regex = _interopRequireDefault(__nccwpck_require__(814)); + +function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } + +function validate(uuid) { + return typeof uuid === 'string' && _regex.default.test(uuid); +} + +var _default = validate; exports["default"] = _default; /***/ }), -/***/ 2746: +/***/ 1595: /***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { "use strict"; @@ -57115,2423 +51283,3726 @@ var _validate = _interopRequireDefault(__nccwpck_require__(6900)); function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } -function parse(uuid) { +function version(uuid) { if (!(0, _validate.default)(uuid)) { throw TypeError('Invalid UUID'); } - let v; - const arr = new Uint8Array(16); // Parse ########-....-....-....-............ + return parseInt(uuid.substr(14, 1), 16); +} - arr[0] = (v = parseInt(uuid.slice(0, 8), 16)) >>> 24; - arr[1] = v >>> 16 & 0xff; - arr[2] = v >>> 8 & 0xff; - arr[3] = v & 0xff; // Parse ........-####-....-....-............ +var _default = version; +exports["default"] = _default; - arr[4] = (v = parseInt(uuid.slice(9, 13), 16)) >>> 8; - arr[5] = v & 0xff; // Parse ........-....-####-....-............ +/***/ }), - arr[6] = (v = parseInt(uuid.slice(14, 18), 16)) >>> 8; - arr[7] = v & 0xff; // Parse ........-....-....-####-............ 
+/***/ 9491: +/***/ ((module) => { - arr[8] = (v = parseInt(uuid.slice(19, 23), 16)) >>> 8; - arr[9] = v & 0xff; // Parse ........-....-....-....-############ - // (Use "/" to avoid 32-bit truncation when bit-shifting high-order bytes) +"use strict"; +module.exports = require("assert"); - arr[10] = (v = parseInt(uuid.slice(24, 36), 16)) / 0x10000000000 & 0xff; - arr[11] = v / 0x100000000 & 0xff; - arr[12] = v >>> 24 & 0xff; - arr[13] = v >>> 16 & 0xff; - arr[14] = v >>> 8 & 0xff; - arr[15] = v & 0xff; - return arr; -} +/***/ }), -var _default = parse; -exports["default"] = _default; +/***/ 852: +/***/ ((module) => { + +"use strict"; +module.exports = require("async_hooks"); /***/ }), -/***/ 814: -/***/ ((__unused_webpack_module, exports) => { +/***/ 4300: +/***/ ((module) => { "use strict"; +module.exports = require("buffer"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; -var _default = /^(?:[0-9a-f]{8}-[0-9a-f]{4}-[1-5][0-9a-f]{3}-[89ab][0-9a-f]{3}-[0-9a-f]{12}|00000000-0000-0000-0000-000000000000)$/i; -exports["default"] = _default; +/***/ 2081: +/***/ ((module) => { + +"use strict"; +module.exports = require("child_process"); /***/ }), -/***/ 807: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 6206: +/***/ ((module) => { "use strict"; +module.exports = require("console"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = rng; +/***/ 6113: +/***/ ((module) => { -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +"use strict"; +module.exports = require("crypto"); -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/***/ }), -const rnds8Pool = new Uint8Array(256); // # of random values to pre-allocate +/***/ 7643: +/***/ ((module) => { -let poolPtr = rnds8Pool.length; +"use strict"; +module.exports = require("diagnostics_channel"); -function rng() { - if (poolPtr > rnds8Pool.length - 16) { - _crypto.default.randomFillSync(rnds8Pool); +/***/ }), - poolPtr = 0; - } +/***/ 2361: +/***/ ((module) => { - return rnds8Pool.slice(poolPtr, poolPtr += 16); -} +"use strict"; +module.exports = require("events"); /***/ }), -/***/ 5274: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7147: +/***/ ((module) => { "use strict"; +module.exports = require("fs"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _crypto = _interopRequireDefault(__nccwpck_require__(6113)); +/***/ 3685: +/***/ ((module) => { -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +"use strict"; +module.exports = require("http"); -function sha1(bytes) { - if (Array.isArray(bytes)) { - bytes = Buffer.from(bytes); - } else if (typeof bytes === 'string') { - bytes = Buffer.from(bytes, 'utf8'); - } +/***/ }), - return _crypto.default.createHash('sha1').update(bytes).digest(); -} +/***/ 5158: +/***/ ((module) => { -var _default = sha1; -exports["default"] = _default; +"use strict"; +module.exports = require("http2"); /***/ }), -/***/ 8950: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 5687: +/***/ ((module) => { "use strict"; +module.exports = require("https"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); +/***/ 1808: +/***/ ((module) => { -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +"use strict"; +module.exports = require("net"); -/** - * Convert array of 16 byte values to UUID string format of the form: - * XXXXXXXX-XXXX-XXXX-XXXX-XXXXXXXXXXXX - */ -const byteToHex = []; +/***/ }), -for (let i = 0; i < 256; ++i) { - byteToHex.push((i + 0x100).toString(16).substr(1)); -} +/***/ 5673: +/***/ ((module) => { -function stringify(arr, offset = 0) { - // Note: Be careful editing this code! It's been tuned for performance - // and works in ways you may not expect. See https://github.com/uuidjs/uuid/pull/434 - const uuid = (byteToHex[arr[offset + 0]] + byteToHex[arr[offset + 1]] + byteToHex[arr[offset + 2]] + byteToHex[arr[offset + 3]] + '-' + byteToHex[arr[offset + 4]] + byteToHex[arr[offset + 5]] + '-' + byteToHex[arr[offset + 6]] + byteToHex[arr[offset + 7]] + '-' + byteToHex[arr[offset + 8]] + byteToHex[arr[offset + 9]] + '-' + byteToHex[arr[offset + 10]] + byteToHex[arr[offset + 11]] + byteToHex[arr[offset + 12]] + byteToHex[arr[offset + 13]] + byteToHex[arr[offset + 14]] + byteToHex[arr[offset + 15]]).toLowerCase(); // Consistency check for valid UUID. If this throws, it's likely due to one - // of the following: - // - One or more input array values don't map to a hex octet (leading to - // "undefined" in the uuid) - // - Invalid input values for the RFC `version` or `variant` fields +"use strict"; +module.exports = require("node:events"); - if (!(0, _validate.default)(uuid)) { - throw TypeError('Stringified UUID is invalid'); - } +/***/ }), - return uuid; -} +/***/ 4492: +/***/ ((module) => { -var _default = stringify; -exports["default"] = _default; +"use strict"; +module.exports = require("node:stream"); /***/ }), -/***/ 8628: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 7261: +/***/ ((module) => { "use strict"; +module.exports = require("node:util"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/***/ 2037: +/***/ ((module) => { -var _rng = _interopRequireDefault(__nccwpck_require__(807)); +"use strict"; +module.exports = require("os"); -var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); +/***/ }), -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +/***/ 1017: +/***/ ((module) => { -// **`v1()` - Generate time-based UUID** -// -// Inspired by https://github.com/LiosK/UUID.js -// and http://docs.python.org/library/uuid.html -let _nodeId; +"use strict"; +module.exports = require("path"); -let _clockseq; // Previous uuid creation time +/***/ }), +/***/ 4074: +/***/ ((module) => { -let _lastMSecs = 0; -let _lastNSecs = 0; // See https://github.com/uuidjs/uuid for API details +"use strict"; +module.exports = require("perf_hooks"); -function v1(options, buf, offset) { - let i = buf && offset || 0; - const b = buf || new Array(16); - options = options || {}; - let node = options.node || _nodeId; - let clockseq = options.clockseq !== undefined ? options.clockseq : _clockseq; // node and clockseq need to be initialized to random values if they're not - // specified. We do this lazily to minimize issues related to insufficient - // system entropy. See #189 +/***/ }), - if (node == null || clockseq == null) { - const seedBytes = options.random || (options.rng || _rng.default)(); +/***/ 3477: +/***/ ((module) => { - if (node == null) { - // Per 4.5, create and 48-bit node id, (47 random bits + multicast bit = 1) - node = _nodeId = [seedBytes[0] | 0x01, seedBytes[1], seedBytes[2], seedBytes[3], seedBytes[4], seedBytes[5]]; - } +"use strict"; +module.exports = require("querystring"); - if (clockseq == null) { - // Per 4.2.2, randomize (14 bit) clockseq - clockseq = _clockseq = (seedBytes[6] << 8 | seedBytes[7]) & 0x3fff; - } - } // UUID timestamps are 100 nano-second units since the Gregorian epoch, - // (1582-10-15 00:00). JSNumbers aren't precise enough for this, so - // time is handled internally as 'msecs' (integer milliseconds) and 'nsecs' - // (100-nanoseconds offset from msecs) since unix epoch, 1970-01-01 00:00. +/***/ }), +/***/ 2781: +/***/ ((module) => { - let msecs = options.msecs !== undefined ? options.msecs : Date.now(); // Per 4.2.1.2, use count of uuid's generated during the current clock - // cycle to simulate higher resolution clock +"use strict"; +module.exports = require("stream"); - let nsecs = options.nsecs !== undefined ? 
options.nsecs : _lastNSecs + 1; // Time since last uuid creation (in msecs) +/***/ }), - const dt = msecs - _lastMSecs + (nsecs - _lastNSecs) / 10000; // Per 4.2.1.2, Bump clockseq on clock regression +/***/ 5356: +/***/ ((module) => { - if (dt < 0 && options.clockseq === undefined) { - clockseq = clockseq + 1 & 0x3fff; - } // Reset nsecs if clock regresses (new clockseq) or we've moved onto a new - // time interval +"use strict"; +module.exports = require("stream/web"); +/***/ }), - if ((dt < 0 || msecs > _lastMSecs) && options.nsecs === undefined) { - nsecs = 0; - } // Per 4.2.1.2 Throw error if too many uuids are requested +/***/ 1576: +/***/ ((module) => { +"use strict"; +module.exports = require("string_decoder"); - if (nsecs >= 10000) { - throw new Error("uuid.v1(): Can't create more than 10M uuids/sec"); - } +/***/ }), - _lastMSecs = msecs; - _lastNSecs = nsecs; - _clockseq = clockseq; // Per 4.1.4 - Convert from unix epoch to Gregorian epoch +/***/ 9512: +/***/ ((module) => { - msecs += 12219292800000; // `time_low` +"use strict"; +module.exports = require("timers"); - const tl = ((msecs & 0xfffffff) * 10000 + nsecs) % 0x100000000; - b[i++] = tl >>> 24 & 0xff; - b[i++] = tl >>> 16 & 0xff; - b[i++] = tl >>> 8 & 0xff; - b[i++] = tl & 0xff; // `time_mid` +/***/ }), - const tmh = msecs / 0x100000000 * 10000 & 0xfffffff; - b[i++] = tmh >>> 8 & 0xff; - b[i++] = tmh & 0xff; // `time_high_and_version` +/***/ 4404: +/***/ ((module) => { - b[i++] = tmh >>> 24 & 0xf | 0x10; // include version +"use strict"; +module.exports = require("tls"); - b[i++] = tmh >>> 16 & 0xff; // `clock_seq_hi_and_reserved` (Per 4.2.2 - include variant) +/***/ }), - b[i++] = clockseq >>> 8 | 0x80; // `clock_seq_low` +/***/ 7310: +/***/ ((module) => { - b[i++] = clockseq & 0xff; // `node` +"use strict"; +module.exports = require("url"); - for (let n = 0; n < 6; ++n) { - b[i + n] = node[n]; - } +/***/ }), - return buf || (0, _stringify.default)(b); -} +/***/ 3837: +/***/ ((module) => { -var _default = v1; -exports["default"] = _default; +"use strict"; +module.exports = require("util"); /***/ }), -/***/ 6409: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 9830: +/***/ ((module) => { "use strict"; +module.exports = require("util/types"); +/***/ }), -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +/***/ 1267: +/***/ ((module) => { -var _v = _interopRequireDefault(__nccwpck_require__(5998)); +"use strict"; +module.exports = require("worker_threads"); -var _md = _interopRequireDefault(__nccwpck_require__(4569)); +/***/ }), -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } +/***/ 9796: +/***/ ((module) => { -const v3 = (0, _v.default)('v3', 0x30, _md.default); -var _default = v3; -exports["default"] = _default; +"use strict"; +module.exports = require("zlib"); /***/ }), -/***/ 5998: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 2960: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = _default; -exports.URL = exports.DNS = void 0; +const WritableStream = (__nccwpck_require__(4492).Writable) +const inherits = (__nccwpck_require__(7261).inherits) -var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); +const StreamSearch = __nccwpck_require__(1142) -var _parse = _interopRequireDefault(__nccwpck_require__(2746)); +const PartStream = __nccwpck_require__(1620) +const HeaderParser = __nccwpck_require__(2032) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +const DASH = 45 +const B_ONEDASH = Buffer.from('-') +const B_CRLF = Buffer.from('\r\n') +const EMPTY_FN = function () {} -function stringToBytes(str) { - str = unescape(encodeURIComponent(str)); // UTF8 escape +function Dicer (cfg) { + if (!(this instanceof Dicer)) { return new Dicer(cfg) } + WritableStream.call(this, cfg) - const bytes = []; + if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } - for (let i = 0; i < str.length; ++i) { - bytes.push(str.charCodeAt(i)); + if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } + + this._headerFirst = cfg.headerFirst + + this._dashes = 0 + this._parts = 0 + this._finished = false + this._realFinish = false + this._isPreamble = true + this._justMatched = false + this._firstWrite = true + this._inHeader = true + this._part = undefined + this._cb = undefined + this._ignoreData = false + this._partOpts = { highWaterMark: cfg.partHwm } + this._pause = false + + const self = this + this._hparser = new HeaderParser(cfg) + this._hparser.on('header', function (header) { + self._inHeader = false + self._part.emit('header', header) + }) +} +inherits(Dicer, WritableStream) + +Dicer.prototype.emit = function (ev) { + if (ev === 'finish' && !this._realFinish) { + if (!this._finished) { + const self = this + process.nextTick(function () { + self.emit('error', new Error('Unexpected end of multipart data')) + if (self._part && !self._ignoreData) { + const type = (self._isPreamble ? 'Preamble' : 'Part') + self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) + self._part.push(null) + process.nextTick(function () { + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + return + } + self._realFinish = true + self.emit('finish') + self._realFinish = false + }) + } + } else { WritableStream.prototype.emit.apply(this, arguments) } +} + +Dicer.prototype._write = function (data, encoding, cb) { + // ignore unexpected data (e.g. 
extra trailer data after finished) + if (!this._hparser && !this._bparser) { return cb() } + + if (this._headerFirst && this._isPreamble) { + if (!this._part) { + this._part = new PartStream(this._partOpts) + if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } + } + const r = this._hparser.push(data) + if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } } - return bytes; + // allows for "easier" testing + if (this._firstWrite) { + this._bparser.push(B_CRLF) + this._firstWrite = false + } + + this._bparser.push(data) + + if (this._pause) { this._cb = cb } else { cb() } +} + +Dicer.prototype.reset = function () { + this._part = undefined + this._bparser = undefined + this._hparser = undefined +} + +Dicer.prototype.setBoundary = function (boundary) { + const self = this + this._bparser = new StreamSearch('\r\n--' + boundary) + this._bparser.on('info', function (isMatch, data, start, end) { + self._oninfo(isMatch, data, start, end) + }) +} + +Dicer.prototype._ignore = function () { + if (this._part && !this._ignoreData) { + this._ignoreData = true + this._part.on('error', EMPTY_FN) + // we must perform some kind of read on the stream even though we are + // ignoring the data, otherwise node's Readable stream will not emit 'end' + // after pushing null to the stream + this._part.resume() + } } -const DNS = '6ba7b810-9dad-11d1-80b4-00c04fd430c8'; -exports.DNS = DNS; -const URL = '6ba7b811-9dad-11d1-80b4-00c04fd430c8'; -exports.URL = URL; +Dicer.prototype._oninfo = function (isMatch, data, start, end) { + let buf; const self = this; let i = 0; let r; let shouldWriteMore = true -function _default(name, version, hashfunc) { - function generateUUID(value, namespace, buf, offset) { - if (typeof value === 'string') { - value = stringToBytes(value); + if (!this._part && this._justMatched && data) { + while (this._dashes < 2 && (start + i) < end) { + if (data[start + i] === DASH) { + ++i + ++this._dashes + } else { + if (this._dashes) { buf = B_ONEDASH } + this._dashes = 0 + break + } } - - if (typeof namespace === 'string') { - namespace = (0, _parse.default)(namespace); + if (this._dashes === 2) { + if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } + this.reset() + this._finished = true + // no more parts will be added + if (self._parts === 0) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } } - - if (namespace.length !== 16) { - throw TypeError('Namespace must be array-like (16 iterable integer values, 0-255)'); - } // Compute hash of namespace and value, Per 4.3 - // Future: Use spread syntax when supported on all platforms, e.g. `bytes = - // hashfunc([...namespace, ... 
value])` - - - let bytes = new Uint8Array(16 + value.length); - bytes.set(namespace); - bytes.set(value, namespace.length); - bytes = hashfunc(bytes); - bytes[6] = bytes[6] & 0x0f | version; - bytes[8] = bytes[8] & 0x3f | 0x80; - - if (buf) { - offset = offset || 0; - - for (let i = 0; i < 16; ++i) { - buf[offset + i] = bytes[i]; + if (this._dashes) { return } + } + if (this._justMatched) { this._justMatched = false } + if (!this._part) { + this._part = new PartStream(this._partOpts) + this._part._read = function (n) { + self._unpause() + } + if (this._isPreamble && this.listenerCount('preamble') !== 0) { + this.emit('preamble', this._part) + } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { + this.emit('part', this._part) + } else { + this._ignore() + } + if (!this._isPreamble) { this._inHeader = true } + } + if (data && start < end && !this._ignoreData) { + if (this._isPreamble || !this._inHeader) { + if (buf) { shouldWriteMore = this._part.push(buf) } + shouldWriteMore = this._part.push(data.slice(start, end)) + if (!shouldWriteMore) { this._pause = true } + } else if (!this._isPreamble && this._inHeader) { + if (buf) { this._hparser.push(buf) } + r = this._hparser.push(data.slice(start, end)) + if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } + } + } + if (isMatch) { + this._hparser.reset() + if (this._isPreamble) { this._isPreamble = false } else { + if (start !== end) { + ++this._parts + this._part.on('end', function () { + if (--self._parts === 0) { + if (self._finished) { + self._realFinish = true + self.emit('finish') + self._realFinish = false + } else { + self._unpause() + } + } + }) } - - return buf; } + this._part.push(null) + this._part = undefined + this._ignoreData = false + this._justMatched = true + this._dashes = 0 + } +} - return (0, _stringify.default)(bytes); - } // Function#name is not settable on some platforms (#270) - +Dicer.prototype._unpause = function () { + if (!this._pause) { return } - try { - generateUUID.name = name; // eslint-disable-next-line no-empty - } catch (err) {} // For CommonJS default export support + this._pause = false + if (this._cb) { + const cb = this._cb + this._cb = undefined + cb() + } +} +module.exports = Dicer - generateUUID.DNS = DNS; - generateUUID.URL = URL; - return generateUUID; -} /***/ }), -/***/ 5122: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 2032: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _rng = _interopRequireDefault(__nccwpck_require__(807)); - -var _stringify = _interopRequireDefault(__nccwpck_require__(8950)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? 
obj : { default: obj }; } - -function v4(options, buf, offset) { - options = options || {}; +const EventEmitter = (__nccwpck_require__(5673).EventEmitter) +const inherits = (__nccwpck_require__(7261).inherits) +const getLimit = __nccwpck_require__(1467) - const rnds = options.random || (options.rng || _rng.default)(); // Per 4.4, set bits for version and `clock_seq_hi_and_reserved` +const StreamSearch = __nccwpck_require__(1142) +const B_DCRLF = Buffer.from('\r\n\r\n') +const RE_CRLF = /\r\n/g +const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex - rnds[6] = rnds[6] & 0x0f | 0x40; - rnds[8] = rnds[8] & 0x3f | 0x80; // Copy bytes to buffer, if provided +function HeaderParser (cfg) { + EventEmitter.call(this) - if (buf) { - offset = offset || 0; + cfg = cfg || {} + const self = this + this.nread = 0 + this.maxed = false + this.npairs = 0 + this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) + this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) + this.buffer = '' + this.header = {} + this.finished = false + this.ss = new StreamSearch(B_DCRLF) + this.ss.on('info', function (isMatch, data, start, end) { + if (data && !self.maxed) { + if (self.nread + end - start >= self.maxHeaderSize) { + end = self.maxHeaderSize - self.nread + start + self.nread = self.maxHeaderSize + self.maxed = true + } else { self.nread += (end - start) } - for (let i = 0; i < 16; ++i) { - buf[offset + i] = rnds[i]; + self.buffer += data.toString('binary', start, end) } - - return buf; - } - - return (0, _stringify.default)(rnds); + if (isMatch) { self._finish() } + }) } +inherits(HeaderParser, EventEmitter) -var _default = v4; -exports["default"] = _default; - -/***/ }), +HeaderParser.prototype.push = function (data) { + const r = this.ss.push(data) + if (this.finished) { return r } +} -/***/ 9120: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +HeaderParser.prototype.reset = function () { + this.finished = false + this.buffer = '' + this.header = {} + this.ss.reset() +} -"use strict"; +HeaderParser.prototype._finish = function () { + if (this.buffer) { this._parseHeader() } + this.ss.matches = this.ss.maxMatches + const header = this.header + this.header = {} + this.buffer = '' + this.finished = true + this.nread = this.npairs = 0 + this.maxed = false + this.emit('header', header) +} +HeaderParser.prototype._parseHeader = function () { + if (this.npairs === this.maxHeaderPairs) { return } -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; + const lines = this.buffer.split(RE_CRLF) + const len = lines.length + let m, h -var _v = _interopRequireDefault(__nccwpck_require__(5998)); + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (lines[i].length === 0) { continue } + if (lines[i][0] === '\t' || lines[i][0] === ' ') { + // folded header content + // RFC2822 says to just remove the CRLF and not the whitespace following + // it, so we follow the RFC and include the leading whitespace ... 
+ if (h) { + this.header[h][this.header[h].length - 1] += lines[i] + continue + } + } -var _sha = _interopRequireDefault(__nccwpck_require__(5274)); + const posColon = lines[i].indexOf(':') + if ( + posColon === -1 || + posColon === 0 + ) { + return + } + m = RE_HDR.exec(lines[i]) + h = m[1].toLowerCase() + this.header[h] = this.header[h] || [] + this.header[h].push((m[2] || '')) + if (++this.npairs === this.maxHeaderPairs) { break } + } +} -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +module.exports = HeaderParser -const v5 = (0, _v.default)('v5', 0x50, _sha.default); -var _default = v5; -exports["default"] = _default; /***/ }), -/***/ 6900: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1620: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; +const inherits = (__nccwpck_require__(7261).inherits) +const ReadableStream = (__nccwpck_require__(4492).Readable) -var _regex = _interopRequireDefault(__nccwpck_require__(814)); +function PartStream (opts) { + ReadableStream.call(this, opts) +} +inherits(PartStream, ReadableStream) -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +PartStream.prototype._read = function (n) {} -function validate(uuid) { - return typeof uuid === 'string' && _regex.default.test(uuid); -} +module.exports = PartStream -var _default = validate; -exports["default"] = _default; /***/ }), -/***/ 1595: -/***/ ((__unused_webpack_module, exports, __nccwpck_require__) => { +/***/ 1142: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -Object.defineProperty(exports, "__esModule", ({ - value: true -})); -exports["default"] = void 0; - -var _validate = _interopRequireDefault(__nccwpck_require__(6900)); - -function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; } +/** + * Copyright Brian White. All rights reserved. + * + * @see https://github.com/mscdex/streamsearch + * + * Permission is hereby granted, free of charge, to any person obtaining a copy + * of this software and associated documentation files (the "Software"), to + * deal in the Software without restriction, including without limitation the + * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or + * sell copies of the Software, and to permit persons to whom the Software is + * furnished to do so, subject to the following conditions: + * + * The above copyright notice and this permission notice shall be included in + * all copies or substantial portions of the Software. + * + * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR + * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, + * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE + * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER + * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING + * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS + * IN THE SOFTWARE. 
+ * + * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation + * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool + */ +const EventEmitter = (__nccwpck_require__(5673).EventEmitter) +const inherits = (__nccwpck_require__(7261).inherits) -function version(uuid) { - if (!(0, _validate.default)(uuid)) { - throw TypeError('Invalid UUID'); +function SBMH (needle) { + if (typeof needle === 'string') { + needle = Buffer.from(needle) } - return parseInt(uuid.substr(14, 1), 16); -} - -var _default = version; -exports["default"] = _default; + if (!Buffer.isBuffer(needle)) { + throw new TypeError('The needle has to be a String or a Buffer.') + } -/***/ }), + const needleLength = needle.length -/***/ 2940: -/***/ ((module) => { + if (needleLength === 0) { + throw new Error('The needle cannot be an empty String/Buffer.') + } -// Returns a wrapper function that returns a wrapped callback -// The wrapper function should do some stuff, and return a -// presumably different callback function. -// This makes sure that own properties are retained, so that -// decorations and such are not lost along the way. -module.exports = wrappy -function wrappy (fn, cb) { - if (fn && cb) return wrappy(fn)(cb) + if (needleLength > 256) { + throw new Error('The needle cannot have a length bigger than 256.') + } - if (typeof fn !== 'function') - throw new TypeError('need wrapper function') + this.maxMatches = Infinity + this.matches = 0 - Object.keys(fn).forEach(function (k) { - wrapper[k] = fn[k] - }) + this._occ = new Array(256) + .fill(needleLength) // Initialize occurrence table. + this._lookbehind_size = 0 + this._needle = needle + this._bufpos = 0 - return wrapper + this._lookbehind = Buffer.alloc(needleLength) - function wrapper() { - var args = new Array(arguments.length) - for (var i = 0; i < args.length; i++) { - args[i] = arguments[i] - } - var ret = fn.apply(this, args) - var cb = args[args.length-1] - if (typeof ret === 'function' && ret !== cb) { - Object.keys(cb).forEach(function (k) { - ret[k] = cb[k] - }) - } - return ret + // Populate occurrence table with analysis of the needle, + // ignoring last letter. + for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var + this._occ[needle[i]] = needleLength - 1 - i } } +inherits(SBMH, EventEmitter) +SBMH.prototype.reset = function () { + this._lookbehind_size = 0 + this.matches = 0 + this._bufpos = 0 +} -/***/ }), - -/***/ 2941: -/***/ ((module) => { - -module.exports = eval("require")("original-fs"); - - -/***/ }), - -/***/ 9491: -/***/ ((module) => { - -"use strict"; -module.exports = require("assert"); +SBMH.prototype.push = function (chunk, pos) { + if (!Buffer.isBuffer(chunk)) { + chunk = Buffer.from(chunk, 'binary') + } + const chlen = chunk.length + this._bufpos = pos || 0 + let r + while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } + return r +} -/***/ }), +SBMH.prototype._sbmh_feed = function (data) { + const len = data.length + const needle = this._needle + const needleLength = needle.length + const lastNeedleChar = needle[needleLength - 1] -/***/ 852: -/***/ ((module) => { + // Positive: points to a position in `data` + // pos == 3 points to data[3] + // Negative: points to a position in the lookbehind buffer + // pos == -2 points to lookbehind[lookbehind_size - 2] + let pos = -this._lookbehind_size + let ch -"use strict"; -module.exports = require("async_hooks"); + if (pos < 0) { + // Lookbehind buffer is not empty. 
Perform Boyer-Moore-Horspool + // search with character lookup code that considers both the + // lookbehind buffer and the current round's haystack data. + // + // Loop until + // there is a match. + // or until + // we've moved past the position that requires the + // lookbehind buffer. In this case we switch to the + // optimized loop. + // or until + // the character to look at lies outside the haystack. + while (pos < 0 && pos <= len - needleLength) { + ch = this._sbmh_lookup_char(data, pos + needleLength - 1) -/***/ }), + if ( + ch === lastNeedleChar && + this._sbmh_memcmp(data, pos, needleLength - 1) + ) { + this._lookbehind_size = 0 + ++this.matches + this.emit('info', true) -/***/ 4300: -/***/ ((module) => { + return (this._bufpos = pos + needleLength) + } + pos += this._occ[ch] + } -"use strict"; -module.exports = require("buffer"); + // No match. -/***/ }), + if (pos < 0) { + // There's too few data for Boyer-Moore-Horspool to run, + // so let's use a different algorithm to skip as much as + // we can. + // Forward pos until + // the trailing part of lookbehind + data + // looks like the beginning of the needle + // or until + // pos == 0 + while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } + } -/***/ 2081: -/***/ ((module) => { + if (pos >= 0) { + // Discard lookbehind buffer. + this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) + this._lookbehind_size = 0 + } else { + // Cut off part of the lookbehind buffer that has + // been processed and append the entire haystack + // into it. + const bytesToCutOff = this._lookbehind_size + pos + if (bytesToCutOff > 0) { + // The cut off data is guaranteed not to contain the needle. + this.emit('info', false, this._lookbehind, 0, bytesToCutOff) + } -"use strict"; -module.exports = require("child_process"); + this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, + this._lookbehind_size - bytesToCutOff) + this._lookbehind_size -= bytesToCutOff -/***/ }), + data.copy(this._lookbehind, this._lookbehind_size) + this._lookbehind_size += len -/***/ 6206: -/***/ ((module) => { + this._bufpos = len + return len + } + } -"use strict"; -module.exports = require("console"); + pos += (pos >= 0) * this._bufpos -/***/ }), + // Lookbehind buffer is now empty. We only need to check if the + // needle is in the haystack. + if (data.indexOf(needle, pos) !== -1) { + pos = data.indexOf(needle, pos) + ++this.matches + if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } -/***/ 6113: -/***/ ((module) => { + return (this._bufpos = pos + needleLength) + } else { + pos = len - needleLength + } -"use strict"; -module.exports = require("crypto"); + // There was no match. If there's trailing haystack data that we cannot + // match yet using the Boyer-Moore-Horspool algorithm (because the trailing + // data is less than the needle size) then match using a modified + // algorithm that starts matching from the beginning instead of the end. + // Whatever trailing data is left after running this algorithm is added to + // the lookbehind buffer. + while ( + pos < len && + ( + data[pos] !== needle[0] || + ( + (Buffer.compare( + data.subarray(pos, pos + len - pos), + needle.subarray(0, len - pos) + ) !== 0) + ) + ) + ) { + ++pos + } + if (pos < len) { + data.copy(this._lookbehind, 0, pos, pos + (len - pos)) + this._lookbehind_size = len - pos + } -/***/ }), + // Everything until pos is guaranteed not to contain needle data. 
+ if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } -/***/ 7643: -/***/ ((module) => { + this._bufpos = len + return len +} -"use strict"; -module.exports = require("diagnostics_channel"); +SBMH.prototype._sbmh_lookup_char = function (data, pos) { + return (pos < 0) + ? this._lookbehind[this._lookbehind_size + pos] + : data[pos] +} -/***/ }), +SBMH.prototype._sbmh_memcmp = function (data, pos, len) { + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } + } + return true +} -/***/ 2361: -/***/ ((module) => { +module.exports = SBMH -"use strict"; -module.exports = require("events"); /***/ }), -/***/ 7147: -/***/ ((module) => { +/***/ 727: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = require("fs"); - -/***/ }), - -/***/ 3685: -/***/ ((module) => { -"use strict"; -module.exports = require("http"); -/***/ }), +const WritableStream = (__nccwpck_require__(4492).Writable) +const { inherits } = __nccwpck_require__(7261) +const Dicer = __nccwpck_require__(2960) -/***/ 5158: -/***/ ((module) => { +const MultipartParser = __nccwpck_require__(2183) +const UrlencodedParser = __nccwpck_require__(8306) +const parseParams = __nccwpck_require__(1854) -"use strict"; -module.exports = require("http2"); +function Busboy (opts) { + if (!(this instanceof Busboy)) { return new Busboy(opts) } -/***/ }), + if (typeof opts !== 'object') { + throw new TypeError('Busboy expected an options-Object.') + } + if (typeof opts.headers !== 'object') { + throw new TypeError('Busboy expected an options-Object with headers-attribute.') + } + if (typeof opts.headers['content-type'] !== 'string') { + throw new TypeError('Missing Content-Type-header.') + } -/***/ 5687: -/***/ ((module) => { + const { + headers, + ...streamOptions + } = opts -"use strict"; -module.exports = require("https"); + this.opts = { + autoDestroy: false, + ...streamOptions + } + WritableStream.call(this, this.opts) -/***/ }), + this._done = false + this._parser = this.getParserByHeaders(headers) + this._finished = false +} +inherits(Busboy, WritableStream) -/***/ 1808: -/***/ ((module) => { +Busboy.prototype.emit = function (ev) { + if (ev === 'finish') { + if (!this._done) { + this._parser?.end() + return + } else if (this._finished) { + return + } + this._finished = true + } + WritableStream.prototype.emit.apply(this, arguments) +} -"use strict"; -module.exports = require("net"); +Busboy.prototype.getParserByHeaders = function (headers) { + const parsed = parseParams(headers['content-type']) -/***/ }), + const cfg = { + defCharset: this.opts.defCharset, + fileHwm: this.opts.fileHwm, + headers, + highWaterMark: this.opts.highWaterMark, + isPartAFile: this.opts.isPartAFile, + limits: this.opts.limits, + parsedConType: parsed, + preservePath: this.opts.preservePath + } -/***/ 5673: -/***/ ((module) => { + if (MultipartParser.detect.test(parsed[0])) { + return new MultipartParser(this, cfg) + } + if (UrlencodedParser.detect.test(parsed[0])) { + return new UrlencodedParser(this, cfg) + } + throw new Error('Unsupported Content-Type.') +} -"use strict"; -module.exports = require("node:events"); +Busboy.prototype._write = function (chunk, encoding, cb) { + this._parser.write(chunk, cb) +} -/***/ }), +module.exports = Busboy +module.exports["default"] = Busboy +module.exports.Busboy = Busboy -/***/ 4492: -/***/ ((module) => { +module.exports.Dicer = Dicer -"use strict"; 
-module.exports = require("node:stream"); /***/ }), -/***/ 7261: -/***/ ((module) => { +/***/ 2183: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { "use strict"; -module.exports = require("node:util"); -/***/ }), -/***/ 2037: -/***/ ((module) => { +// TODO: +// * support 1 nested multipart level +// (see second multipart example here: +// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) +// * support limits.fieldNameSize +// -- this will require modifications to utils.parseParams -"use strict"; -module.exports = require("os"); +const { Readable } = __nccwpck_require__(4492) +const { inherits } = __nccwpck_require__(7261) -/***/ }), +const Dicer = __nccwpck_require__(2960) -/***/ 1017: -/***/ ((module) => { +const parseParams = __nccwpck_require__(1854) +const decodeText = __nccwpck_require__(4619) +const basename = __nccwpck_require__(8647) +const getLimit = __nccwpck_require__(1467) -"use strict"; -module.exports = require("path"); +const RE_BOUNDARY = /^boundary$/i +const RE_FIELD = /^form-data$/i +const RE_CHARSET = /^charset$/i +const RE_FILENAME = /^filename$/i +const RE_NAME = /^name$/i -/***/ }), +Multipart.detect = /^multipart\/form-data/i +function Multipart (boy, cfg) { + let i + let len + const self = this + let boundary + const limits = cfg.limits + const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) + const parsedConType = cfg.parsedConType || [] + const defCharset = cfg.defCharset || 'utf8' + const preservePath = cfg.preservePath + const fileOpts = { highWaterMark: cfg.fileHwm } -/***/ 4074: -/***/ ((module) => { + for (i = 0, len = parsedConType.length; i < len; ++i) { + if (Array.isArray(parsedConType[i]) && + RE_BOUNDARY.test(parsedConType[i][0])) { + boundary = parsedConType[i][1] + break + } + } -"use strict"; -module.exports = require("perf_hooks"); + function checkFinished () { + if (nends === 0 && finished && !boy._done) { + finished = false + self.end() + } + } -/***/ }), + if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } -/***/ 3477: -/***/ ((module) => { + const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) + const filesLimit = getLimit(limits, 'files', Infinity) + const fieldsLimit = getLimit(limits, 'fields', Infinity) + const partsLimit = getLimit(limits, 'parts', Infinity) + const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) + const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) -"use strict"; -module.exports = require("querystring"); + let nfiles = 0 + let nfields = 0 + let nends = 0 + let curFile + let curField + let finished = false -/***/ }), + this._needDrain = false + this._pause = false + this._cb = undefined + this._nparts = 0 + this._boy = boy -/***/ 4521: -/***/ ((module) => { + const parserCfg = { + boundary, + maxHeaderPairs: headerPairsLimit, + maxHeaderSize: headerSizeLimit, + partHwm: fileOpts.highWaterMark, + highWaterMark: cfg.highWaterMark + } -"use strict"; -module.exports = require("readline"); + this.parser = new Dicer(parserCfg) + this.parser.on('drain', function () { + self._needDrain = false + if (self._cb && !self._pause) { + const cb = self._cb + self._cb = undefined + cb() + } + }).on('part', function onPart (part) { + if (++self._nparts > partsLimit) { + self.parser.removeListener('part', onPart) + self.parser.on('part', skipPart) + boy.hitPartsLimit 
= true + boy.emit('partsLimit') + return skipPart(part) + } -/***/ }), + // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let + // us emit 'end' early since we know the part has ended if we are already + // seeing the next part + if (curField) { + const field = curField + field.emit('end') + field.removeAllListeners('end') + } -/***/ 2781: -/***/ ((module) => { + part.on('header', function (header) { + let contype + let fieldname + let parsed + let charset + let encoding + let filename + let nsize = 0 -"use strict"; -module.exports = require("stream"); + if (header['content-type']) { + parsed = parseParams(header['content-type'][0]) + if (parsed[0]) { + contype = parsed[0].toLowerCase() + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_CHARSET.test(parsed[i][0])) { + charset = parsed[i][1].toLowerCase() + break + } + } + } + } -/***/ }), + if (contype === undefined) { contype = 'text/plain' } + if (charset === undefined) { charset = defCharset } -/***/ 5356: -/***/ ((module) => { + if (header['content-disposition']) { + parsed = parseParams(header['content-disposition'][0]) + if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } + for (i = 0, len = parsed.length; i < len; ++i) { + if (RE_NAME.test(parsed[i][0])) { + fieldname = parsed[i][1] + } else if (RE_FILENAME.test(parsed[i][0])) { + filename = parsed[i][1] + if (!preservePath) { filename = basename(filename) } + } + } + } else { return skipPart(part) } -"use strict"; -module.exports = require("stream/web"); + if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } -/***/ }), + let onData, + onEnd -/***/ 1576: -/***/ ((module) => { + if (isPartAFile(fieldname, contype, filename)) { + // file/binary field + if (nfiles === filesLimit) { + if (!boy.hitFilesLimit) { + boy.hitFilesLimit = true + boy.emit('filesLimit') + } + return skipPart(part) + } -"use strict"; -module.exports = require("string_decoder"); + ++nfiles -/***/ }), + if (boy.listenerCount('file') === 0) { + self.parser._ignore() + return + } -/***/ 9512: -/***/ ((module) => { + ++nends + const file = new FileStream(fileOpts) + curFile = file + file.on('end', function () { + --nends + self._pause = false + checkFinished() + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + }) + file._read = function (n) { + if (!self._pause) { return } + self._pause = false + if (self._cb && !self._needDrain) { + const cb = self._cb + self._cb = undefined + cb() + } + } + boy.emit('file', fieldname, file, filename, encoding, contype) -"use strict"; -module.exports = require("timers"); + onData = function (data) { + if ((nsize += data.length) > fileSizeLimit) { + const extralen = fileSizeLimit - nsize + data.length + if (extralen > 0) { file.push(data.slice(0, extralen)) } + file.truncated = true + file.bytesRead = fileSizeLimit + part.removeAllListeners('data') + file.emit('limit') + return + } else if (!file.push(data)) { self._pause = true } -/***/ }), + file.bytesRead = nsize + } -/***/ 4404: -/***/ ((module) => { + onEnd = function () { + curFile = undefined + file.push(null) + } + } else { + // non-file field + if (nfields === fieldsLimit) { + if (!boy.hitFieldsLimit) { + boy.hitFieldsLimit = true + boy.emit('fieldsLimit') + } + return skipPart(part) + } -"use strict"; -module.exports = require("tls"); + ++nfields + ++nends + let buffer = '' + let truncated = false + curField = part -/***/ }), + onData = function (data) { + if 
((nsize += data.length) > fieldSizeLimit) { + const extralen = (fieldSizeLimit - (nsize - data.length)) + buffer += data.toString('binary', 0, extralen) + truncated = true + part.removeAllListeners('data') + } else { buffer += data.toString('binary') } + } -/***/ 7310: -/***/ ((module) => { + onEnd = function () { + curField = undefined + if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } + boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) + --nends + checkFinished() + } + } -"use strict"; -module.exports = require("url"); + /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become + broken. Streams2/streams3 is a huge black box of confusion, but + somehow overriding the sync state seems to fix things again (and still + seems to work for previous node versions). + */ + part._readableState.sync = false -/***/ }), + part.on('data', onData) + part.on('end', onEnd) + }).on('error', function (err) { + if (curFile) { curFile.emit('error', err) } + }) + }).on('error', function (err) { + boy.emit('error', err) + }).on('finish', function () { + finished = true + checkFinished() + }) +} -/***/ 3837: -/***/ ((module) => { +Multipart.prototype.write = function (chunk, cb) { + const r = this.parser.write(chunk) + if (r && !this._pause) { + cb() + } else { + this._needDrain = !r + this._cb = cb + } +} -"use strict"; -module.exports = require("util"); +Multipart.prototype.end = function () { + const self = this -/***/ }), + if (self.parser.writable) { + self.parser.end() + } else if (!self._boy._done) { + process.nextTick(function () { + self._boy._done = true + self._boy.emit('finish') + }) + } +} -/***/ 9830: -/***/ ((module) => { +function skipPart (part) { + part.resume() +} -"use strict"; -module.exports = require("util/types"); +function FileStream (opts) { + Readable.call(this, opts) -/***/ }), + this.bytesRead = 0 -/***/ 1267: -/***/ ((module) => { + this.truncated = false +} -"use strict"; -module.exports = require("worker_threads"); +inherits(FileStream, Readable) -/***/ }), +FileStream.prototype._read = function (n) {} -/***/ 9796: -/***/ ((module) => { +module.exports = Multipart -"use strict"; -module.exports = require("zlib"); /***/ }), -/***/ 2960: +/***/ 8306: /***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; - - -const WritableStream = (__nccwpck_require__(4492).Writable) -const inherits = (__nccwpck_require__(7261).inherits) - -const StreamSearch = __nccwpck_require__(1142) - -const PartStream = __nccwpck_require__(1620) -const HeaderParser = __nccwpck_require__(2032) - -const DASH = 45 -const B_ONEDASH = Buffer.from('-') -const B_CRLF = Buffer.from('\r\n') -const EMPTY_FN = function () {} - -function Dicer (cfg) { - if (!(this instanceof Dicer)) { return new Dicer(cfg) } - WritableStream.call(this, cfg) +"use strict"; - if (!cfg || (!cfg.headerFirst && typeof cfg.boundary !== 'string')) { throw new TypeError('Boundary required') } - if (typeof cfg.boundary === 'string') { this.setBoundary(cfg.boundary) } else { this._bparser = undefined } +const Decoder = __nccwpck_require__(7100) +const decodeText = __nccwpck_require__(4619) +const getLimit = __nccwpck_require__(1467) - this._headerFirst = cfg.headerFirst +const RE_CHARSET = /^charset$/i - this._dashes = 0 - this._parts = 0 - this._finished = false - this._realFinish = false - this._isPreamble = true - this._justMatched = false - this._firstWrite = true - this._inHeader = true - this._part = undefined - this._cb = undefined - this._ignoreData = 
false - this._partOpts = { highWaterMark: cfg.partHwm } - this._pause = false +UrlEncoded.detect = /^application\/x-www-form-urlencoded/i +function UrlEncoded (boy, cfg) { + const limits = cfg.limits + const parsedConType = cfg.parsedConType + this.boy = boy - const self = this - this._hparser = new HeaderParser(cfg) - this._hparser.on('header', function (header) { - self._inHeader = false - self._part.emit('header', header) - }) -} -inherits(Dicer, WritableStream) + this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) + this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) + this.fieldsLimit = getLimit(limits, 'fields', Infinity) -Dicer.prototype.emit = function (ev) { - if (ev === 'finish' && !this._realFinish) { - if (!this._finished) { - const self = this - process.nextTick(function () { - self.emit('error', new Error('Unexpected end of multipart data')) - if (self._part && !self._ignoreData) { - const type = (self._isPreamble ? 'Preamble' : 'Part') - self._part.emit('error', new Error(type + ' terminated early due to unexpected end of multipart data')) - self._part.push(null) - process.nextTick(function () { - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) - return - } - self._realFinish = true - self.emit('finish') - self._realFinish = false - }) + let charset + for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var + if (Array.isArray(parsedConType[i]) && + RE_CHARSET.test(parsedConType[i][0])) { + charset = parsedConType[i][1].toLowerCase() + break } - } else { WritableStream.prototype.emit.apply(this, arguments) } -} + } -Dicer.prototype._write = function (data, encoding, cb) { - // ignore unexpected data (e.g. extra trailer data after finished) - if (!this._hparser && !this._bparser) { return cb() } + if (charset === undefined) { charset = cfg.defCharset || 'utf8' } - if (this._headerFirst && this._isPreamble) { - if (!this._part) { - this._part = new PartStream(this._partOpts) - if (this.listenerCount('preamble') !== 0) { this.emit('preamble', this._part) } else { this._ignore() } + this.decoder = new Decoder() + this.charset = charset + this._fields = 0 + this._state = 'key' + this._checkingBytes = true + this._bytesKey = 0 + this._bytesVal = 0 + this._key = '' + this._val = '' + this._keyTrunc = false + this._valTrunc = false + this._hitLimit = false +} + +UrlEncoded.prototype.write = function (data, cb) { + if (this._fields === this.fieldsLimit) { + if (!this.boy.hitFieldsLimit) { + this.boy.hitFieldsLimit = true + this.boy.emit('fieldsLimit') } - const r = this._hparser.push(data) - if (!this._inHeader && r !== undefined && r < data.length) { data = data.slice(r) } else { return cb() } + return cb() } - // allows for "easier" testing - if (this._firstWrite) { - this._bparser.push(B_CRLF) - this._firstWrite = false - } + let idxeq; let idxamp; let i; let p = 0; const len = data.length - this._bparser.push(data) + while (p < len) { + if (this._state === 'key') { + idxeq = idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x3D/* = */) { + idxeq = i + break + } else if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesKey } + } - if (this._pause) { this._cb = cb } else { cb() } -} + if (idxeq !== undefined) { + // key with assignment + if (idxeq > p) { this._key += 
this.decoder.write(data.toString('binary', p, idxeq)) } + this._state = 'val' -Dicer.prototype.reset = function () { - this._part = undefined - this._bparser = undefined - this._hparser = undefined -} + this._hitLimit = false + this._checkingBytes = true + this._val = '' + this._bytesVal = 0 + this._valTrunc = false + this.decoder.reset() -Dicer.prototype.setBoundary = function (boundary) { - const self = this - this._bparser = new StreamSearch('\r\n--' + boundary) - this._bparser.on('info', function (isMatch, data, start, end) { - self._oninfo(isMatch, data, start, end) - }) -} + p = idxeq + 1 + } else if (idxamp !== undefined) { + // key with no assignment + ++this._fields + let key; const keyTrunc = this._keyTrunc + if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } -Dicer.prototype._ignore = function () { - if (this._part && !this._ignoreData) { - this._ignoreData = true - this._part.on('error', EMPTY_FN) - // we must perform some kind of read on the stream even though we are - // ignoring the data, otherwise node's Readable stream will not emit 'end' - // after pushing null to the stream - this._part.resume() - } -} + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() -Dicer.prototype._oninfo = function (isMatch, data, start, end) { - let buf; const self = this; let i = 0; let r; let shouldWriteMore = true + if (key.length) { + this.boy.emit('field', decodeText(key, 'binary', this.charset), + '', + keyTrunc, + false) + } - if (!this._part && this._justMatched && data) { - while (this._dashes < 2 && (start + i) < end) { - if (data[start + i] === DASH) { - ++i - ++this._dashes + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
+ if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._keyTrunc = true + } } else { - if (this._dashes) { buf = B_ONEDASH } - this._dashes = 0 - break - } - } - if (this._dashes === 2) { - if ((start + i) < end && this.listenerCount('trailer') !== 0) { this.emit('trailer', data.slice(start + i, end)) } - this.reset() - this._finished = true - // no more parts will be added - if (self._parts === 0) { - self._realFinish = true - self.emit('finish') - self._realFinish = false + if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } + p = len } - } - if (this._dashes) { return } - } - if (this._justMatched) { this._justMatched = false } - if (!this._part) { - this._part = new PartStream(this._partOpts) - this._part._read = function (n) { - self._unpause() - } - if (this._isPreamble && this.listenerCount('preamble') !== 0) { - this.emit('preamble', this._part) - } else if (this._isPreamble !== true && this.listenerCount('part') !== 0) { - this.emit('part', this._part) } else { - this._ignore() - } - if (!this._isPreamble) { this._inHeader = true } - } - if (data && start < end && !this._ignoreData) { - if (this._isPreamble || !this._inHeader) { - if (buf) { shouldWriteMore = this._part.push(buf) } - shouldWriteMore = this._part.push(data.slice(start, end)) - if (!shouldWriteMore) { this._pause = true } - } else if (!this._isPreamble && this._inHeader) { - if (buf) { this._hparser.push(buf) } - r = this._hparser.push(data.slice(start, end)) - if (!this._inHeader && r !== undefined && r < end) { this._oninfo(false, data, start + r, end) } - } - } - if (isMatch) { - this._hparser.reset() - if (this._isPreamble) { this._isPreamble = false } else { - if (start !== end) { - ++this._parts - this._part.on('end', function () { - if (--self._parts === 0) { - if (self._finished) { - self._realFinish = true - self.emit('finish') - self._realFinish = false - } else { - self._unpause() - } - } - }) + idxamp = undefined + for (i = p; i < len; ++i) { + if (!this._checkingBytes) { ++p } + if (data[i] === 0x26/* & */) { + idxamp = i + break + } + if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { + this._hitLimit = true + break + } else if (this._checkingBytes) { ++this._bytesVal } + } + + if (idxamp !== undefined) { + ++this._fields + if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) + this._state = 'key' + + this._hitLimit = false + this._checkingBytes = true + this._key = '' + this._bytesKey = 0 + this._keyTrunc = false + this.decoder.reset() + + p = idxamp + 1 + if (this._fields === this.fieldsLimit) { return cb() } + } else if (this._hitLimit) { + // we may not have hit the actual limit if there are encoded bytes... 
+ if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } + p = i + if ((this._val === '' && this.fieldSizeLimit === 0) || + (this._bytesVal = this._val.length) === this.fieldSizeLimit) { + // yep, we actually did hit the limit + this._checkingBytes = false + this._valTrunc = true + } + } else { + if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } + p = len } } - this._part.push(null) - this._part = undefined - this._ignoreData = false - this._justMatched = true - this._dashes = 0 } + cb() } -Dicer.prototype._unpause = function () { - if (!this._pause) { return } +UrlEncoded.prototype.end = function () { + if (this.boy._done) { return } - this._pause = false - if (this._cb) { - const cb = this._cb - this._cb = undefined - cb() + if (this._state === 'key' && this._key.length > 0) { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + '', + this._keyTrunc, + false) + } else if (this._state === 'val') { + this.boy.emit('field', decodeText(this._key, 'binary', this.charset), + decodeText(this._val, 'binary', this.charset), + this._keyTrunc, + this._valTrunc) } + this.boy._done = true + this.boy.emit('finish') } -module.exports = Dicer +module.exports = UrlEncoded /***/ }), -/***/ 2032: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 7100: +/***/ ((module) => { "use strict"; -const EventEmitter = (__nccwpck_require__(5673).EventEmitter) -const inherits = (__nccwpck_require__(7261).inherits) -const getLimit = __nccwpck_require__(1467) +const RE_PLUS = /\+/g -const StreamSearch = __nccwpck_require__(1142) +const HEX = [ + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +] -const B_DCRLF = Buffer.from('\r\n\r\n') -const RE_CRLF = /\r\n/g -const RE_HDR = /^([^:]+):[ \t]?([\x00-\xFF]+)?$/ // eslint-disable-line no-control-regex +function Decoder () { + this.buffer = undefined +} +Decoder.prototype.write = function (str) { + // Replace '+' with ' ' before decoding + str = str.replace(RE_PLUS, ' ') + let res = '' + let i = 0; let p = 0; const len = str.length + for (; i < len; ++i) { + if (this.buffer !== undefined) { + if (!HEX[str.charCodeAt(i)]) { + res += '%' + this.buffer + this.buffer = undefined + --i // retry character + } else { + this.buffer += str[i] + ++p + if (this.buffer.length === 2) { + res += String.fromCharCode(parseInt(this.buffer, 16)) + this.buffer = undefined + } + } + } else if (str[i] === '%') { + if (i > p) { + res += str.substring(p, i) + p = i + } + this.buffer = '' + ++p + } + } + if (p < len && this.buffer === undefined) { res += str.substring(p) } + return res +} +Decoder.prototype.reset = function () { + this.buffer = undefined +} -function HeaderParser (cfg) { - EventEmitter.call(this) +module.exports = Decoder - cfg = cfg || {} - const self = this - this.nread = 0 - this.maxed = false - this.npairs = 0 - this.maxHeaderPairs = getLimit(cfg, 'maxHeaderPairs', 2000) - this.maxHeaderSize = getLimit(cfg, 'maxHeaderSize', 80 * 1024) - this.buffer = '' - this.header = {} - this.finished = false - this.ss = new StreamSearch(B_DCRLF) - this.ss.on('info', function (isMatch, data, start, end) { - if (data && !self.maxed) { - if (self.nread + 
end - start >= self.maxHeaderSize) { - end = self.maxHeaderSize - self.nread + start - self.nread = self.maxHeaderSize - self.maxed = true - } else { self.nread += (end - start) } - self.buffer += data.toString('binary', start, end) +/***/ }), + +/***/ 8647: +/***/ ((module) => { + +"use strict"; + + +module.exports = function basename (path) { + if (typeof path !== 'string') { return '' } + for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var + switch (path.charCodeAt(i)) { + case 0x2F: // '/' + case 0x5C: // '\' + path = path.slice(i + 1) + return (path === '..' || path === '.' ? '' : path) } - if (isMatch) { self._finish() } - }) + } + return (path === '..' || path === '.' ? '' : path) } -inherits(HeaderParser, EventEmitter) -HeaderParser.prototype.push = function (data) { - const r = this.ss.push(data) - if (this.finished) { return r } -} -HeaderParser.prototype.reset = function () { - this.finished = false - this.buffer = '' - this.header = {} - this.ss.reset() -} +/***/ }), -HeaderParser.prototype._finish = function () { - if (this.buffer) { this._parseHeader() } - this.ss.matches = this.ss.maxMatches - const header = this.header - this.header = {} - this.buffer = '' - this.finished = true - this.nread = this.npairs = 0 - this.maxed = false - this.emit('header', header) -} +/***/ 4619: +/***/ (function(module) { -HeaderParser.prototype._parseHeader = function () { - if (this.npairs === this.maxHeaderPairs) { return } +"use strict"; - const lines = this.buffer.split(RE_CRLF) - const len = lines.length - let m, h - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (lines[i].length === 0) { continue } - if (lines[i][0] === '\t' || lines[i][0] === ' ') { - // folded header content - // RFC2822 says to just remove the CRLF and not the whitespace following - // it, so we follow the RFC and include the leading whitespace ... - if (h) { - this.header[h][this.header[h].length - 1] += lines[i] - continue - } - } +// Node has always utf-8 +const utf8Decoder = new TextDecoder('utf-8') +const textDecoders = new Map([ + ['utf-8', utf8Decoder], + ['utf8', utf8Decoder] +]) - const posColon = lines[i].indexOf(':') - if ( - posColon === -1 || - posColon === 0 - ) { - return +function getDecoder (charset) { + let lc + while (true) { + switch (charset) { + case 'utf-8': + case 'utf8': + return decoders.utf8 + case 'latin1': + case 'ascii': // TODO: Make these a separate, strict decoder? 
+ case 'us-ascii': + case 'iso-8859-1': + case 'iso8859-1': + case 'iso88591': + case 'iso_8859-1': + case 'windows-1252': + case 'iso_8859-1:1987': + case 'cp1252': + case 'x-cp1252': + return decoders.latin1 + case 'utf16le': + case 'utf-16le': + case 'ucs2': + case 'ucs-2': + return decoders.utf16le + case 'base64': + return decoders.base64 + default: + if (lc === undefined) { + lc = true + charset = charset.toLowerCase() + continue + } + return decoders.other.bind(charset) } - m = RE_HDR.exec(lines[i]) - h = m[1].toLowerCase() - this.header[h] = this.header[h] || [] - this.header[h].push((m[2] || '')) - if (++this.npairs === this.maxHeaderPairs) { break } } } -module.exports = HeaderParser - - -/***/ }), +const decoders = { + utf8: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.utf8Slice(0, data.length) + }, -/***/ 1620: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { + latin1: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + return data + } + return data.latin1Slice(0, data.length) + }, -"use strict"; + utf16le: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.ucs2Slice(0, data.length) + }, + base64: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } + return data.base64Slice(0, data.length) + }, -const inherits = (__nccwpck_require__(7261).inherits) -const ReadableStream = (__nccwpck_require__(4492).Readable) + other: (data, sourceEncoding) => { + if (data.length === 0) { + return '' + } + if (typeof data === 'string') { + data = Buffer.from(data, sourceEncoding) + } -function PartStream (opts) { - ReadableStream.call(this, opts) + if (textDecoders.has(this.toString())) { + try { + return textDecoders.get(this).decode(data) + } catch {} + } + return typeof data === 'string' + ? data + : data.toString() + } } -inherits(PartStream, ReadableStream) -PartStream.prototype._read = function (n) {} +function decodeText (text, sourceEncoding, destEncoding) { + if (text) { + return getDecoder(destEncoding)(text, sourceEncoding) + } + return text +} -module.exports = PartStream +module.exports = decodeText /***/ }), -/***/ 1142: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +/***/ 1467: +/***/ ((module) => { "use strict"; -/** - * Copyright Brian White. All rights reserved. - * - * @see https://github.com/mscdex/streamsearch - * - * Permission is hereby granted, free of charge, to any person obtaining a copy - * of this software and associated documentation files (the "Software"), to - * deal in the Software without restriction, including without limitation the - * rights to use, copy, modify, merge, publish, distribute, sublicense, and/or - * sell copies of the Software, and to permit persons to whom the Software is - * furnished to do so, subject to the following conditions: - * - * The above copyright notice and this permission notice shall be included in - * all copies or substantial portions of the Software. - * - * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR - * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, - * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE - * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER - * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING - * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS - * IN THE SOFTWARE. - * - * Based heavily on the Streaming Boyer-Moore-Horspool C++ implementation - * by Hongli Lai at: https://github.com/FooBarWidget/boyer-moore-horspool - */ -const EventEmitter = (__nccwpck_require__(5673).EventEmitter) -const inherits = (__nccwpck_require__(7261).inherits) +module.exports = function getLimit (limits, name, defaultLimit) { + if ( + !limits || + limits[name] === undefined || + limits[name] === null + ) { return defaultLimit } -function SBMH (needle) { - if (typeof needle === 'string') { - needle = Buffer.from(needle) - } + if ( + typeof limits[name] !== 'number' || + isNaN(limits[name]) + ) { throw new TypeError('Limit ' + name + ' is not a valid number') } - if (!Buffer.isBuffer(needle)) { - throw new TypeError('The needle has to be a String or a Buffer.') - } + return limits[name] +} - const needleLength = needle.length - if (needleLength === 0) { - throw new Error('The needle cannot be an empty String/Buffer.') - } +/***/ }), - if (needleLength > 256) { - throw new Error('The needle cannot have a length bigger than 256.') - } +/***/ 1854: +/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - this.maxMatches = Infinity - this.matches = 0 +"use strict"; +/* eslint-disable object-property-newline */ - this._occ = new Array(256) - .fill(needleLength) // Initialize occurrence table. - this._lookbehind_size = 0 - this._needle = needle - this._bufpos = 0 - this._lookbehind = Buffer.alloc(needleLength) +const decodeText = __nccwpck_require__(4619) - // Populate occurrence table with analysis of the needle, - // ignoring last letter. 
- for (var i = 0; i < needleLength - 1; ++i) { // eslint-disable-line no-var - this._occ[needle[i]] = needleLength - 1 - i - } -} -inherits(SBMH, EventEmitter) +const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g -SBMH.prototype.reset = function () { - this._lookbehind_size = 0 - this.matches = 0 - this._bufpos = 0 +const EncodedLookup = { + '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', + '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', + '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', + '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', + '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', + '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', + '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', + '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', + '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', + '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', + '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', + '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', + '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', + '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', + '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', + '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', + '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', + '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', + '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', + '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', + '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', + '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', + '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', + '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', + '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', + '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', + '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', + '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', + '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', + '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', + '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', + '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', + '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', + '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', + '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', + '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', + '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', + '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', + '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', + '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', + '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', + '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', 
'%9A': '\x9a', + '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', + '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', + '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', + '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', + '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', + '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', + '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', + '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', + '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', + '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', + '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', + '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', + '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', + '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', + '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', + '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', + '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', + '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', + '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', + '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', + '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', + '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', + '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', + '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', + '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', + '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', + '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', + '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', + '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', + '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', + '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', + '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', + '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', + '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', + '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', + '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', + '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', + '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', + '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', + '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', + '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', + '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', + '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', + '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', + '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', + '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': 
'\xef', + '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', + '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', + '%f5': '\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', + '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', + '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', + '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', + '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', + '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', + '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' } -SBMH.prototype.push = function (chunk, pos) { - if (!Buffer.isBuffer(chunk)) { - chunk = Buffer.from(chunk, 'binary') - } - const chlen = chunk.length - this._bufpos = pos || 0 - let r - while (r !== chlen && this.matches < this.maxMatches) { r = this._sbmh_feed(chunk) } - return r +function encodedReplacer (match) { + return EncodedLookup[match] } -SBMH.prototype._sbmh_feed = function (data) { - const len = data.length - const needle = this._needle - const needleLength = needle.length - const lastNeedleChar = needle[needleLength - 1] - - // Positive: points to a position in `data` - // pos == 3 points to data[3] - // Negative: points to a position in the lookbehind buffer - // pos == -2 points to lookbehind[lookbehind_size - 2] - let pos = -this._lookbehind_size - let ch - - if (pos < 0) { - // Lookbehind buffer is not empty. Perform Boyer-Moore-Horspool - // search with character lookup code that considers both the - // lookbehind buffer and the current round's haystack data. - // - // Loop until - // there is a match. - // or until - // we've moved past the position that requires the - // lookbehind buffer. In this case we switch to the - // optimized loop. - // or until - // the character to look at lies outside the haystack. - while (pos < 0 && pos <= len - needleLength) { - ch = this._sbmh_lookup_char(data, pos + needleLength - 1) +const STATE_KEY = 0 +const STATE_VALUE = 1 +const STATE_CHARSET = 2 +const STATE_LANG = 3 - if ( - ch === lastNeedleChar && - this._sbmh_memcmp(data, pos, needleLength - 1) - ) { - this._lookbehind_size = 0 - ++this.matches - this.emit('info', true) +function parseParams (str) { + const res = [] + let state = STATE_KEY + let charset = '' + let inquote = false + let escaping = false + let p = 0 + let tmp = '' + const len = str.length - return (this._bufpos = pos + needleLength) + for (var i = 0; i < len; ++i) { // eslint-disable-line no-var + const char = str[i] + if (char === '\\' && inquote) { + if (escaping) { escaping = false } else { + escaping = true + continue } - pos += this._occ[ch] + } else if (char === '"') { + if (!escaping) { + if (inquote) { + inquote = false + state = STATE_KEY + } else { inquote = true } + continue + } else { escaping = false } + } else { + if (escaping && inquote) { tmp += '\\' } + escaping = false + if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { + if (state === STATE_CHARSET) { + state = STATE_LANG + charset = tmp.substring(1) + } else { state = STATE_VALUE } + tmp = '' + continue + } else if (state === STATE_KEY && + (char === '*' || char === '=') && + res.length) { + state = char === '*' + ? 
STATE_CHARSET + : STATE_VALUE + res[p] = [tmp, undefined] + tmp = '' + continue + } else if (!inquote && char === ';') { + state = STATE_KEY + if (charset) { + if (tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } + charset = '' + } else if (tmp.length) { + tmp = decodeText(tmp, 'binary', 'utf8') + } + if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } + tmp = '' + ++p + continue + } else if (!inquote && (char === ' ' || char === '\t')) { continue } } + tmp += char + } + if (charset && tmp.length) { + tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), + 'binary', + charset) + } else if (tmp) { + tmp = decodeText(tmp, 'binary', 'utf8') + } - // No match. + if (res[p] === undefined) { + if (tmp) { res[p] = tmp } + } else { res[p][1] = tmp } - if (pos < 0) { - // There's too few data for Boyer-Moore-Horspool to run, - // so let's use a different algorithm to skip as much as - // we can. - // Forward pos until - // the trailing part of lookbehind + data - // looks like the beginning of the needle - // or until - // pos == 0 - while (pos < 0 && !this._sbmh_memcmp(data, pos, len - pos)) { ++pos } - } + return res +} - if (pos >= 0) { - // Discard lookbehind buffer. - this.emit('info', false, this._lookbehind, 0, this._lookbehind_size) - this._lookbehind_size = 0 - } else { - // Cut off part of the lookbehind buffer that has - // been processed and append the entire haystack - // into it. - const bytesToCutOff = this._lookbehind_size + pos - if (bytesToCutOff > 0) { - // The cut off data is guaranteed not to contain the needle. - this.emit('info', false, this._lookbehind, 0, bytesToCutOff) - } +module.exports = parseParams - this._lookbehind.copy(this._lookbehind, 0, bytesToCutOff, - this._lookbehind_size - bytesToCutOff) - this._lookbehind_size -= bytesToCutOff - data.copy(this._lookbehind, this._lookbehind_size) - this._lookbehind_size += len +/***/ }), - this._bufpos = len - return len - } - } +/***/ 4048: +/***/ ((__unused_webpack___webpack_module__, __webpack_exports__, __nccwpck_require__) => { - pos += (pos >= 0) * this._bufpos +"use strict"; +// ESM COMPAT FLAG +__nccwpck_require__.r(__webpack_exports__); - // Lookbehind buffer is now empty. We only need to check if the - // needle is in the haystack. - if (data.indexOf(needle, pos) !== -1) { - pos = data.indexOf(needle, pos) - ++this.matches - if (pos > 0) { this.emit('info', true, data, this._bufpos, pos) } else { this.emit('info', true) } +// EXPORTS +__nccwpck_require__.d(__webpack_exports__, { + "helper": () => (/* binding */ helper), + "main": () => (/* binding */ main) +}); - return (this._bufpos = pos + needleLength) - } else { - pos = len - needleLength +// EXTERNAL MODULE: external "fs" +var external_fs_ = __nccwpck_require__(7147); +;// CONCATENATED MODULE: ./node_modules/universal-user-agent/index.js +function getUserAgent() { + if (typeof navigator === "object" && "userAgent" in navigator) { + return navigator.userAgent; } - // There was no match. If there's trailing haystack data that we cannot - // match yet using the Boyer-Moore-Horspool algorithm (because the trailing - // data is less than the needle size) then match using a modified - // algorithm that starts matching from the beginning instead of the end. - // Whatever trailing data is left after running this algorithm is added to - // the lookbehind buffer. 
- while ( - pos < len && - ( - data[pos] !== needle[0] || - ( - (Buffer.compare( - data.subarray(pos, pos + len - pos), - needle.subarray(0, len - pos) - ) !== 0) - ) - ) - ) { - ++pos - } - if (pos < len) { - data.copy(this._lookbehind, 0, pos, pos + (len - pos)) - this._lookbehind_size = len - pos + if (typeof process === "object" && process.version !== undefined) { + return `Node.js/${process.version.substr(1)} (${process.platform}; ${ + process.arch + })`; } - // Everything until pos is guaranteed not to contain needle data. - if (pos > 0) { this.emit('info', false, data, this._bufpos, pos < len ? pos : len) } - - this._bufpos = len - return len + return ""; } -SBMH.prototype._sbmh_lookup_char = function (data, pos) { - return (pos < 0) - ? this._lookbehind[this._lookbehind_size + pos] - : data[pos] -} +;// CONCATENATED MODULE: ./node_modules/before-after-hook/lib/register.js +// @ts-check -SBMH.prototype._sbmh_memcmp = function (data, pos, len) { - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - if (this._sbmh_lookup_char(data, pos + i) !== this._needle[i]) { return false } +function register(state, name, method, options) { + if (typeof method !== "function") { + throw new Error("method for before hook must be a function"); } - return true -} - -module.exports = SBMH - - -/***/ }), - -/***/ 727: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; + if (!options) { + options = {}; + } + if (Array.isArray(name)) { + return name.reverse().reduce((callback, name) => { + return register.bind(null, state, name, callback, options); + }, method)(); + } -const WritableStream = (__nccwpck_require__(4492).Writable) -const { inherits } = __nccwpck_require__(7261) -const Dicer = __nccwpck_require__(2960) + return Promise.resolve().then(() => { + if (!state.registry[name]) { + return method(options); + } -const MultipartParser = __nccwpck_require__(2183) -const UrlencodedParser = __nccwpck_require__(8306) -const parseParams = __nccwpck_require__(1854) + return state.registry[name].reduce((method, registered) => { + return registered.hook.bind(null, method, options); + }, method)(); + }); +} -function Busboy (opts) { - if (!(this instanceof Busboy)) { return new Busboy(opts) } +;// CONCATENATED MODULE: ./node_modules/before-after-hook/lib/add.js +// @ts-check - if (typeof opts !== 'object') { - throw new TypeError('Busboy expected an options-Object.') - } - if (typeof opts.headers !== 'object') { - throw new TypeError('Busboy expected an options-Object with headers-attribute.') +function addHook(state, kind, name, hook) { + const orig = hook; + if (!state.registry[name]) { + state.registry[name] = []; } - if (typeof opts.headers['content-type'] !== 'string') { - throw new TypeError('Missing Content-Type-header.') + + if (kind === "before") { + hook = (method, options) => { + return Promise.resolve() + .then(orig.bind(null, options)) + .then(method.bind(null, options)); + }; } - const { - headers, - ...streamOptions - } = opts + if (kind === "after") { + hook = (method, options) => { + let result; + return Promise.resolve() + .then(method.bind(null, options)) + .then((result_) => { + result = result_; + return orig(result, options); + }) + .then(() => { + return result; + }); + }; + } - this.opts = { - autoDestroy: false, - ...streamOptions + if (kind === "error") { + hook = (method, options) => { + return Promise.resolve() + .then(method.bind(null, options)) + .catch((error) => { + return orig(error, options); + }); + }; } - WritableStream.call(this, 
this.opts) - this._done = false - this._parser = this.getParserByHeaders(headers) - this._finished = false + state.registry[name].push({ + hook: hook, + orig: orig, + }); } -inherits(Busboy, WritableStream) -Busboy.prototype.emit = function (ev) { - if (ev === 'finish') { - if (!this._done) { - this._parser?.end() - return - } else if (this._finished) { - return - } - this._finished = true +;// CONCATENATED MODULE: ./node_modules/before-after-hook/lib/remove.js +// @ts-check + +function removeHook(state, name, method) { + if (!state.registry[name]) { + return; } - WritableStream.prototype.emit.apply(this, arguments) -} -Busboy.prototype.getParserByHeaders = function (headers) { - const parsed = parseParams(headers['content-type']) + const index = state.registry[name] + .map((registered) => { + return registered.orig; + }) + .indexOf(method); - const cfg = { - defCharset: this.opts.defCharset, - fileHwm: this.opts.fileHwm, - headers, - highWaterMark: this.opts.highWaterMark, - isPartAFile: this.opts.isPartAFile, - limits: this.opts.limits, - parsedConType: parsed, - preservePath: this.opts.preservePath + if (index === -1) { + return; } - if (MultipartParser.detect.test(parsed[0])) { - return new MultipartParser(this, cfg) - } - if (UrlencodedParser.detect.test(parsed[0])) { - return new UrlencodedParser(this, cfg) - } - throw new Error('Unsupported Content-Type.') + state.registry[name].splice(index, 1); } -Busboy.prototype._write = function (chunk, encoding, cb) { - this._parser.write(chunk, cb) -} +;// CONCATENATED MODULE: ./node_modules/before-after-hook/index.js +// @ts-check -module.exports = Busboy -module.exports["default"] = Busboy -module.exports.Busboy = Busboy -module.exports.Dicer = Dicer -/***/ }), -/***/ 2183: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { +// bind with array of arguments: https://stackoverflow.com/a/21792913 +const bind = Function.bind; +const bindable = bind.bind(bind); -"use strict"; +function bindApi(hook, state, name) { + const removeHookRef = bindable(removeHook, null).apply( + null, + name ? [state, name] : [state] + ); + hook.api = { remove: removeHookRef }; + hook.remove = removeHookRef; + ["before", "error", "after", "wrap"].forEach((kind) => { + const args = name ? 
[state, kind, name] : [state, kind]; + hook[kind] = hook.api[kind] = bindable(addHook, null).apply(null, args); + }); +} +function Singular() { + const singularHookName = Symbol("Singular"); + const singularHookState = { + registry: {}, + }; + const singularHook = register.bind(null, singularHookState, singularHookName); + bindApi(singularHook, singularHookState, singularHookName); + return singularHook; +} -// TODO: -// * support 1 nested multipart level -// (see second multipart example here: -// http://www.w3.org/TR/html401/interact/forms.html#didx-multipartform-data) -// * support limits.fieldNameSize -// -- this will require modifications to utils.parseParams +function Collection() { + const state = { + registry: {}, + }; -const { Readable } = __nccwpck_require__(4492) -const { inherits } = __nccwpck_require__(7261) + const hook = register.bind(null, state); + bindApi(hook, state); -const Dicer = __nccwpck_require__(2960) + return hook; +} -const parseParams = __nccwpck_require__(1854) -const decodeText = __nccwpck_require__(4619) -const basename = __nccwpck_require__(8647) -const getLimit = __nccwpck_require__(1467) +/* harmony default export */ const before_after_hook = ({ Singular, Collection }); -const RE_BOUNDARY = /^boundary$/i -const RE_FIELD = /^form-data$/i -const RE_CHARSET = /^charset$/i -const RE_FILENAME = /^filename$/i -const RE_NAME = /^name$/i +;// CONCATENATED MODULE: ./node_modules/@octokit/endpoint/dist-bundle/index.js +// pkg/dist-src/defaults.js -Multipart.detect = /^multipart\/form-data/i -function Multipart (boy, cfg) { - let i - let len - const self = this - let boundary - const limits = cfg.limits - const isPartAFile = cfg.isPartAFile || ((fieldName, contentType, fileName) => (contentType === 'application/octet-stream' || fileName !== undefined)) - const parsedConType = cfg.parsedConType || [] - const defCharset = cfg.defCharset || 'utf8' - const preservePath = cfg.preservePath - const fileOpts = { highWaterMark: cfg.fileHwm } - for (i = 0, len = parsedConType.length; i < len; ++i) { - if (Array.isArray(parsedConType[i]) && - RE_BOUNDARY.test(parsedConType[i][0])) { - boundary = parsedConType[i][1] - break - } - } +// pkg/dist-src/version.js +var VERSION = "0.0.0-development"; - function checkFinished () { - if (nends === 0 && finished && !boy._done) { - finished = false - self.end() - } +// pkg/dist-src/defaults.js +var userAgent = `octokit-endpoint.js/${VERSION} ${getUserAgent()}`; +var DEFAULTS = { + method: "GET", + baseUrl: "https://api.github.com", + headers: { + accept: "application/vnd.github.v3+json", + "user-agent": userAgent + }, + mediaType: { + format: "" } +}; - if (typeof boundary !== 'string') { throw new Error('Multipart: Boundary not found') } - - const fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - const fileSizeLimit = getLimit(limits, 'fileSize', Infinity) - const filesLimit = getLimit(limits, 'files', Infinity) - const fieldsLimit = getLimit(limits, 'fields', Infinity) - const partsLimit = getLimit(limits, 'parts', Infinity) - const headerPairsLimit = getLimit(limits, 'headerPairs', 2000) - const headerSizeLimit = getLimit(limits, 'headerSize', 80 * 1024) +// pkg/dist-src/util/lowercase-keys.js +function lowercaseKeys(object) { + if (!object) { + return {}; + } + return Object.keys(object).reduce((newObj, key) => { + newObj[key.toLowerCase()] = object[key]; + return newObj; + }, {}); +} - let nfiles = 0 - let nfields = 0 - let nends = 0 - let curFile - let curField - let finished = false +// 
pkg/dist-src/util/is-plain-object.js +function isPlainObject(value) { + if (typeof value !== "object" || value === null) + return false; + if (Object.prototype.toString.call(value) !== "[object Object]") + return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) + return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} - this._needDrain = false - this._pause = false - this._cb = undefined - this._nparts = 0 - this._boy = boy +// pkg/dist-src/util/merge-deep.js +function mergeDeep(defaults, options) { + const result = Object.assign({}, defaults); + Object.keys(options).forEach((key) => { + if (isPlainObject(options[key])) { + if (!(key in defaults)) + Object.assign(result, { [key]: options[key] }); + else + result[key] = mergeDeep(defaults[key], options[key]); + } else { + Object.assign(result, { [key]: options[key] }); + } + }); + return result; +} - const parserCfg = { - boundary, - maxHeaderPairs: headerPairsLimit, - maxHeaderSize: headerSizeLimit, - partHwm: fileOpts.highWaterMark, - highWaterMark: cfg.highWaterMark +// pkg/dist-src/util/remove-undefined-properties.js +function removeUndefinedProperties(obj) { + for (const key in obj) { + if (obj[key] === void 0) { + delete obj[key]; + } } + return obj; +} - this.parser = new Dicer(parserCfg) - this.parser.on('drain', function () { - self._needDrain = false - if (self._cb && !self._pause) { - const cb = self._cb - self._cb = undefined - cb() - } - }).on('part', function onPart (part) { - if (++self._nparts > partsLimit) { - self.parser.removeListener('part', onPart) - self.parser.on('part', skipPart) - boy.hitPartsLimit = true - boy.emit('partsLimit') - return skipPart(part) +// pkg/dist-src/merge.js +function merge(defaults, route, options) { + if (typeof route === "string") { + let [method, url] = route.split(" "); + options = Object.assign(url ? { method, url } : { url: method }, options); + } else { + options = Object.assign({}, route); + } + options.headers = lowercaseKeys(options.headers); + removeUndefinedProperties(options); + removeUndefinedProperties(options.headers); + const mergedOptions = mergeDeep(defaults || {}, options); + if (options.url === "/graphql") { + if (defaults && defaults.mediaType.previews?.length) { + mergedOptions.mediaType.previews = defaults.mediaType.previews.filter( + (preview) => !mergedOptions.mediaType.previews.includes(preview) + ).concat(mergedOptions.mediaType.previews); } + mergedOptions.mediaType.previews = (mergedOptions.mediaType.previews || []).map((preview) => preview.replace(/-preview/, "")); + } + return mergedOptions; +} - // hack because streams2 _always_ doesn't emit 'end' until nextTick, so let - // us emit 'end' early since we know the part has ended if we are already - // seeing the next part - if (curField) { - const field = curField - field.emit('end') - field.removeAllListeners('end') +// pkg/dist-src/util/add-query-parameters.js +function addQueryParameters(url, parameters) { + const separator = /\?/.test(url) ? 
"&" : "?"; + const names = Object.keys(parameters); + if (names.length === 0) { + return url; + } + return url + separator + names.map((name) => { + if (name === "q") { + return "q=" + parameters.q.split("+").map(encodeURIComponent).join("+"); } + return `${name}=${encodeURIComponent(parameters[name])}`; + }).join("&"); +} - part.on('header', function (header) { - let contype - let fieldname - let parsed - let charset - let encoding - let filename - let nsize = 0 - - if (header['content-type']) { - parsed = parseParams(header['content-type'][0]) - if (parsed[0]) { - contype = parsed[0].toLowerCase() - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_CHARSET.test(parsed[i][0])) { - charset = parsed[i][1].toLowerCase() - break - } - } - } - } - - if (contype === undefined) { contype = 'text/plain' } - if (charset === undefined) { charset = defCharset } - - if (header['content-disposition']) { - parsed = parseParams(header['content-disposition'][0]) - if (!RE_FIELD.test(parsed[0])) { return skipPart(part) } - for (i = 0, len = parsed.length; i < len; ++i) { - if (RE_NAME.test(parsed[i][0])) { - fieldname = parsed[i][1] - } else if (RE_FILENAME.test(parsed[i][0])) { - filename = parsed[i][1] - if (!preservePath) { filename = basename(filename) } - } - } - } else { return skipPart(part) } - - if (header['content-transfer-encoding']) { encoding = header['content-transfer-encoding'][0].toLowerCase() } else { encoding = '7bit' } - - let onData, - onEnd - - if (isPartAFile(fieldname, contype, filename)) { - // file/binary field - if (nfiles === filesLimit) { - if (!boy.hitFilesLimit) { - boy.hitFilesLimit = true - boy.emit('filesLimit') - } - return skipPart(part) - } +// pkg/dist-src/util/extract-url-variable-names.js +var urlVariableRegex = /\{[^}]+\}/g; +function removeNonChars(variableName) { + return variableName.replace(/^\W+|\W+$/g, "").split(/,/); +} +function extractUrlVariableNames(url) { + const matches = url.match(urlVariableRegex); + if (!matches) { + return []; + } + return matches.map(removeNonChars).reduce((a, b) => a.concat(b), []); +} - ++nfiles +// pkg/dist-src/util/omit.js +function omit(object, keysToOmit) { + const result = { __proto__: null }; + for (const key of Object.keys(object)) { + if (keysToOmit.indexOf(key) === -1) { + result[key] = object[key]; + } + } + return result; +} - if (boy.listenerCount('file') === 0) { - self.parser._ignore() - return +// pkg/dist-src/util/url-template.js +function encodeReserved(str) { + return str.split(/(%[0-9A-Fa-f]{2})/g).map(function(part) { + if (!/%[0-9A-Fa-f]/.test(part)) { + part = encodeURI(part).replace(/%5B/g, "[").replace(/%5D/g, "]"); + } + return part; + }).join(""); +} +function encodeUnreserved(str) { + return encodeURIComponent(str).replace(/[!'()*]/g, function(c) { + return "%" + c.charCodeAt(0).toString(16).toUpperCase(); + }); +} +function encodeValue(operator, value, key) { + value = operator === "+" || operator === "#" ? 
encodeReserved(value) : encodeUnreserved(value); + if (key) { + return encodeUnreserved(key) + "=" + value; + } else { + return value; + } +} +function isDefined(value) { + return value !== void 0 && value !== null; +} +function isKeyOperator(operator) { + return operator === ";" || operator === "&" || operator === "?"; +} +function getValues(context, operator, key, modifier) { + var value = context[key], result = []; + if (isDefined(value) && value !== "") { + if (typeof value === "string" || typeof value === "number" || typeof value === "boolean") { + value = value.toString(); + if (modifier && modifier !== "*") { + value = value.substring(0, parseInt(modifier, 10)); + } + result.push( + encodeValue(operator, value, isKeyOperator(operator) ? key : "") + ); + } else { + if (modifier === "*") { + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + result.push( + encodeValue(operator, value2, isKeyOperator(operator) ? key : "") + ); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + result.push(encodeValue(operator, value[k], k)); + } + }); } - - ++nends - const file = new FileStream(fileOpts) - curFile = file - file.on('end', function () { - --nends - self._pause = false - checkFinished() - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } - }) - file._read = function (n) { - if (!self._pause) { return } - self._pause = false - if (self._cb && !self._needDrain) { - const cb = self._cb - self._cb = undefined - cb() - } + } else { + const tmp = []; + if (Array.isArray(value)) { + value.filter(isDefined).forEach(function(value2) { + tmp.push(encodeValue(operator, value2)); + }); + } else { + Object.keys(value).forEach(function(k) { + if (isDefined(value[k])) { + tmp.push(encodeUnreserved(k)); + tmp.push(encodeValue(operator, value[k].toString())); + } + }); } - boy.emit('file', fieldname, file, filename, encoding, contype) - - onData = function (data) { - if ((nsize += data.length) > fileSizeLimit) { - const extralen = fileSizeLimit - nsize + data.length - if (extralen > 0) { file.push(data.slice(0, extralen)) } - file.truncated = true - file.bytesRead = fileSizeLimit - part.removeAllListeners('data') - file.emit('limit') - return - } else if (!file.push(data)) { self._pause = true } - - file.bytesRead = nsize + if (isKeyOperator(operator)) { + result.push(encodeUnreserved(key) + "=" + tmp.join(",")); + } else if (tmp.length !== 0) { + result.push(tmp.join(",")); } - - onEnd = function () { - curFile = undefined - file.push(null) + } + } + } else { + if (operator === ";") { + if (isDefined(value)) { + result.push(encodeUnreserved(key)); + } + } else if (value === "" && (operator === "&" || operator === "?")) { + result.push(encodeUnreserved(key) + "="); + } else if (value === "") { + result.push(""); + } + } + return result; +} +function parseUrl(template) { + return { + expand: expand.bind(null, template) + }; +} +function expand(template, context) { + var operators = ["+", "#", ".", "/", ";", "?", "&"]; + template = template.replace( + /\{([^\{\}]+)\}|([^\{\}]+)/g, + function(_, expression, literal) { + if (expression) { + let operator = ""; + const values = []; + if (operators.indexOf(expression.charAt(0)) !== -1) { + operator = expression.charAt(0); + expression = expression.substr(1); } - } else { - // non-file field - if (nfields === fieldsLimit) { - if (!boy.hitFieldsLimit) { - boy.hitFieldsLimit = true - boy.emit('fieldsLimit') + 
expression.split(/,/g).forEach(function(variable) { + var tmp = /([^:\*]*)(?::(\d+)|(\*))?/.exec(variable); + values.push(getValues(context, operator, tmp[1], tmp[2] || tmp[3])); + }); + if (operator && operator !== "+") { + var separator = ","; + if (operator === "?") { + separator = "&"; + } else if (operator !== "#") { + separator = operator; } - return skipPart(part) - } - - ++nfields - ++nends - let buffer = '' - let truncated = false - curField = part - - onData = function (data) { - if ((nsize += data.length) > fieldSizeLimit) { - const extralen = (fieldSizeLimit - (nsize - data.length)) - buffer += data.toString('binary', 0, extralen) - truncated = true - part.removeAllListeners('data') - } else { buffer += data.toString('binary') } - } - - onEnd = function () { - curField = undefined - if (buffer.length) { buffer = decodeText(buffer, 'binary', charset) } - boy.emit('field', fieldname, buffer, false, truncated, encoding, contype) - --nends - checkFinished() + return (values.length !== 0 ? operator : "") + values.join(separator); + } else { + return values.join(","); } + } else { + return encodeReserved(literal); } - - /* As of node@2efe4ab761666 (v0.10.29+/v0.11.14+), busboy had become - broken. Streams2/streams3 is a huge black box of confusion, but - somehow overriding the sync state seems to fix things again (and still - seems to work for previous node versions). - */ - part._readableState.sync = false - - part.on('data', onData) - part.on('end', onEnd) - }).on('error', function (err) { - if (curFile) { curFile.emit('error', err) } - }) - }).on('error', function (err) { - boy.emit('error', err) - }).on('finish', function () { - finished = true - checkFinished() - }) -} - -Multipart.prototype.write = function (chunk, cb) { - const r = this.parser.write(chunk) - if (r && !this._pause) { - cb() + } + ); + if (template === "/") { + return template; } else { - this._needDrain = !r - this._cb = cb + return template.replace(/\/$/, ""); } } -Multipart.prototype.end = function () { - const self = this - - if (self.parser.writable) { - self.parser.end() - } else if (!self._boy._done) { - process.nextTick(function () { - self._boy._done = true - self._boy.emit('finish') - }) +// pkg/dist-src/parse.js +function parse(options) { + let method = options.method.toUpperCase(); + let url = (options.url || "/").replace(/:([a-z]\w+)/g, "{$1}"); + let headers = Object.assign({}, options.headers); + let body; + let parameters = omit(options, [ + "method", + "baseUrl", + "url", + "headers", + "request", + "mediaType" + ]); + const urlVariableNames = extractUrlVariableNames(url); + url = parseUrl(url).expand(parameters); + if (!/^http/.test(url)) { + url = options.baseUrl + url; + } + const omittedParameters = Object.keys(options).filter((option) => urlVariableNames.includes(option)).concat("baseUrl"); + const remainingParameters = omit(parameters, omittedParameters); + const isBinaryRequest = /application\/octet-stream/i.test(headers.accept); + if (!isBinaryRequest) { + if (options.mediaType.format) { + headers.accept = headers.accept.split(/,/).map( + (format) => format.replace( + /application\/vnd(\.\w+)(\.v3)?(\.\w+)?(\+json)?$/, + `application/vnd$1$2.${options.mediaType.format}` + ) + ).join(","); + } + if (url.endsWith("/graphql")) { + if (options.mediaType.previews?.length) { + const previewsFromAcceptHeader = headers.accept.match(/[\w-]+(?=-preview)/g) || []; + headers.accept = previewsFromAcceptHeader.concat(options.mediaType.previews).map((preview) => { + const format = 
options.mediaType.format ? `.${options.mediaType.format}` : "+json"; + return `application/vnd.github.${preview}-preview${format}`; + }).join(","); + } + } + } + if (["GET", "HEAD"].includes(method)) { + url = addQueryParameters(url, remainingParameters); + } else { + if ("data" in remainingParameters) { + body = remainingParameters.data; + } else { + if (Object.keys(remainingParameters).length) { + body = remainingParameters; + } + } + } + if (!headers["content-type"] && typeof body !== "undefined") { + headers["content-type"] = "application/json; charset=utf-8"; + } + if (["PATCH", "PUT"].includes(method) && typeof body === "undefined") { + body = ""; } + return Object.assign( + { method, url, headers }, + typeof body !== "undefined" ? { body } : null, + options.request ? { request: options.request } : null + ); } -function skipPart (part) { - part.resume() +// pkg/dist-src/endpoint-with-defaults.js +function endpointWithDefaults(defaults, route, options) { + return parse(merge(defaults, route, options)); } -function FileStream (opts) { - Readable.call(this, opts) - - this.bytesRead = 0 - - this.truncated = false +// pkg/dist-src/with-defaults.js +function withDefaults(oldDefaults, newDefaults) { + const DEFAULTS2 = merge(oldDefaults, newDefaults); + const endpoint2 = endpointWithDefaults.bind(null, DEFAULTS2); + return Object.assign(endpoint2, { + DEFAULTS: DEFAULTS2, + defaults: withDefaults.bind(null, DEFAULTS2), + merge: merge.bind(null, DEFAULTS2), + parse + }); } -inherits(FileStream, Readable) - -FileStream.prototype._read = function (n) {} - -module.exports = Multipart - - -/***/ }), - -/***/ 8306: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { - -"use strict"; - - -const Decoder = __nccwpck_require__(7100) -const decodeText = __nccwpck_require__(4619) -const getLimit = __nccwpck_require__(1467) - -const RE_CHARSET = /^charset$/i - -UrlEncoded.detect = /^application\/x-www-form-urlencoded/i -function UrlEncoded (boy, cfg) { - const limits = cfg.limits - const parsedConType = cfg.parsedConType - this.boy = boy +// pkg/dist-src/index.js +var endpoint = withDefaults(null, DEFAULTS); - this.fieldSizeLimit = getLimit(limits, 'fieldSize', 1 * 1024 * 1024) - this.fieldNameSizeLimit = getLimit(limits, 'fieldNameSize', 100) - this.fieldsLimit = getLimit(limits, 'fields', Infinity) - let charset - for (var i = 0, len = parsedConType.length; i < len; ++i) { // eslint-disable-line no-var - if (Array.isArray(parsedConType[i]) && - RE_CHARSET.test(parsedConType[i][0])) { - charset = parsedConType[i][1].toLowerCase() - break +;// CONCATENATED MODULE: ./node_modules/@octokit/request-error/dist-src/index.js +class RequestError extends Error { + name; + /** + * http status code + */ + status; + /** + * Request options that lead to the error. 
+ */ + request; + /** + * Response object if a response was received + */ + response; + constructor(message, statusCode, options) { + super(message); + this.name = "HttpError"; + this.status = Number.parseInt(statusCode); + if (Number.isNaN(this.status)) { + this.status = 0; } - } - - if (charset === undefined) { charset = cfg.defCharset || 'utf8' } - - this.decoder = new Decoder() - this.charset = charset - this._fields = 0 - this._state = 'key' - this._checkingBytes = true - this._bytesKey = 0 - this._bytesVal = 0 - this._key = '' - this._val = '' - this._keyTrunc = false - this._valTrunc = false - this._hitLimit = false -} - -UrlEncoded.prototype.write = function (data, cb) { - if (this._fields === this.fieldsLimit) { - if (!this.boy.hitFieldsLimit) { - this.boy.hitFieldsLimit = true - this.boy.emit('fieldsLimit') + if ("response" in options) { + this.response = options.response; } - return cb() + const requestCopy = Object.assign({}, options.request); + if (options.request.headers.authorization) { + requestCopy.headers = Object.assign({}, options.request.headers, { + authorization: options.request.headers.authorization.replace( + / .*$/, + " [REDACTED]" + ) + }); + } + requestCopy.url = requestCopy.url.replace(/\bclient_secret=\w+/g, "client_secret=[REDACTED]").replace(/\baccess_token=\w+/g, "access_token=[REDACTED]"); + this.request = requestCopy; } +} - let idxeq; let idxamp; let i; let p = 0; const len = data.length - - while (p < len) { - if (this._state === 'key') { - idxeq = idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x3D/* = */) { - idxeq = i - break - } else if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesKey === this.fieldNameSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesKey } - } - if (idxeq !== undefined) { - // key with assignment - if (idxeq > p) { this._key += this.decoder.write(data.toString('binary', p, idxeq)) } - this._state = 'val' +;// CONCATENATED MODULE: ./node_modules/@octokit/request/dist-bundle/index.js +// pkg/dist-src/index.js - this._hitLimit = false - this._checkingBytes = true - this._val = '' - this._bytesVal = 0 - this._valTrunc = false - this.decoder.reset() - p = idxeq + 1 - } else if (idxamp !== undefined) { - // key with no assignment - ++this._fields - let key; const keyTrunc = this._keyTrunc - if (idxamp > p) { key = (this._key += this.decoder.write(data.toString('binary', p, idxamp))) } else { key = this._key } +// pkg/dist-src/defaults.js - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() - if (key.length) { - this.boy.emit('field', decodeText(key, 'binary', this.charset), - '', - keyTrunc, - false) - } +// pkg/dist-src/version.js +var dist_bundle_VERSION = "0.0.0-development"; - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... 
- if (i > p) { this._key += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._bytesKey = this._key.length) === this.fieldNameSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._keyTrunc = true - } - } else { - if (p < len) { this._key += this.decoder.write(data.toString('binary', p)) } - p = len - } - } else { - idxamp = undefined - for (i = p; i < len; ++i) { - if (!this._checkingBytes) { ++p } - if (data[i] === 0x26/* & */) { - idxamp = i - break - } - if (this._checkingBytes && this._bytesVal === this.fieldSizeLimit) { - this._hitLimit = true - break - } else if (this._checkingBytes) { ++this._bytesVal } - } +// pkg/dist-src/defaults.js +var defaults_default = { + headers: { + "user-agent": `octokit-request.js/${dist_bundle_VERSION} ${getUserAgent()}` + } +}; - if (idxamp !== undefined) { - ++this._fields - if (idxamp > p) { this._val += this.decoder.write(data.toString('binary', p, idxamp)) } - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) - this._state = 'key' +// pkg/dist-src/is-plain-object.js +function dist_bundle_isPlainObject(value) { + if (typeof value !== "object" || value === null) return false; + if (Object.prototype.toString.call(value) !== "[object Object]") return false; + const proto = Object.getPrototypeOf(value); + if (proto === null) return true; + const Ctor = Object.prototype.hasOwnProperty.call(proto, "constructor") && proto.constructor; + return typeof Ctor === "function" && Ctor instanceof Ctor && Function.prototype.call(Ctor) === Function.prototype.call(value); +} - this._hitLimit = false - this._checkingBytes = true - this._key = '' - this._bytesKey = 0 - this._keyTrunc = false - this.decoder.reset() +// pkg/dist-src/fetch-wrapper.js - p = idxamp + 1 - if (this._fields === this.fieldsLimit) { return cb() } - } else if (this._hitLimit) { - // we may not have hit the actual limit if there are encoded bytes... - if (i > p) { this._val += this.decoder.write(data.toString('binary', p, i)) } - p = i - if ((this._val === '' && this.fieldSizeLimit === 0) || - (this._bytesVal = this._val.length) === this.fieldSizeLimit) { - // yep, we actually did hit the limit - this._checkingBytes = false - this._valTrunc = true +async function fetchWrapper(requestOptions) { + const fetch = requestOptions.request?.fetch || globalThis.fetch; + if (!fetch) { + throw new Error( + "fetch is not set. Please pass a fetch implementation as new Octokit({ request: { fetch }}). Learn more at https://github.com/octokit/octokit.js/#fetch-missing" + ); + } + const log = requestOptions.request?.log || console; + const parseSuccessResponseBody = requestOptions.request?.parseSuccessResponseBody !== false; + const body = dist_bundle_isPlainObject(requestOptions.body) || Array.isArray(requestOptions.body) ? JSON.stringify(requestOptions.body) : requestOptions.body; + const requestHeaders = Object.fromEntries( + Object.entries(requestOptions.headers).map(([name, value]) => [ + name, + String(value) + ]) + ); + let fetchResponse; + try { + fetchResponse = await fetch(requestOptions.url, { + method: requestOptions.method, + body, + redirect: requestOptions.request?.redirect, + headers: requestHeaders, + signal: requestOptions.request?.signal, + // duplex must be set if request.body is ReadableStream or Async Iterables. + // See https://fetch.spec.whatwg.org/#dom-requestinit-duplex. 
+ ...requestOptions.body && { duplex: "half" } + }); + } catch (error) { + let message = "Unknown Error"; + if (error instanceof Error) { + if (error.name === "AbortError") { + error.status = 500; + throw error; + } + message = error.message; + if (error.name === "TypeError" && "cause" in error) { + if (error.cause instanceof Error) { + message = error.cause.message; + } else if (typeof error.cause === "string") { + message = error.cause; } - } else { - if (p < len) { this._val += this.decoder.write(data.toString('binary', p)) } - p = len } } + const requestError = new RequestError(message, 500, { + request: requestOptions + }); + requestError.cause = error; + throw requestError; + } + const status = fetchResponse.status; + const url = fetchResponse.url; + const responseHeaders = {}; + for (const [key, value] of fetchResponse.headers) { + responseHeaders[key] = value; + } + const octokitResponse = { + url, + status, + headers: responseHeaders, + data: "" + }; + if ("deprecation" in responseHeaders) { + const matches = responseHeaders.link && responseHeaders.link.match(/<([^>]+)>; rel="deprecation"/); + const deprecationLink = matches && matches.pop(); + log.warn( + `[@octokit/request] "${requestOptions.method} ${requestOptions.url}" is deprecated. It is scheduled to be removed on ${responseHeaders.sunset}${deprecationLink ? `. See ${deprecationLink}` : ""}` + ); } - cb() -} - -UrlEncoded.prototype.end = function () { - if (this.boy._done) { return } - - if (this._state === 'key' && this._key.length > 0) { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - '', - this._keyTrunc, - false) - } else if (this._state === 'val') { - this.boy.emit('field', decodeText(this._key, 'binary', this.charset), - decodeText(this._val, 'binary', this.charset), - this._keyTrunc, - this._valTrunc) + if (status === 204 || status === 205) { + return octokitResponse; } - this.boy._done = true - this.boy.emit('finish') + if (requestOptions.method === "HEAD") { + if (status < 400) { + return octokitResponse; + } + throw new RequestError(fetchResponse.statusText, status, { + response: octokitResponse, + request: requestOptions + }); + } + if (status === 304) { + octokitResponse.data = await getResponseData(fetchResponse); + throw new RequestError("Not modified", status, { + response: octokitResponse, + request: requestOptions + }); + } + if (status >= 400) { + octokitResponse.data = await getResponseData(fetchResponse); + throw new RequestError(toErrorMessage(octokitResponse.data), status, { + response: octokitResponse, + request: requestOptions + }); + } + octokitResponse.data = parseSuccessResponseBody ? 
await getResponseData(fetchResponse) : fetchResponse.body; + return octokitResponse; } - -module.exports = UrlEncoded - - -/***/ }), - -/***/ 7100: -/***/ ((module) => { - -"use strict"; - - -const RE_PLUS = /\+/g - -const HEX = [ - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 1, 1, 1, 1, 1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, - 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 -] - -function Decoder () { - this.buffer = undefined +async function getResponseData(response) { + const contentType = response.headers.get("content-type"); + if (/application\/json/.test(contentType)) { + return response.json().catch(() => response.text()).catch(() => ""); + } + if (!contentType || /^text\/|charset=utf-8$/.test(contentType)) { + return response.text(); + } + return response.arrayBuffer(); } -Decoder.prototype.write = function (str) { - // Replace '+' with ' ' before decoding - str = str.replace(RE_PLUS, ' ') - let res = '' - let i = 0; let p = 0; const len = str.length - for (; i < len; ++i) { - if (this.buffer !== undefined) { - if (!HEX[str.charCodeAt(i)]) { - res += '%' + this.buffer - this.buffer = undefined - --i // retry character - } else { - this.buffer += str[i] - ++p - if (this.buffer.length === 2) { - res += String.fromCharCode(parseInt(this.buffer, 16)) - this.buffer = undefined - } - } - } else if (str[i] === '%') { - if (i > p) { - res += str.substring(p, i) - p = i - } - this.buffer = '' - ++p - } +function toErrorMessage(data) { + if (typeof data === "string") { + return data; } - if (p < len && this.buffer === undefined) { res += str.substring(p) } - return res + if (data instanceof ArrayBuffer) { + return "Unknown error"; + } + if ("message" in data) { + const suffix = "documentation_url" in data ? ` - ${data.documentation_url}` : ""; + return Array.isArray(data.errors) ? 
`${data.message}: ${data.errors.map((v) => JSON.stringify(v)).join(", ")}${suffix}` : `${data.message}${suffix}`; + } + return `Unknown error: ${JSON.stringify(data)}`; } -Decoder.prototype.reset = function () { - this.buffer = undefined + +// pkg/dist-src/with-defaults.js +function dist_bundle_withDefaults(oldEndpoint, newDefaults) { + const endpoint2 = oldEndpoint.defaults(newDefaults); + const newApi = function(route, parameters) { + const endpointOptions = endpoint2.merge(route, parameters); + if (!endpointOptions.request || !endpointOptions.request.hook) { + return fetchWrapper(endpoint2.parse(endpointOptions)); + } + const request2 = (route2, parameters2) => { + return fetchWrapper( + endpoint2.parse(endpoint2.merge(route2, parameters2)) + ); + }; + Object.assign(request2, { + endpoint: endpoint2, + defaults: dist_bundle_withDefaults.bind(null, endpoint2) + }); + return endpointOptions.request.hook(request2, endpointOptions); + }; + return Object.assign(newApi, { + endpoint: endpoint2, + defaults: dist_bundle_withDefaults.bind(null, endpoint2) + }); } -module.exports = Decoder - - -/***/ }), - -/***/ 8647: -/***/ ((module) => { +// pkg/dist-src/index.js +var request = dist_bundle_withDefaults(endpoint, defaults_default); -"use strict"; +;// CONCATENATED MODULE: ./node_modules/@octokit/graphql/dist-bundle/index.js +// pkg/dist-src/index.js -module.exports = function basename (path) { - if (typeof path !== 'string') { return '' } - for (var i = path.length - 1; i >= 0; --i) { // eslint-disable-line no-var - switch (path.charCodeAt(i)) { - case 0x2F: // '/' - case 0x5C: // '\' - path = path.slice(i + 1) - return (path === '..' || path === '.' ? '' : path) - } - } - return (path === '..' || path === '.' ? '' : path) -} -/***/ }), +// pkg/dist-src/version.js +var graphql_dist_bundle_VERSION = "0.0.0-development"; -/***/ 4619: -/***/ (function(module) { +// pkg/dist-src/with-defaults.js -"use strict"; +// pkg/dist-src/graphql.js -// Node has always utf-8 -const utf8Decoder = new TextDecoder('utf-8') -const textDecoders = new Map([ - ['utf-8', utf8Decoder], - ['utf8', utf8Decoder] -]) -function getDecoder (charset) { - let lc - while (true) { - switch (charset) { - case 'utf-8': - case 'utf8': - return decoders.utf8 - case 'latin1': - case 'ascii': // TODO: Make these a separate, strict decoder? 
- case 'us-ascii': - case 'iso-8859-1': - case 'iso8859-1': - case 'iso88591': - case 'iso_8859-1': - case 'windows-1252': - case 'iso_8859-1:1987': - case 'cp1252': - case 'x-cp1252': - return decoders.latin1 - case 'utf16le': - case 'utf-16le': - case 'ucs2': - case 'ucs-2': - return decoders.utf16le - case 'base64': - return decoders.base64 - default: - if (lc === undefined) { - lc = true - charset = charset.toLowerCase() - continue - } - return decoders.other.bind(charset) +// pkg/dist-src/error.js +function _buildMessageForResponseErrors(data) { + return `Request failed due to following response errors: +` + data.errors.map((e) => ` - ${e.message}`).join("\n"); +} +var GraphqlResponseError = class extends Error { + constructor(request2, headers, response) { + super(_buildMessageForResponseErrors(response)); + this.request = request2; + this.headers = headers; + this.response = response; + this.errors = response.errors; + this.data = response.data; + if (Error.captureStackTrace) { + Error.captureStackTrace(this, this.constructor); } } -} + name = "GraphqlResponseError"; + errors; + data; +}; -const decoders = { - utf8: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) +// pkg/dist-src/graphql.js +var NON_VARIABLE_OPTIONS = [ + "method", + "baseUrl", + "url", + "headers", + "request", + "query", + "mediaType" +]; +var FORBIDDEN_VARIABLE_OPTIONS = ["query", "method", "url"]; +var GHES_V3_SUFFIX_REGEX = /\/api\/v3\/?$/; +function graphql(request2, query, options) { + if (options) { + if (typeof query === "string" && "query" in options) { + return Promise.reject( + new Error(`[@octokit/graphql] "query" cannot be used as variable name`) + ); } - return data.utf8Slice(0, data.length) - }, - - latin1: (data, sourceEncoding) => { - if (data.length === 0) { - return '' + for (const key in options) { + if (!FORBIDDEN_VARIABLE_OPTIONS.includes(key)) + continue; + return Promise.reject( + new Error( + `[@octokit/graphql] "${key}" cannot be used as variable name` + ) + ); } - if (typeof data === 'string') { - return data + } + const parsedOptions = typeof query === "string" ? 
Object.assign({ query }, options) : query; + const requestOptions = Object.keys( + parsedOptions + ).reduce((result, key) => { + if (NON_VARIABLE_OPTIONS.includes(key)) { + result[key] = parsedOptions[key]; + return result; } - return data.latin1Slice(0, data.length) - }, - - utf16le: (data, sourceEncoding) => { - if (data.length === 0) { - return '' + if (!result.variables) { + result.variables = {}; } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) + result.variables[key] = parsedOptions[key]; + return result; + }, {}); + const baseUrl = parsedOptions.baseUrl || request2.endpoint.DEFAULTS.baseUrl; + if (GHES_V3_SUFFIX_REGEX.test(baseUrl)) { + requestOptions.url = baseUrl.replace(GHES_V3_SUFFIX_REGEX, "/api/graphql"); + } + return request2(requestOptions).then((response) => { + if (response.data.errors) { + const headers = {}; + for (const key of Object.keys(response.headers)) { + headers[key] = response.headers[key]; + } + throw new GraphqlResponseError( + requestOptions, + headers, + response.data + ); } - return data.ucs2Slice(0, data.length) - }, + return response.data.data; + }); +} - base64: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) - } - return data.base64Slice(0, data.length) +// pkg/dist-src/with-defaults.js +function graphql_dist_bundle_withDefaults(request2, newDefaults) { + const newRequest = request2.defaults(newDefaults); + const newApi = (query, options) => { + return graphql(newRequest, query, options); + }; + return Object.assign(newApi, { + defaults: graphql_dist_bundle_withDefaults.bind(null, newRequest), + endpoint: newRequest.endpoint + }); +} + +// pkg/dist-src/index.js +var graphql2 = graphql_dist_bundle_withDefaults(request, { + headers: { + "user-agent": `octokit-graphql.js/${graphql_dist_bundle_VERSION} ${getUserAgent()}` }, + method: "POST", + url: "/graphql" +}); +function withCustomRequest(customRequest) { + return graphql_dist_bundle_withDefaults(customRequest, { + method: "POST", + url: "/graphql" + }); +} - other: (data, sourceEncoding) => { - if (data.length === 0) { - return '' - } - if (typeof data === 'string') { - data = Buffer.from(data, sourceEncoding) - } - if (textDecoders.has(this.toString())) { - try { - return textDecoders.get(this).decode(data) - } catch {} - } - return typeof data === 'string' - ? data - : data.toString() - } +;// CONCATENATED MODULE: ./node_modules/@octokit/auth-token/dist-bundle/index.js +// pkg/dist-src/auth.js +var REGEX_IS_INSTALLATION_LEGACY = /^v1\./; +var REGEX_IS_INSTALLATION = /^ghs_/; +var REGEX_IS_USER_TO_SERVER = /^ghu_/; +async function auth(token) { + const isApp = token.split(/\./).length === 3; + const isInstallation = REGEX_IS_INSTALLATION_LEGACY.test(token) || REGEX_IS_INSTALLATION.test(token); + const isUserToServer = REGEX_IS_USER_TO_SERVER.test(token); + const tokenType = isApp ? "app" : isInstallation ? "installation" : isUserToServer ? 
"user-to-server" : "oauth"; + return { + type: "token", + token, + tokenType + }; } -function decodeText (text, sourceEncoding, destEncoding) { - if (text) { - return getDecoder(destEncoding)(text, sourceEncoding) +// pkg/dist-src/with-authorization-prefix.js +function withAuthorizationPrefix(token) { + if (token.split(/\./).length === 3) { + return `bearer ${token}`; } - return text + return `token ${token}`; } -module.exports = decodeText +// pkg/dist-src/hook.js +async function hook(token, request, route, parameters) { + const endpoint = request.endpoint.merge( + route, + parameters + ); + endpoint.headers.authorization = withAuthorizationPrefix(token); + return request(endpoint); +} + +// pkg/dist-src/index.js +var createTokenAuth = function createTokenAuth2(token) { + if (!token) { + throw new Error("[@octokit/auth-token] No token passed to createTokenAuth"); + } + if (typeof token !== "string") { + throw new Error( + "[@octokit/auth-token] Token passed to createTokenAuth is not a string" + ); + } + token = token.replace(/^(token|bearer) +/i, ""); + return Object.assign(auth.bind(null, token), { + hook: hook.bind(null, token) + }); +}; -/***/ }), +;// CONCATENATED MODULE: ./node_modules/@octokit/core/dist-src/version.js +const version_VERSION = "6.1.2"; -/***/ 1467: -/***/ ((module) => { -"use strict"; +;// CONCATENATED MODULE: ./node_modules/@octokit/core/dist-src/index.js -module.exports = function getLimit (limits, name, defaultLimit) { - if ( - !limits || - limits[name] === undefined || - limits[name] === null - ) { return defaultLimit } - if ( - typeof limits[name] !== 'number' || - isNaN(limits[name]) - ) { throw new TypeError('Limit ' + name + ' is not a valid number') } - return limits[name] -} -/***/ }), +const noop = () => { +}; +const consoleWarn = console.warn.bind(console); +const consoleError = console.error.bind(console); +const userAgentTrail = `octokit-core.js/${version_VERSION} ${getUserAgent()}`; +class Octokit { + static VERSION = version_VERSION; + static defaults(defaults) { + const OctokitWithDefaults = class extends this { + constructor(...args) { + const options = args[0] || {}; + if (typeof defaults === "function") { + super(defaults(options)); + return; + } + super( + Object.assign( + {}, + defaults, + options, + options.userAgent && defaults.userAgent ? { + userAgent: `${options.userAgent} ${defaults.userAgent}` + } : null + ) + ); + } + }; + return OctokitWithDefaults; + } + static plugins = []; + /** + * Attach a plugin (or many) to your Octokit instance. + * + * @example + * const API = Octokit.plugin(plugin1, plugin2, plugin3, ...) + */ + static plugin(...newPlugins) { + const currentPlugins = this.plugins; + const NewOctokit = class extends this { + static plugins = currentPlugins.concat( + newPlugins.filter((plugin) => !currentPlugins.includes(plugin)) + ); + }; + return NewOctokit; + } + constructor(options = {}) { + const hook = new before_after_hook.Collection(); + const requestDefaults = { + baseUrl: request.endpoint.DEFAULTS.baseUrl, + headers: {}, + request: Object.assign({}, options.request, { + // @ts-ignore internal usage only, no need to type + hook: hook.bind(null, "request") + }), + mediaType: { + previews: [], + format: "" + } + }; + requestDefaults.headers["user-agent"] = options.userAgent ? 
`${options.userAgent} ${userAgentTrail}` : userAgentTrail; + if (options.baseUrl) { + requestDefaults.baseUrl = options.baseUrl; + } + if (options.previews) { + requestDefaults.mediaType.previews = options.previews; + } + if (options.timeZone) { + requestDefaults.headers["time-zone"] = options.timeZone; + } + this.request = request.defaults(requestDefaults); + this.graphql = withCustomRequest(this.request).defaults(requestDefaults); + this.log = Object.assign( + { + debug: noop, + info: noop, + warn: consoleWarn, + error: consoleError + }, + options.log + ); + this.hook = hook; + if (!options.authStrategy) { + if (!options.auth) { + this.auth = async () => ({ + type: "unauthenticated" + }); + } else { + const auth = createTokenAuth(options.auth); + hook.wrap("request", auth.hook); + this.auth = auth; + } + } else { + const { authStrategy, ...otherOptions } = options; + const auth = authStrategy( + Object.assign( + { + request: this.request, + log: this.log, + // we pass the current octokit instance as well as its constructor options + // to allow for authentication strategies that return a new octokit instance + // that shares the same internal state as the current one. The original + // requirement for this was the "event-octokit" authentication strategy + // of https://github.com/probot/octokit-auth-probot. + octokit: this, + octokitOptions: otherOptions + }, + options.auth + ) + ); + hook.wrap("request", auth.hook); + this.auth = auth; + } + const classConstructor = this.constructor; + for (let i = 0; i < classConstructor.plugins.length; ++i) { + Object.assign(this, classConstructor.plugins[i](this, options)); + } + } + // assigned during constructor + request; + graphql; + log; + hook; + // TODO: type `octokit.auth` based on passed options.authStrategy + auth; +} -/***/ 1854: -/***/ ((module, __unused_webpack_exports, __nccwpck_require__) => { -"use strict"; -/* eslint-disable object-property-newline */ +// EXTERNAL MODULE: ./node_modules/bottleneck/light.js +var light = __nccwpck_require__(1174); +;// CONCATENATED MODULE: ./node_modules/@octokit/plugin-throttling/dist-bundle/index.js +// pkg/dist-src/index.js -const decodeText = __nccwpck_require__(4619) +// pkg/dist-src/version.js +var plugin_throttling_dist_bundle_VERSION = "0.0.0-development"; + +// pkg/dist-src/wrap-request.js +var dist_bundle_noop = () => Promise.resolve(); +function wrapRequest(state, request, options) { + return state.retryLimiter.schedule(doRequest, state, request, options); +} +async function doRequest(state, request, options) { + const isWrite = options.method !== "GET" && options.method !== "HEAD"; + const { pathname } = new URL(options.url, "http://github.test"); + const isSearch = options.method === "GET" && pathname.startsWith("/search/"); + const isGraphQL = pathname.startsWith("/graphql"); + const retryCount = ~~request.retryCount; + const jobOptions = retryCount > 0 ? 
{ priority: 0, weight: 0 } : {}; + if (state.clustering) { + jobOptions.expiration = 1e3 * 60; + } + if (isWrite || isGraphQL) { + await state.write.key(state.id).schedule(jobOptions, dist_bundle_noop); + } + if (isWrite && state.triggersNotification(pathname)) { + await state.notifications.key(state.id).schedule(jobOptions, dist_bundle_noop); + } + if (isSearch) { + await state.search.key(state.id).schedule(jobOptions, dist_bundle_noop); + } + const req = state.global.key(state.id).schedule(jobOptions, request, options); + if (isGraphQL) { + const res = await req; + if (res.data.errors != null && res.data.errors.some((error) => error.type === "RATE_LIMITED")) { + const error = Object.assign(new Error("GraphQL Rate Limit Exceeded"), { + response: res, + data: res.data + }); + throw error; + } + } + return req; +} + +// pkg/dist-src/generated/triggers-notification-paths.js +var triggers_notification_paths_default = [ + "/orgs/{org}/invitations", + "/orgs/{org}/invitations/{invitation_id}", + "/orgs/{org}/teams/{team_slug}/discussions", + "/orgs/{org}/teams/{team_slug}/discussions/{discussion_number}/comments", + "/repos/{owner}/{repo}/collaborators/{username}", + "/repos/{owner}/{repo}/commits/{commit_sha}/comments", + "/repos/{owner}/{repo}/issues", + "/repos/{owner}/{repo}/issues/{issue_number}/comments", + "/repos/{owner}/{repo}/pulls", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments", + "/repos/{owner}/{repo}/pulls/{pull_number}/comments/{comment_id}/replies", + "/repos/{owner}/{repo}/pulls/{pull_number}/merge", + "/repos/{owner}/{repo}/pulls/{pull_number}/requested_reviewers", + "/repos/{owner}/{repo}/pulls/{pull_number}/reviews", + "/repos/{owner}/{repo}/releases", + "/teams/{team_id}/discussions", + "/teams/{team_id}/discussions/{discussion_number}/comments" +]; -const RE_ENCODED = /%[a-fA-F0-9][a-fA-F0-9]/g +// pkg/dist-src/route-matcher.js +function routeMatcher(paths) { + const regexes = paths.map( + (path) => path.split("/").map((c) => c.startsWith("{") ? 
"(?:.+?)" : c).join("/") + ); + const regex2 = `^(?:${regexes.map((r) => `(?:${r})`).join("|")})[^/]*$`; + return new RegExp(regex2, "i"); +} -const EncodedLookup = { - '%00': '\x00', '%01': '\x01', '%02': '\x02', '%03': '\x03', '%04': '\x04', - '%05': '\x05', '%06': '\x06', '%07': '\x07', '%08': '\x08', '%09': '\x09', - '%0a': '\x0a', '%0A': '\x0a', '%0b': '\x0b', '%0B': '\x0b', '%0c': '\x0c', - '%0C': '\x0c', '%0d': '\x0d', '%0D': '\x0d', '%0e': '\x0e', '%0E': '\x0e', - '%0f': '\x0f', '%0F': '\x0f', '%10': '\x10', '%11': '\x11', '%12': '\x12', - '%13': '\x13', '%14': '\x14', '%15': '\x15', '%16': '\x16', '%17': '\x17', - '%18': '\x18', '%19': '\x19', '%1a': '\x1a', '%1A': '\x1a', '%1b': '\x1b', - '%1B': '\x1b', '%1c': '\x1c', '%1C': '\x1c', '%1d': '\x1d', '%1D': '\x1d', - '%1e': '\x1e', '%1E': '\x1e', '%1f': '\x1f', '%1F': '\x1f', '%20': '\x20', - '%21': '\x21', '%22': '\x22', '%23': '\x23', '%24': '\x24', '%25': '\x25', - '%26': '\x26', '%27': '\x27', '%28': '\x28', '%29': '\x29', '%2a': '\x2a', - '%2A': '\x2a', '%2b': '\x2b', '%2B': '\x2b', '%2c': '\x2c', '%2C': '\x2c', - '%2d': '\x2d', '%2D': '\x2d', '%2e': '\x2e', '%2E': '\x2e', '%2f': '\x2f', - '%2F': '\x2f', '%30': '\x30', '%31': '\x31', '%32': '\x32', '%33': '\x33', - '%34': '\x34', '%35': '\x35', '%36': '\x36', '%37': '\x37', '%38': '\x38', - '%39': '\x39', '%3a': '\x3a', '%3A': '\x3a', '%3b': '\x3b', '%3B': '\x3b', - '%3c': '\x3c', '%3C': '\x3c', '%3d': '\x3d', '%3D': '\x3d', '%3e': '\x3e', - '%3E': '\x3e', '%3f': '\x3f', '%3F': '\x3f', '%40': '\x40', '%41': '\x41', - '%42': '\x42', '%43': '\x43', '%44': '\x44', '%45': '\x45', '%46': '\x46', - '%47': '\x47', '%48': '\x48', '%49': '\x49', '%4a': '\x4a', '%4A': '\x4a', - '%4b': '\x4b', '%4B': '\x4b', '%4c': '\x4c', '%4C': '\x4c', '%4d': '\x4d', - '%4D': '\x4d', '%4e': '\x4e', '%4E': '\x4e', '%4f': '\x4f', '%4F': '\x4f', - '%50': '\x50', '%51': '\x51', '%52': '\x52', '%53': '\x53', '%54': '\x54', - '%55': '\x55', '%56': '\x56', '%57': '\x57', '%58': '\x58', '%59': '\x59', - '%5a': '\x5a', '%5A': '\x5a', '%5b': '\x5b', '%5B': '\x5b', '%5c': '\x5c', - '%5C': '\x5c', '%5d': '\x5d', '%5D': '\x5d', '%5e': '\x5e', '%5E': '\x5e', - '%5f': '\x5f', '%5F': '\x5f', '%60': '\x60', '%61': '\x61', '%62': '\x62', - '%63': '\x63', '%64': '\x64', '%65': '\x65', '%66': '\x66', '%67': '\x67', - '%68': '\x68', '%69': '\x69', '%6a': '\x6a', '%6A': '\x6a', '%6b': '\x6b', - '%6B': '\x6b', '%6c': '\x6c', '%6C': '\x6c', '%6d': '\x6d', '%6D': '\x6d', - '%6e': '\x6e', '%6E': '\x6e', '%6f': '\x6f', '%6F': '\x6f', '%70': '\x70', - '%71': '\x71', '%72': '\x72', '%73': '\x73', '%74': '\x74', '%75': '\x75', - '%76': '\x76', '%77': '\x77', '%78': '\x78', '%79': '\x79', '%7a': '\x7a', - '%7A': '\x7a', '%7b': '\x7b', '%7B': '\x7b', '%7c': '\x7c', '%7C': '\x7c', - '%7d': '\x7d', '%7D': '\x7d', '%7e': '\x7e', '%7E': '\x7e', '%7f': '\x7f', - '%7F': '\x7f', '%80': '\x80', '%81': '\x81', '%82': '\x82', '%83': '\x83', - '%84': '\x84', '%85': '\x85', '%86': '\x86', '%87': '\x87', '%88': '\x88', - '%89': '\x89', '%8a': '\x8a', '%8A': '\x8a', '%8b': '\x8b', '%8B': '\x8b', - '%8c': '\x8c', '%8C': '\x8c', '%8d': '\x8d', '%8D': '\x8d', '%8e': '\x8e', - '%8E': '\x8e', '%8f': '\x8f', '%8F': '\x8f', '%90': '\x90', '%91': '\x91', - '%92': '\x92', '%93': '\x93', '%94': '\x94', '%95': '\x95', '%96': '\x96', - '%97': '\x97', '%98': '\x98', '%99': '\x99', '%9a': '\x9a', '%9A': '\x9a', - '%9b': '\x9b', '%9B': '\x9b', '%9c': '\x9c', '%9C': '\x9c', '%9d': '\x9d', - '%9D': '\x9d', '%9e': '\x9e', '%9E': '\x9e', '%9f': '\x9f', '%9F': '\x9f', 
- '%a0': '\xa0', '%A0': '\xa0', '%a1': '\xa1', '%A1': '\xa1', '%a2': '\xa2', - '%A2': '\xa2', '%a3': '\xa3', '%A3': '\xa3', '%a4': '\xa4', '%A4': '\xa4', - '%a5': '\xa5', '%A5': '\xa5', '%a6': '\xa6', '%A6': '\xa6', '%a7': '\xa7', - '%A7': '\xa7', '%a8': '\xa8', '%A8': '\xa8', '%a9': '\xa9', '%A9': '\xa9', - '%aa': '\xaa', '%Aa': '\xaa', '%aA': '\xaa', '%AA': '\xaa', '%ab': '\xab', - '%Ab': '\xab', '%aB': '\xab', '%AB': '\xab', '%ac': '\xac', '%Ac': '\xac', - '%aC': '\xac', '%AC': '\xac', '%ad': '\xad', '%Ad': '\xad', '%aD': '\xad', - '%AD': '\xad', '%ae': '\xae', '%Ae': '\xae', '%aE': '\xae', '%AE': '\xae', - '%af': '\xaf', '%Af': '\xaf', '%aF': '\xaf', '%AF': '\xaf', '%b0': '\xb0', - '%B0': '\xb0', '%b1': '\xb1', '%B1': '\xb1', '%b2': '\xb2', '%B2': '\xb2', - '%b3': '\xb3', '%B3': '\xb3', '%b4': '\xb4', '%B4': '\xb4', '%b5': '\xb5', - '%B5': '\xb5', '%b6': '\xb6', '%B6': '\xb6', '%b7': '\xb7', '%B7': '\xb7', - '%b8': '\xb8', '%B8': '\xb8', '%b9': '\xb9', '%B9': '\xb9', '%ba': '\xba', - '%Ba': '\xba', '%bA': '\xba', '%BA': '\xba', '%bb': '\xbb', '%Bb': '\xbb', - '%bB': '\xbb', '%BB': '\xbb', '%bc': '\xbc', '%Bc': '\xbc', '%bC': '\xbc', - '%BC': '\xbc', '%bd': '\xbd', '%Bd': '\xbd', '%bD': '\xbd', '%BD': '\xbd', - '%be': '\xbe', '%Be': '\xbe', '%bE': '\xbe', '%BE': '\xbe', '%bf': '\xbf', - '%Bf': '\xbf', '%bF': '\xbf', '%BF': '\xbf', '%c0': '\xc0', '%C0': '\xc0', - '%c1': '\xc1', '%C1': '\xc1', '%c2': '\xc2', '%C2': '\xc2', '%c3': '\xc3', - '%C3': '\xc3', '%c4': '\xc4', '%C4': '\xc4', '%c5': '\xc5', '%C5': '\xc5', - '%c6': '\xc6', '%C6': '\xc6', '%c7': '\xc7', '%C7': '\xc7', '%c8': '\xc8', - '%C8': '\xc8', '%c9': '\xc9', '%C9': '\xc9', '%ca': '\xca', '%Ca': '\xca', - '%cA': '\xca', '%CA': '\xca', '%cb': '\xcb', '%Cb': '\xcb', '%cB': '\xcb', - '%CB': '\xcb', '%cc': '\xcc', '%Cc': '\xcc', '%cC': '\xcc', '%CC': '\xcc', - '%cd': '\xcd', '%Cd': '\xcd', '%cD': '\xcd', '%CD': '\xcd', '%ce': '\xce', - '%Ce': '\xce', '%cE': '\xce', '%CE': '\xce', '%cf': '\xcf', '%Cf': '\xcf', - '%cF': '\xcf', '%CF': '\xcf', '%d0': '\xd0', '%D0': '\xd0', '%d1': '\xd1', - '%D1': '\xd1', '%d2': '\xd2', '%D2': '\xd2', '%d3': '\xd3', '%D3': '\xd3', - '%d4': '\xd4', '%D4': '\xd4', '%d5': '\xd5', '%D5': '\xd5', '%d6': '\xd6', - '%D6': '\xd6', '%d7': '\xd7', '%D7': '\xd7', '%d8': '\xd8', '%D8': '\xd8', - '%d9': '\xd9', '%D9': '\xd9', '%da': '\xda', '%Da': '\xda', '%dA': '\xda', - '%DA': '\xda', '%db': '\xdb', '%Db': '\xdb', '%dB': '\xdb', '%DB': '\xdb', - '%dc': '\xdc', '%Dc': '\xdc', '%dC': '\xdc', '%DC': '\xdc', '%dd': '\xdd', - '%Dd': '\xdd', '%dD': '\xdd', '%DD': '\xdd', '%de': '\xde', '%De': '\xde', - '%dE': '\xde', '%DE': '\xde', '%df': '\xdf', '%Df': '\xdf', '%dF': '\xdf', - '%DF': '\xdf', '%e0': '\xe0', '%E0': '\xe0', '%e1': '\xe1', '%E1': '\xe1', - '%e2': '\xe2', '%E2': '\xe2', '%e3': '\xe3', '%E3': '\xe3', '%e4': '\xe4', - '%E4': '\xe4', '%e5': '\xe5', '%E5': '\xe5', '%e6': '\xe6', '%E6': '\xe6', - '%e7': '\xe7', '%E7': '\xe7', '%e8': '\xe8', '%E8': '\xe8', '%e9': '\xe9', - '%E9': '\xe9', '%ea': '\xea', '%Ea': '\xea', '%eA': '\xea', '%EA': '\xea', - '%eb': '\xeb', '%Eb': '\xeb', '%eB': '\xeb', '%EB': '\xeb', '%ec': '\xec', - '%Ec': '\xec', '%eC': '\xec', '%EC': '\xec', '%ed': '\xed', '%Ed': '\xed', - '%eD': '\xed', '%ED': '\xed', '%ee': '\xee', '%Ee': '\xee', '%eE': '\xee', - '%EE': '\xee', '%ef': '\xef', '%Ef': '\xef', '%eF': '\xef', '%EF': '\xef', - '%f0': '\xf0', '%F0': '\xf0', '%f1': '\xf1', '%F1': '\xf1', '%f2': '\xf2', - '%F2': '\xf2', '%f3': '\xf3', '%F3': '\xf3', '%f4': '\xf4', '%F4': '\xf4', - '%f5': 
'\xf5', '%F5': '\xf5', '%f6': '\xf6', '%F6': '\xf6', '%f7': '\xf7', - '%F7': '\xf7', '%f8': '\xf8', '%F8': '\xf8', '%f9': '\xf9', '%F9': '\xf9', - '%fa': '\xfa', '%Fa': '\xfa', '%fA': '\xfa', '%FA': '\xfa', '%fb': '\xfb', - '%Fb': '\xfb', '%fB': '\xfb', '%FB': '\xfb', '%fc': '\xfc', '%Fc': '\xfc', - '%fC': '\xfc', '%FC': '\xfc', '%fd': '\xfd', '%Fd': '\xfd', '%fD': '\xfd', - '%FD': '\xfd', '%fe': '\xfe', '%Fe': '\xfe', '%fE': '\xfe', '%FE': '\xfe', - '%ff': '\xff', '%Ff': '\xff', '%fF': '\xff', '%FF': '\xff' +// pkg/dist-src/index.js +var regex = routeMatcher(triggers_notification_paths_default); +var triggersNotification = regex.test.bind(regex); +var groups = {}; +var createGroups = function(Bottleneck, common) { + groups.global = new Bottleneck.Group({ + id: "octokit-global", + maxConcurrent: 10, + ...common + }); + groups.search = new Bottleneck.Group({ + id: "octokit-search", + maxConcurrent: 1, + minTime: 2e3, + ...common + }); + groups.write = new Bottleneck.Group({ + id: "octokit-write", + maxConcurrent: 1, + minTime: 1e3, + ...common + }); + groups.notifications = new Bottleneck.Group({ + id: "octokit-notifications", + maxConcurrent: 1, + minTime: 3e3, + ...common + }); +}; +function throttling(octokit, octokitOptions) { + const { + enabled = true, + Bottleneck = light, + id = "no-id", + timeout = 1e3 * 60 * 2, + // Redis TTL: 2 minutes + connection + } = octokitOptions.throttle || {}; + if (!enabled) { + return {}; + } + const common = { connection, timeout }; + if (groups.global == null) { + createGroups(Bottleneck, common); + } + const state = Object.assign( + { + clustering: connection != null, + triggersNotification, + fallbackSecondaryRateRetryAfter: 60, + retryAfterBaseValue: 1e3, + retryLimiter: new Bottleneck(), + id, + ...groups + }, + octokitOptions.throttle + ); + if (typeof state.onSecondaryRateLimit !== "function" || typeof state.onRateLimit !== "function") { + throw new Error(`octokit/plugin-throttling error: + You must pass the onSecondaryRateLimit and onRateLimit error handlers. + See https://octokit.github.io/rest.js/#throttling + + const octokit = new Octokit({ + throttle: { + onSecondaryRateLimit: (retryAfter, options) => {/* ... */}, + onRateLimit: (retryAfter, options) => {/* ... */} + } + }) + `); + } + const events = {}; + const emitter = new Bottleneck.Events(events); + events.on("secondary-limit", state.onSecondaryRateLimit); + events.on("rate-limit", state.onRateLimit); + events.on( + "error", + (e) => octokit.log.warn("Error in throttling-plugin limit handler", e) + ); + state.retryLimiter.on("failed", async function(error, info) { + const [state2, request, options] = info.args; + const { pathname } = new URL(options.url, "http://github.test"); + const shouldRetryGraphQL = pathname.startsWith("/graphql") && error.status !== 401; + if (!(shouldRetryGraphQL || error.status === 403)) { + return; + } + const retryCount = ~~request.retryCount; + request.retryCount = retryCount; + options.request.retryCount = retryCount; + const { wantRetry, retryAfter = 0 } = await async function() { + if (/\bsecondary rate\b/i.test(error.message)) { + const retryAfter2 = Number(error.response.headers["retry-after"]) || state2.fallbackSecondaryRateRetryAfter; + const wantRetry2 = await emitter.trigger( + "secondary-limit", + retryAfter2, + options, + octokit, + retryCount + ); + return { wantRetry: wantRetry2, retryAfter: retryAfter2 }; + } + if (error.response.headers != null && error.response.headers["x-ratelimit-remaining"] === "0" || (error.response.data?.errors ?? 
[]).some( + (error2) => error2.type === "RATE_LIMITED" + )) { + const rateLimitReset = new Date( + ~~error.response.headers["x-ratelimit-reset"] * 1e3 + ).getTime(); + const retryAfter2 = Math.max( + // Add one second so we retry _after_ the reset time + // https://docs.github.com/en/rest/overview/resources-in-the-rest-api?apiVersion=2022-11-28#exceeding-the-rate-limit + Math.ceil((rateLimitReset - Date.now()) / 1e3) + 1, + 0 + ); + const wantRetry2 = await emitter.trigger( + "rate-limit", + retryAfter2, + options, + octokit, + retryCount + ); + return { wantRetry: wantRetry2, retryAfter: retryAfter2 }; + } + return {}; + }(); + if (wantRetry) { + request.retryCount++; + return retryAfter * state2.retryAfterBaseValue; + } + }); + octokit.hook.wrap("request", wrapRequest.bind(null, state)); + return {}; } +throttling.VERSION = plugin_throttling_dist_bundle_VERSION; +throttling.triggersNotification = triggersNotification; -function encodedReplacer (match) { - return EncodedLookup[match] + +;// CONCATENATED MODULE: ./node_modules/@octokit/plugin-retry/dist-bundle/index.js +// pkg/dist-src/version.js +var plugin_retry_dist_bundle_VERSION = "0.0.0-development"; + +// pkg/dist-src/error-request.js +async function errorRequest(state, octokit, error, options) { + if (!error.request || !error.request.request) { + throw error; + } + if (error.status >= 400 && !state.doNotRetry.includes(error.status)) { + const retries = options.request.retries != null ? options.request.retries : state.retries; + const retryAfter = Math.pow((options.request.retryCount || 0) + 1, 2); + throw octokit.retry.retryRequest(error, retries, retryAfter); + } + throw error; } -const STATE_KEY = 0 -const STATE_VALUE = 1 -const STATE_CHARSET = 2 -const STATE_LANG = 3 +// pkg/dist-src/wrap-request.js -function parseParams (str) { - const res = [] - let state = STATE_KEY - let charset = '' - let inquote = false - let escaping = false - let p = 0 - let tmp = '' - const len = str.length - for (var i = 0; i < len; ++i) { // eslint-disable-line no-var - const char = str[i] - if (char === '\\' && inquote) { - if (escaping) { escaping = false } else { - escaping = true - continue - } - } else if (char === '"') { - if (!escaping) { - if (inquote) { - inquote = false - state = STATE_KEY - } else { inquote = true } - continue - } else { escaping = false } - } else { - if (escaping && inquote) { tmp += '\\' } - escaping = false - if ((state === STATE_CHARSET || state === STATE_LANG) && char === "'") { - if (state === STATE_CHARSET) { - state = STATE_LANG - charset = tmp.substring(1) - } else { state = STATE_VALUE } - tmp = '' - continue - } else if (state === STATE_KEY && - (char === '*' || char === '=') && - res.length) { - state = char === '*' - ? 
STATE_CHARSET - : STATE_VALUE - res[p] = [tmp, undefined] - tmp = '' - continue - } else if (!inquote && char === ';') { - state = STATE_KEY - if (charset) { - if (tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } - charset = '' - } else if (tmp.length) { - tmp = decodeText(tmp, 'binary', 'utf8') - } - if (res[p] === undefined) { res[p] = tmp } else { res[p][1] = tmp } - tmp = '' - ++p - continue - } else if (!inquote && (char === ' ' || char === '\t')) { continue } +async function dist_bundle_wrapRequest(state, octokit, request, options) { + const limiter = new light(); + limiter.on("failed", function(error, info) { + const maxRetries = ~~error.request.request.retries; + const after = ~~error.request.request.retryAfter; + options.request.retryCount = info.retryCount + 1; + if (maxRetries > info.retryCount) { + return after * state.retryAfterBaseValue; } - tmp += char + }); + return limiter.schedule( + requestWithGraphqlErrorHandling.bind(null, state, octokit, request), + options + ); +} +async function requestWithGraphqlErrorHandling(state, octokit, request, options) { + const response = await request(request, options); + if (response.data && response.data.errors && response.data.errors.length > 0 && /Something went wrong while executing your query/.test( + response.data.errors[0].message + )) { + const error = new RequestError(response.data.errors[0].message, 500, { + request: options, + response + }); + return errorRequest(state, octokit, error, options); } - if (charset && tmp.length) { - tmp = decodeText(tmp.replace(RE_ENCODED, encodedReplacer), - 'binary', - charset) - } else if (tmp) { - tmp = decodeText(tmp, 'binary', 'utf8') + return response; +} + +// pkg/dist-src/index.js +function retry(octokit, octokitOptions) { + const state = Object.assign( + { + enabled: true, + retryAfterBaseValue: 1e3, + doNotRetry: [400, 401, 403, 404, 422, 451], + retries: 3 + }, + octokitOptions.retry + ); + if (state.enabled) { + octokit.hook.error("request", errorRequest.bind(null, state, octokit)); + octokit.hook.wrap("request", dist_bundle_wrapRequest.bind(null, state, octokit)); } + return { + retry: { + retryRequest: (error, retries, retryAfter) => { + error.request.request = Object.assign({}, error.request.request, { + retries, + retryAfter + }); + return error; + } + } + }; +} +retry.VERSION = plugin_retry_dist_bundle_VERSION; - if (res[p] === undefined) { - if (tmp) { res[p] = tmp } - } else { res[p][1] = tmp } - return res +;// CONCATENATED MODULE: ./node_modules/@octokit/plugin-request-log/dist-src/version.js +const dist_src_version_VERSION = "5.3.1"; + + +;// CONCATENATED MODULE: ./node_modules/@octokit/plugin-request-log/dist-src/index.js + +function requestLog(octokit) { + octokit.hook.wrap("request", (request, options) => { + octokit.log.debug("request", options); + const start = Date.now(); + const requestOptions = octokit.request.endpoint.parse(options); + const path = requestOptions.url.replace(options.baseUrl, ""); + return request(options).then((response) => { + const requestId = response.headers["x-github-request-id"]; + octokit.log.info( + `${requestOptions.method} ${path} - ${response.status} with id ${requestId} in ${Date.now() - start}ms` + ); + return response; + }).catch((error) => { + const requestId = error.response?.headers["x-github-request-id"] || "UNKNOWN"; + octokit.log.error( + `${requestOptions.method} ${path} - ${error.status} with id ${requestId} in ${Date.now() - start}ms` + ); + throw error; + }); + }); } 
+requestLog.VERSION = dist_src_version_VERSION; -module.exports = parseParams + +// EXTERNAL MODULE: ./node_modules/lodash/lodash.js +var lodash = __nccwpck_require__(250); +;// CONCATENATED MODULE: external "readline" +const external_readline_namespaceObject = require("readline"); +// EXTERNAL MODULE: ./node_modules/adm-zip/adm-zip.js +var adm_zip = __nccwpck_require__(6761); +// EXTERNAL MODULE: ./node_modules/@actions/artifact/lib/artifact-client.js +var artifact_client = __nccwpck_require__(2605); +;// CONCATENATED MODULE: ./node_modules/@zaproxy/actions-common-scans/dist/models/FilteredSite.js +function isFilteredSite(site) { + return Array.isArray(site.ignoredAlerts); +} + +;// CONCATENATED MODULE: ./node_modules/@zaproxy/actions-common-scans/dist/models/DifferenceSite.js +function isDifferenceSite(site) { + return Array.isArray(site.removedAlerts); +} + +;// CONCATENATED MODULE: ./node_modules/@zaproxy/actions-common-scans/dist/action-helper.js + + + + + + + +function createReadStreamSafe(filename) { + return new Promise((resolve, reject) => { + const fileStream = external_fs_.createReadStream(filename); + fileStream.on("error", reject).on("open", () => { + resolve(fileStream); + }); + }); +} +const actionHelper = { + getRunnerID: (body) => { + const results = body.match("RunnerID:\\d+"); + if (results !== null && results.length !== 0) { + return results[0].split(":")[1]; + } + return null; + }, + processLineByLine: async (tsvFile) => { + const plugins = []; + try { + const fileStream = await createReadStreamSafe(tsvFile); + const rl = external_readline_namespaceObject.createInterface({ + input: fileStream, + crlfDelay: Infinity, + }); + for await (const line of rl) { + if (!line.startsWith("#")) { + const tmp = line.split("\t"); + if (tmp[0].trim() !== "" && + tmp[1].trim().toUpperCase() === "IGNORE") { + plugins.push(tmp[0].trim()); + } + } + } + } + catch (err) { + console.log(`Error when reading the rules file: ${tsvFile}`); + } + return plugins; + }, + createMessage: (sites, runnerID, runnerLink) => { + const NXT_LINE = "\n"; + const TAB = "\t"; + const BULLET = "-"; + let msg = ""; + const instanceCount = 5; + sites.forEach((site) => { + msg = + msg + + `${BULLET} Site: [${site["@name"]}](${site["@name"]}) ${NXT_LINE}`; + if ("alerts" in site) { + if (site.alerts.length !== 0) { + msg = `${msg} ${TAB} **New Alerts** ${NXT_LINE}`; + site.alerts.forEach((alert) => { + msg = + msg + + TAB + + `${BULLET} **${alert.name}** [${alert.pluginid}] total: ${alert.instances.length}: ${NXT_LINE}`; + for (let i = 0; i < alert.instances.length; i++) { + if (i >= instanceCount) { + msg = msg + TAB + TAB + `${BULLET} .. 
${NXT_LINE}`; + break; + } + const instance = alert.instances[i]; + msg = + msg + + TAB + + TAB + + `${BULLET} [${instance.uri}](${instance.uri}) ${NXT_LINE}`; + } + }); + msg = msg + NXT_LINE; + } + } + if (isDifferenceSite(site)) { + if (site.removedAlerts.length !== 0) { + msg = `${msg} ${TAB} **Resolved Alerts** ${NXT_LINE}`; + site.removedAlerts.forEach((alert) => { + msg = + msg + + TAB + + `${BULLET} **${alert.name}** [${alert.pluginid}] total: ${alert.instances.length}: ${NXT_LINE}`; + }); + msg = msg + NXT_LINE; + } + } + if (isFilteredSite(site)) { + if (site.ignoredAlerts.length !== 0) { + msg = `${msg} ${TAB} **Ignored Alerts** ${NXT_LINE}`; + site.ignoredAlerts.forEach((alert) => { + msg = + msg + + TAB + + `${BULLET} **${alert.name}** [${alert.pluginid}] total: ${alert.instances.length}: ${NXT_LINE}`; + }); + msg = msg + NXT_LINE; + } + } + msg = msg + NXT_LINE; + }); + if (msg.trim() !== "") { + msg = msg + NXT_LINE + runnerLink; + msg = msg + NXT_LINE + runnerID; + } + return msg; + }, + generateDifference: (newReport, oldReport) => { + newReport.updated = false; + const siteClone = []; + newReport.site.forEach((newReportSite) => { + // Check if the new report site already exists in the previous report + const previousSite = lodash.filter(oldReport.site, (s) => s["@name"] === newReportSite["@name"]); + // If does not exists add it to the array without further processing + if (previousSite.length === 0) { + newReport.updated = true; + siteClone.push(newReportSite); + } + else { + // deep clone the variable for further processing + const newSite = lodash.clone(newReportSite); + const currentAlerts = newReportSite.alerts; + const previousAlerts = previousSite[0].alerts; + const newAlerts = lodash.differenceBy(currentAlerts, previousAlerts, "pluginid"); + let removedAlerts = lodash.differenceBy(previousAlerts, currentAlerts, "pluginid"); + let ignoredAlerts = []; + if (isFilteredSite(newReportSite) && isFilteredSite(previousSite[0])) { + ignoredAlerts = lodash.differenceBy(newReportSite.ignoredAlerts, previousSite[0].ignoredAlerts, "pluginid"); + } + else if (isFilteredSite(newReportSite)) { + ignoredAlerts = newReportSite.ignoredAlerts; + } + removedAlerts = lodash.differenceBy(removedAlerts, ignoredAlerts, "pluginid"); + newSite.alerts = newAlerts; + newSite.removedAlerts = removedAlerts; + newSite.ignoredAlerts = ignoredAlerts; + siteClone.push(newSite); + if (newAlerts.length !== 0 || + removedAlerts.length !== 0 || + ignoredAlerts.length !== 0) { + newReport.updated = true; + } + } + }); + return siteClone; + }, + readMDFile: async (reportName) => { + let res = ""; + try { + res = external_fs_.readFileSync(reportName, { encoding: "base64" }); + } + catch (err) { + console.log("error while reading the markdown file!"); + } + return res; + }, + checkIfAlertsExists: (jsonReport) => { + return jsonReport.site.some((s) => { + return "alerts" in s && s.alerts.length !== 0; + }); + }, + filterReport: async (jsonReport, plugins) => { + jsonReport.site.forEach((s) => { + if ("alerts" in s && s.alerts.length !== 0) { + console.log(`starting to filter the alerts for site: ${s["@name"]}`); + const newAlerts = s.alerts.filter(function (e) { + return !plugins.includes(e.pluginid); + }); + const removedAlerts = s.alerts.filter(function (e) { + return plugins.includes(e.pluginid); + }); + s.alerts = newAlerts; + s.ignoredAlerts = removedAlerts; + console.log(`#${newAlerts.length} alerts have been identified` + + ` and #${removedAlerts.length} alerts have been ignored for the site.`); + } 
+ }); + return jsonReport; + }, + readPreviousReport: async (octokit, owner, repo, workSpace, runnerID, artifactName = "zap_scan") => { + let previousReport; + try { + const artifactList = await octokit.request("GET /repos/{owner}/{repo}/actions/runs/{run_id}/artifacts", { + owner: owner, + repo: repo, + run_id: runnerID, + }); + const artifacts = artifactList.data.artifacts; + let artifactID; + if (artifacts.length !== 0) { + artifacts.forEach((a) => { + if (a.name === artifactName) { + artifactID = a.id; + } + }); + } + if (artifactID !== undefined) { + const download = await octokit.request("GET /repos/{owner}/{repo}/actions/artifacts/{artifact_id}/zip", { + owner: owner, + repo: repo, + artifact_id: artifactID, + archive_format: "zip", + }); + const zip = new adm_zip(Buffer.from(download.data)); + const zipEntries = zip.getEntries(); + zipEntries.forEach(function (zipEntry) { + if (zipEntry.entryName === "report_json.json") { + previousReport = JSON.parse(zipEntry.getData().toString("utf8")); + } + }); + } + } + catch (e) { + console.log(`Error occurred while downloading the artifacts!`); + } + return previousReport; + }, + uploadArtifacts: async (rootDir, mdReport, jsonReport, htmlReport, artifactName = "zap_scan") => { + const artifactClient = (0,artifact_client/* create */.U)(); + const files = [ + `${rootDir}/${mdReport}`, + `${rootDir}/${jsonReport}`, + `${rootDir}/${htmlReport}`, + ]; + const rootDirectory = rootDir; + const options = { + continueOnError: true, + }; + await artifactClient.uploadArtifact(artifactName, files, rootDirectory, options); + }, +}; +/* harmony default export */ const action_helper = (actionHelper); + +;// CONCATENATED MODULE: ./node_modules/@zaproxy/actions-common-scans/dist/index.js + + + + + + +const actionCommon = { + processReport: async (token, workSpace, plugins, currentRunnerID, issueTitle, repoName, allowIssueWriting = true, artifactName = "zap_scan", fetcher = fetch) => { + const jsonReportName = "report_json.json"; + const mdReportName = "report_md.md"; + const htmlReportName = "report_html.html"; + if (!allowIssueWriting) { + await action_helper.uploadArtifacts(workSpace, mdReportName, jsonReportName, htmlReportName, artifactName); + return; + } + let openIssue; + let currentReport; + let previousRunnerID; + let previousReport = {}; + let create_new_issue = false; + const tmp = repoName.split("/"); + const owner = tmp[0]; + const repo = tmp[1]; + const MyOctokit = Octokit.plugin(retry, throttling, requestLog); + if (fetcher !== fetch) { + MyOctokit.plugin(requestLog); + } + const octokit = new MyOctokit({ + auth: token, + baseUrl: process.env.GITHUB_API_URL ?? "https://api.github.com", + throttle: { + onRateLimit: (retryAfter, options) => { + if (options?.request?.retryCount <= 2) { + console.log(`Hit rate limit. Retrying after ${retryAfter} seconds!`); + return true; + } + }, + onSecondaryRateLimit: (retryAfter) => { + console.log(`Hit secondary rate limit. Retrying after ${retryAfter} seconds!`); + return true; + }, + }, + request: { + // Fetch is a type from @octokit/types, which is set to any.. + // so we have to disable the eslint check for now. 
+ // eslint-disable-next-line @typescript-eslint/no-unsafe-assignment + fetch: fetcher, + }, + }); + try { + const jReportFile = external_fs_.readFileSync(`${workSpace}/${jsonReportName}`); + currentReport = JSON.parse(jReportFile.toString()); + } + catch (e) { + console.log("Failed to locate the json report generated by ZAP Scan!"); + return; + } + const issues = await octokit.request("GET /search/issues", { + q: encodeURI(`is:issue state:open repo:${owner}/${repo} ${issueTitle}`).replace(/%20/g, "+"), + sort: "updated", + }); + // If there is no existing open issue then create a new issue + if (issues.data.items.length === 0) { + create_new_issue = true; + } + else { + let login = "github-actions[bot]"; + try { + login = (await octokit.request("GET /user")).data.login; + } + catch (e) { + console.log(`Using ${login} to search for issues.`); + } + // Sometimes search API returns recently closed issue as an open issue + for (const issue of issues.data.items) { + if (issue.state === "open" && issue.user.login === login) { + openIssue = issue; + break; + } + } + if (openIssue === undefined) { + create_new_issue = true; + } + else { + console.log(`Ongoing open issue has been identified #${openIssue.number}`); + // If there is no comments then read the body + if (openIssue.comments === 0) { + previousRunnerID = action_helper.getRunnerID(openIssue.body); + } + else { + const comments = await octokit.request("GET /repos/{owner}/{repo}/issues/{issue_number}/comments", { + owner: owner, + repo: repo, + issue_number: openIssue.number, + }); + let lastBotComment; + const lastCommentIndex = comments.data.length - 1; + for (let i = lastCommentIndex; i >= 0; i--) { + if (comments.data[i].user.login === login) { + lastBotComment = comments.data[i]; + break; + } + } + if (lastBotComment === undefined) { + previousRunnerID = action_helper.getRunnerID(openIssue.body); + } + else { + previousRunnerID = action_helper.getRunnerID(lastBotComment.body); + } + } + if (previousRunnerID !== null) { + previousReport = await action_helper.readPreviousReport(octokit, owner, repo, workSpace, previousRunnerID, artifactName); + if (previousReport === undefined) { + create_new_issue = true; + } + } + } + } + if (plugins.length !== 0) { + console.log(`${plugins.length} plugins will be ignored according to the rules configuration`); + currentReport = await action_helper.filterReport(currentReport, plugins); + // Update the newly filtered report + external_fs_.unlinkSync(`${workSpace}/${jsonReportName}`); + external_fs_.writeFileSync(`${workSpace}/${jsonReportName}`, JSON.stringify(currentReport)); + console.log("The current report is updated with the ignored alerts!"); + } + const newAlertExits = action_helper.checkIfAlertsExists(currentReport); + console.log(`Alerts present in the current report: ${newAlertExits.toString()}`); + if (!newAlertExits) { + // If no new alerts have been found close the issue + console.log("No new alerts have been identified by the ZAP Scan"); + if (openIssue != null && openIssue.state === "open") { + // close the issue with a comment + console.log(`Starting to close the issue #${openIssue.number}`); + try { + await octokit.request("POST /repos/{owner}/{repo}/issues/{issue_number}/comments", { + owner: owner, + repo: repo, + issue_number: openIssue.number, + body: "All the alerts have been resolved during the last ZAP Scan!", + }); + await octokit.request("PATCH /repos/{owner}/{repo}/issues/{issue_number}", { + owner: owner, + repo: repo, + issue_number: openIssue.number, + state: "closed", + 
}); + console.log(`Successfully closed the issue #${openIssue.number}`); + } + catch (err) { + console.log(`Error occurred while closing the issue with a comment! err: ${err.toString()}`); + } + } + else if (openIssue != null && openIssue.state === "closed") { + console.log("No alerts found by ZAP Scan and no active issue is found in the repository, exiting the program!"); + } + return; + } + const runnerInfo = `RunnerID:${currentRunnerID}`; + const runnerLink = `View the [following link](${process.env.GITHUB_SERVER_URL}/${owner}/${repo}/actions/runs/${currentRunnerID})` + + ` to download the report.`; + if (create_new_issue) { + const msg = action_helper.createMessage(currentReport.site, runnerInfo, runnerLink) + + "\n\n---\n[ZAP by Checkmarx](https://checkmarx.com/)"; + const newIssue = await octokit.request("POST /repos/{owner}/{repo}/issues", { + owner: owner, + repo: repo, + title: issueTitle, + body: msg, + }); + console.log(`Process completed successfully and a new issue #${newIssue.data.number} has been created for the ZAP Scan.`); + } + else { + const siteClone = action_helper.generateDifference(currentReport, previousReport); + if (currentReport.updated) { + console.log("The current report has changes compared to the previous report"); + try { + const msg = action_helper.createMessage(siteClone, runnerInfo, runnerLink); + await octokit.request("POST /repos/{owner}/{repo}/issues/{issue_number}/comments", { + owner: owner, + repo: repo, + issue_number: openIssue.number, + body: msg, + }); + console.log(`The issue #${openIssue.number} has been updated with the latest ZAP scan results!`); + console.log("ZAP Scan process completed successfully!"); + } + catch (err) { + console.log(`Error occurred while updating the issue #${openIssue.number} with the latest ZAP scan: ${err.toString()}`); + } + } + else { + console.log("No changes have been observed from the previous scan and current scan!, exiting the program!"); + } + } + await action_helper.uploadArtifacts(workSpace, mdReportName, jsonReportName, htmlReportName, artifactName); + }, +}; +const main = actionCommon; +const helper = action_helper; /***/ }) @@ -59572,6 +55043,34 @@ module.exports = parseParams /******/ } /******/ /************************************************************************/ +/******/ /* webpack/runtime/define property getters */ +/******/ (() => { +/******/ // define getter functions for harmony exports +/******/ __nccwpck_require__.d = (exports, definition) => { +/******/ for(var key in definition) { +/******/ if(__nccwpck_require__.o(definition, key) && !__nccwpck_require__.o(exports, key)) { +/******/ Object.defineProperty(exports, key, { enumerable: true, get: definition[key] }); +/******/ } +/******/ } +/******/ }; +/******/ })(); +/******/ +/******/ /* webpack/runtime/hasOwnProperty shorthand */ +/******/ (() => { +/******/ __nccwpck_require__.o = (obj, prop) => (Object.prototype.hasOwnProperty.call(obj, prop)) +/******/ })(); +/******/ +/******/ /* webpack/runtime/make namespace object */ +/******/ (() => { +/******/ // define __esModule on exports +/******/ __nccwpck_require__.r = (exports) => { +/******/ if(typeof Symbol !== 'undefined' && Symbol.toStringTag) { +/******/ Object.defineProperty(exports, Symbol.toStringTag, { value: 'Module' }); +/******/ } +/******/ Object.defineProperty(exports, '__esModule', { value: true }); +/******/ }; +/******/ })(); +/******/ /******/ /* webpack/runtime/node module decorator */ /******/ (() => { /******/ __nccwpck_require__.nmd = (module) => { @@ -59591,7 
+55090,7 @@ var __webpack_exports__ = {}; (() => { const core = __nccwpck_require__(2186); const exec = __nccwpck_require__(1514); -const common = __nccwpck_require__(4049); +const common = __nccwpck_require__(4048); const _ = __nccwpck_require__(250); // Default file names diff --git a/package-lock.json b/package-lock.json index 4299c02..0441182 100644 --- a/package-lock.json +++ b/package-lock.json @@ -1,7 +1,7 @@ { "name": "action-baseline-scan", "version": "1.0.0", - "lockfileVersion": 2, + "lockfileVersion": 3, "requires": true, "packages": { "": { @@ -10,7 +10,7 @@ "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", - "@zaproxy/actions-common-scans": "^1.1.0", + "@zaproxy/actions-common-scans": "^1.2.1", "lodash": "^4.17.21" }, "devDependencies": { @@ -18,9 +18,9 @@ } }, "node_modules/@actions/artifact": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.1.1.tgz", - "integrity": "sha512-Vv4y0EW0ptEkU+Pjs5RGS/0EryTvI6s79LjSV9Gg/h+O3H/ddpjhuX/Bi/HZE4pbNPyjGtQjbdFWphkZhmgabA==", + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.1.2.tgz", + "integrity": "sha512-1gLONA4xw3/Q/9vGxKwkFdV9u1LE2RWGx/IpAqg28ZjprCnJFjwn4pA7LtShqg5mg5WhMek2fjpyH1leCmOlQQ==", "dependencies": { "@actions/core": "^1.9.1", "@actions/http-client": "^2.0.1", @@ -29,9 +29,9 @@ } }, "node_modules/@actions/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", - "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", + "version": "1.10.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", + "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", "dependencies": { "@actions/http-client": "^2.0.1", "uuid": "^8.3.2" @@ -45,21 +45,10 @@ "@actions/io": "^1.0.1" } }, - "node_modules/@actions/github": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", - "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", - "dependencies": { - "@actions/http-client": "^2.2.0", - "@octokit/core": "^5.0.1", - "@octokit/plugin-paginate-rest": "^9.0.0", - "@octokit/plugin-rest-endpoint-methods": "^10.0.0" - } - }, "node_modules/@actions/http-client": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", - "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", + "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", "dependencies": { "tunnel": "^0.0.6", "undici": "^5.25.4" @@ -70,649 +59,3461 @@ "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" }, - "node_modules/@fastify/busboy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "node_modules/@ampproject/remapping": { + "version": "2.3.0", + "resolved": 
"https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", + "integrity": "sha512-30iZtAPgz+LTIYoeivqYo853f02jBYSd5uGnGpkFV0M3xOt9aN73erkgYAmZU43x4VfqcnLxW9Kpg3R5LC4YYw==", + "dependencies": { + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.24" + }, "engines": { - "node": ">=14" + "node": ">=6.0.0" } }, - "node_modules/@octokit/auth-token": { - "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", - "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==", + "node_modules/@babel/code-frame": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", + "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", + "dependencies": { + "@babel/highlight": "^7.24.7", + "picocolors": "^1.0.0" + }, "engines": { - "node": ">= 18" + "node": ">=6.9.0" } }, - "node_modules/@octokit/core": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.1.0.tgz", - "integrity": "sha512-BDa2VAMLSh3otEiaMJ/3Y36GU4qf6GI+VivQ/P41NC6GHcdxpKlqV0ikSZ5gdQsmS3ojXeRx5vasgNTinF0Q4g==", + "node_modules/@babel/compat-data": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.4.tgz", + "integrity": "sha512-+LGRog6RAsCJrrrg/IO6LGmpphNe5DiK30dGjCoxxeGv49B10/3XYGxPsAwrDlMFcFEvdAUavDT8r9k/hSyQqQ==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/core": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", + "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", "dependencies": { - "@octokit/auth-token": "^4.0.0", - "@octokit/graphql": "^7.0.0", - "@octokit/request": "^8.0.2", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" + "@ampproject/remapping": "^2.2.0", + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.0", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-module-transforms": "^7.25.2", + "@babel/helpers": "^7.25.0", + "@babel/parser": "^7.25.0", + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.2", + "@babel/types": "^7.25.2", + "convert-source-map": "^2.0.0", + "debug": "^4.1.0", + "gensync": "^1.0.0-beta.2", + "json5": "^2.2.3", + "semver": "^6.3.1" }, "engines": { - "node": ">= 18" + "node": ">=6.9.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/babel" } }, - "node_modules/@octokit/endpoint": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.4.tgz", - "integrity": "sha512-DWPLtr1Kz3tv8L0UvXTDP1fNwM0S+z6EJpRcvH66orY6Eld4XBMCSYsaWp4xIm61jTWxK68BrR7ibO+vSDnZqw==", + "node_modules/@babel/generator": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.6.tgz", + "integrity": "sha512-VPC82gr1seXOpkjAAKoLhP50vx4vGNlF4msF64dSFq1P8RfB+QAuJWGHPXXPc8QyfVWwwB/TNNU4+ayZmHNbZw==", "dependencies": { - "@octokit/types": "^12.0.0", - "universal-user-agent": "^6.0.0" + "@babel/types": "^7.25.6", + "@jridgewell/gen-mapping": "^0.3.5", + "@jridgewell/trace-mapping": "^0.3.25", + "jsesc": "^2.5.1" }, "engines": { - "node": ">= 18" + "node": ">=6.9.0" } }, - "node_modules/@octokit/graphql": { - "version": "7.0.2", - 
"resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.0.2.tgz", - "integrity": "sha512-OJ2iGMtj5Tg3s6RaXH22cJcxXRi7Y3EBqbHTBRq+PQAqfaS8f/236fUrWhfSn8P4jovyzqucxme7/vWSSZBX2Q==", + "node_modules/@babel/helper-annotate-as-pure": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-annotate-as-pure/-/helper-annotate-as-pure-7.24.7.tgz", + "integrity": "sha512-BaDeOonYvhdKw+JoMVkAixAAJzG2jVPIwWoKBPdYuY9b452e2rPuI9QPYh3KpofZ3pW2akOmwZLOiOsHMiqRAg==", "dependencies": { - "@octokit/request": "^8.0.1", - "@octokit/types": "^12.0.0", - "universal-user-agent": "^6.0.0" + "@babel/types": "^7.24.7" }, "engines": { - "node": ">= 18" + "node": ">=6.9.0" } }, - "node_modules/@octokit/openapi-types": { - "version": "20.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", - "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" + "node_modules/@babel/helper-builder-binary-assignment-operator-visitor": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-builder-binary-assignment-operator-visitor/-/helper-builder-binary-assignment-operator-visitor-7.24.7.tgz", + "integrity": "sha512-xZeCVVdwb4MsDBkkyZ64tReWYrLRHlMN72vP7Bdm3OUOuyFZExhsHUUnuWnm2/XOlAJzR0LfPpB56WXZn0X/lA==", + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } }, - "node_modules/@octokit/plugin-paginate-rest": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz", - "integrity": "sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==", + "node_modules/@babel/helper-compilation-targets": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", + "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", "dependencies": { - "@octokit/types": "^12.6.0" + "@babel/compat-data": "^7.25.2", + "@babel/helper-validator-option": "^7.24.8", + "browserslist": "^4.23.1", + "lru-cache": "^5.1.1", + "semver": "^6.3.1" }, "engines": { - "node": ">= 18" + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-create-class-features-plugin": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/helper-create-class-features-plugin/-/helper-create-class-features-plugin-7.25.4.tgz", + "integrity": "sha512-ro/bFs3/84MDgDmMwbcHgDa8/E6J3QKNTk4xJJnVeFtGE+tL0K26E3pNxhYz2b67fJpt7Aphw5XcploKXuCvCQ==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-member-expression-to-functions": "^7.24.8", + "@babel/helper-optimise-call-expression": "^7.24.7", + "@babel/helper-replace-supers": "^7.25.0", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", + "@babel/traverse": "^7.25.4", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" }, "peerDependencies": { - "@octokit/core": "5" + "@babel/core": "^7.0.0" } }, - "node_modules/@octokit/plugin-rest-endpoint-methods": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", - "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", + "node_modules/@babel/helper-create-regexp-features-plugin": { + "version": "7.25.2", + 
"resolved": "https://registry.npmjs.org/@babel/helper-create-regexp-features-plugin/-/helper-create-regexp-features-plugin-7.25.2.tgz", + "integrity": "sha512-+wqVGP+DFmqwFD3EH6TMTfUNeqDehV3E/dl+Sd54eaXqm17tEUNbEIn4sVivVowbvUpOtIGxdo3GoXyDH9N/9g==", "dependencies": { - "@octokit/types": "^12.6.0" + "@babel/helper-annotate-as-pure": "^7.24.7", + "regexpu-core": "^5.3.1", + "semver": "^6.3.1" }, "engines": { - "node": ">= 18" + "node": ">=6.9.0" }, "peerDependencies": { - "@octokit/core": "5" + "@babel/core": "^7.0.0" } }, - "node_modules/@octokit/request": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.2.0.tgz", - "integrity": "sha512-exPif6x5uwLqv1N1irkLG1zZNJkOtj8bZxuVHd71U5Ftuxf2wGNvAJyNBcPbPC+EBzwYEbBDdSFb8EPcjpYxPQ==", + "node_modules/@babel/helper-define-polyfill-provider": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/@babel/helper-define-polyfill-provider/-/helper-define-polyfill-provider-0.6.2.tgz", + "integrity": "sha512-LV76g+C502biUK6AyZ3LK10vDpDyCzZnhZFXkH1L75zHPj68+qc8Zfpx2th+gzwA2MzyK+1g/3EPl62yFnVttQ==", "dependencies": { - "@octokit/endpoint": "^9.0.0", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "universal-user-agent": "^6.0.0" + "@babel/helper-compilation-targets": "^7.22.6", + "@babel/helper-plugin-utils": "^7.22.5", + "debug": "^4.1.1", + "lodash.debounce": "^4.0.8", + "resolve": "^1.14.2" }, - "engines": { - "node": ">= 18" + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" } }, - "node_modules/@octokit/request-error": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.0.1.tgz", - "integrity": "sha512-X7pnyTMV7MgtGmiXBwmO6M5kIPrntOXdyKZLigNfQWSEQzVxR4a4vo49vJjTWX70mPndj8KhfT4Dx+2Ng3vnBQ==", + "node_modules/@babel/helper-member-expression-to-functions": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-member-expression-to-functions/-/helper-member-expression-to-functions-7.24.8.tgz", + "integrity": "sha512-LABppdt+Lp/RlBxqrh4qgf1oEH/WxdzQNDJIu5gC/W1GyvPVrOBiItmmM8wan2fm4oYqFuFfkXmlGpLQhPY8CA==", "dependencies": { - "@octokit/types": "^12.0.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" + "@babel/traverse": "^7.24.8", + "@babel/types": "^7.24.8" }, "engines": { - "node": ">= 18" + "node": ">=6.9.0" } }, - "node_modules/@octokit/types": { - "version": "12.6.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", - "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", + "node_modules/@babel/helper-module-imports": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", + "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", "dependencies": { - "@octokit/openapi-types": "^20.0.0" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" } }, - "node_modules/@vercel/ncc": { - "version": "0.36.1", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz", - "integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==", - "dev": true, - "bin": { - "ncc": "dist/ncc/cli.js" + "node_modules/@babel/helper-module-transforms": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", + 
"integrity": "sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", + "dependencies": { + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-simple-access": "^7.24.7", + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/@zaproxy/actions-common-scans": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@zaproxy/actions-common-scans/-/actions-common-scans-1.1.0.tgz", - "integrity": "sha512-r+nD3DwcESwd6yJRHOhFGfH30MZL/za8oWfdSy+IP7gxdJtg948eYvli9LlW81FGb7H6sRSfPPWu3CpJuvCygg==", + "node_modules/@babel/helper-optimise-call-expression": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-optimise-call-expression/-/helper-optimise-call-expression-7.24.7.tgz", + "integrity": "sha512-jKiTsW2xmWwxT1ixIdfXUZp+P5yURx2suzLZr5Hi64rURpDYdMW0pv+Uf17EYk2Rd428Lx4tLsnjGJzYKDM/6A==", "dependencies": { - "@actions/artifact": "^1.1.0", - "@actions/github": "^6.0.0", - "adm-zip": "^0.5.10", - "jest-os-detection": "^1.3.1", - "lodash": "^4.17.15" + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" } }, - "node_modules/adm-zip": { - "version": "0.5.10", - "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.10.tgz", - "integrity": "sha512-x0HvcHqVJNTPk/Bw8JbLWlWoo6Wwnsug0fnYYro1HBrjxZ3G7/AZk7Ahv8JwDe1uIcz8eBqvu86FuF1POiG7vQ==", + "node_modules/@babel/helper-plugin-utils": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz", + "integrity": "sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==", "engines": { - "node": ">=6.0" + "node": ">=6.9.0" } }, - "node_modules/balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" - }, - "node_modules/before-after-hook": { - "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "node_modules/brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "node_modules/@babel/helper-remap-async-to-generator": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helper-remap-async-to-generator/-/helper-remap-async-to-generator-7.25.0.tgz", + "integrity": "sha512-NhavI2eWEIz/H9dbrG0TuOicDhNexze43i5z7lEqwYm0WEZVTwnPpA0EafUTP7+6/W79HWIP2cTe3Z5NiSTVpw==", "dependencies": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-wrap-function": "^7.25.0", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" - }, - "node_modules/deprecation": { - "version": "2.3.1", - 
"resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" - }, - "node_modules/fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" - }, - "node_modules/glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "node_modules/@babel/helper-replace-supers": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helper-replace-supers/-/helper-replace-supers-7.25.0.tgz", + "integrity": "sha512-q688zIvQVYtZu+i2PsdIu/uWGRpfxzr5WESsfpShfZECkO+d2o+WROWezCi/Q6kJ0tfPa5+pUGUlfx2HhrA3Bg==", "dependencies": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "@babel/helper-member-expression-to-functions": "^7.24.8", + "@babel/helper-optimise-call-expression": "^7.24.7", + "@babel/traverse": "^7.25.0" }, "engines": { - "node": "*" + "node": ">=6.9.0" }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "node_modules/@babel/helper-simple-access": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", + "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", "dependencies": { - "once": "^1.3.0", - "wrappy": "1" + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" } }, - "node_modules/inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + "node_modules/@babel/helper-skip-transparent-expression-wrappers": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-skip-transparent-expression-wrappers/-/helper-skip-transparent-expression-wrappers-7.24.7.tgz", + "integrity": "sha512-IO+DLT3LQUElMbpzlatRASEyQtfhSE0+m465v++3jyyXeBTBUjtVZg28/gHeV5mrTJqvEKhKroBGAvhW+qPHiQ==", + "dependencies": { + "@babel/traverse": "^7.24.7", + "@babel/types": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + } }, - "node_modules/jest-os-detection": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jest-os-detection/-/jest-os-detection-1.3.1.tgz", - "integrity": "sha512-1epFqoue/1J8f9SHOegDE4mY9kJBpetcZAeWtB0SG4XjtkI252mOu4TLJMjl1hDb7v6VefKPeWUweu/TsR7ZcQ==" + "node_modules/@babel/helper-string-parser": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", + "integrity": "sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", + "engines": { + "node": ">=6.9.0" + } }, - "node_modules/lodash": { - "version": "4.17.21", - "resolved": 
"https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "node_modules/@babel/helper-validator-identifier": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", + "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "engines": { + "node": ">=6.9.0" + } }, - "node_modules/minimatch": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", - "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", + "node_modules/@babel/helper-validator-option": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", + "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/helper-wrap-function": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/helper-wrap-function/-/helper-wrap-function-7.25.0.tgz", + "integrity": "sha512-s6Q1ebqutSiZnEjaofc/UKDyC4SbzV5n5SrA2Gq8UawLycr3i04f1dX4OzoQVnexm6aOCh37SQNYlJ/8Ku+PMQ==", "dependencies": { - "brace-expansion": "^1.1.7" + "@babel/template": "^7.25.0", + "@babel/traverse": "^7.25.0", + "@babel/types": "^7.25.0" }, "engines": { - "node": "*" + "node": ">=6.9.0" } }, - "node_modules/once": { - "version": "1.4.0", - "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", - "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", + "node_modules/@babel/helpers": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.6.tgz", + "integrity": "sha512-Xg0tn4HcfTijTwfDwYlvVCl43V6h4KyVVX2aEm4qdO/PC6L2YvzLHFdmxhoeSA3eslcE6+ZVXHgWwopXYLNq4Q==", "dependencies": { - "wrappy": "1" + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.6" + }, + "engines": { + "node": ">=6.9.0" } }, - "node_modules/path-is-absolute": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "node_modules/@babel/highlight": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", + "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", + "dependencies": { + "@babel/helper-validator-identifier": "^7.24.7", + "chalk": "^2.4.2", + "js-tokens": "^4.0.0", + "picocolors": "^1.0.0" + }, "engines": { - "node": ">=0.10.0" + "node": ">=6.9.0" } }, - "node_modules/rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", + "node_modules/@babel/parser": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.6.tgz", + "integrity": "sha512-trGdfBdbD0l1ZPmcJ83eNxB9rbEax4ALFTF7fN386TMYbeCQbyme5cOEXQhbGXKebwGaB/J52w1mrklMcbgy6Q==", "dependencies": { - "glob": "^7.1.3" + "@babel/types": "^7.25.6" }, "bin": { - "rimraf": "bin.js" + "parser": 
"bin/babel-parser.js" }, - "funding": { - "url": "https://github.com/sponsors/isaacs" + "engines": { + "node": ">=6.0.0" } }, - "node_modules/tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", + "node_modules/@babel/plugin-bugfix-firefox-class-in-computed-class-key": { + "version": "7.25.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-firefox-class-in-computed-class-key/-/plugin-bugfix-firefox-class-in-computed-class-key-7.25.3.tgz", + "integrity": "sha512-wUrcsxZg6rqBXG05HG1FPYgsP6EvwF4WpBbxIpWIIYnH8wG0gzx3yZY3dtEHas4sTAOGkbTsc9EGPxwff8lRoA==", "dependencies": { - "rimraf": "^3.0.0" + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/traverse": "^7.25.3" }, "engines": { - "node": ">=8.17.0" + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/tmp-promise": { - "version": "3.0.3", - "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", - "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", + "node_modules/@babel/plugin-bugfix-safari-class-field-initializer-scope": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-class-field-initializer-scope/-/plugin-bugfix-safari-class-field-initializer-scope-7.25.0.tgz", + "integrity": "sha512-Bm4bH2qsX880b/3ziJ8KD711LT7z4u8CFudmjqle65AZj/HNUFhEf90dqYv6O86buWvSBmeQDjv0Tn2aF/bIBA==", "dependencies": { - "tmp": "^0.2.0" + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/tunnel": { - "version": "0.0.6", - "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "node_modules/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression/-/plugin-bugfix-safari-id-destructuring-collision-in-function-expression-7.25.0.tgz", + "integrity": "sha512-lXwdNZtTmeVOOFtwM/WDe7yg1PL8sYhRk/XH0FzbR2HDQ0xC+EnQ/JHeoMYSavtU115tnUk0q9CDyq8si+LMAA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, "engines": { - "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" } }, - "node_modules/undici": { - "version": "5.28.4", - "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", - "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", + "node_modules/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining/-/plugin-bugfix-v8-spread-parameters-in-optional-chaining-7.24.7.tgz", + "integrity": "sha512-+izXIbke1T33mY4MSNnrqhPXDz01WYhEf3yF5NbnUtkiNnm+XBZJl3kNfoK6NKmYlz/D07+l2GWVK/QfDkNCuQ==", "dependencies": { - "@fastify/busboy": "^2.0.0" + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", + "@babel/plugin-transform-optional-chaining": "^7.24.7" }, "engines": { - "node": ">=14.0" + "node": ">=6.9.0" + }, + 
"peerDependencies": { + "@babel/core": "^7.13.0" } }, - "node_modules/universal-user-agent": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", - "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" + "node_modules/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly/-/plugin-bugfix-v8-static-class-fields-redefine-readonly-7.25.0.tgz", + "integrity": "sha512-tggFrk1AIShG/RUQbEwt2Tr/E+ObkfwrPjR6BjbRvsx24+PSjK8zrq0GWPNCjo8qpRx4DuJzlcvWJqlm+0h3kw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "bin": { - "uuid": "dist/bin/uuid" + "node_modules/@babel/plugin-proposal-private-property-in-object": { + "version": "7.21.0-placeholder-for-preset-env.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-proposal-private-property-in-object/-/plugin-proposal-private-property-in-object-7.21.0-placeholder-for-preset-env.2.tgz", + "integrity": "sha512-SOSkfJDddaM7mak6cPEpswyTRnuRltl429hMraQEglW+OkovnCzsiszTmsrlY//qLFjCpQDFRvjdm2wA5pPm9w==", + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "node_modules/wrappy": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", - "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" - } - }, - "dependencies": { - "@actions/artifact": { - "version": "1.1.1", - "resolved": "https://registry.npmjs.org/@actions/artifact/-/artifact-1.1.1.tgz", - "integrity": "sha512-Vv4y0EW0ptEkU+Pjs5RGS/0EryTvI6s79LjSV9Gg/h+O3H/ddpjhuX/Bi/HZE4pbNPyjGtQjbdFWphkZhmgabA==", - "requires": { - "@actions/core": "^1.9.1", - "@actions/http-client": "^2.0.1", - "tmp": "^0.2.1", - "tmp-promise": "^3.0.2" + "node_modules/@babel/plugin-syntax-async-generators": { + "version": "7.8.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-async-generators/-/plugin-syntax-async-generators-7.8.4.tgz", + "integrity": "sha512-tycmZxkGfZaxhMRbXlPXuVFpdWlXpir2W4AMhSJgRKzk/eDlIXOhb2LHWoLpDF7TEHylV5zNhykX6KAgHJmTNw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "@actions/core": { - "version": "1.10.0", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.0.tgz", - "integrity": "sha512-2aZDDa3zrrZbP5ZYg159sNoLRb61nQ7awl5pSvIq5Qpj81vwDzdMRKzkWJGJuwVvWpvZKx7vspJALyvaaIQyug==", - "requires": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" + "node_modules/@babel/plugin-syntax-bigint": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-bigint/-/plugin-syntax-bigint-7.8.3.tgz", + "integrity": "sha512-wnTnFlG+YxQm3vDxpGE57Pj0srRU4sHE/mDkt1qv2YJJSeUAec2ma4WLUnUPeKjyrfntVwe/N6dCXpU+zL3Npg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "@actions/exec": { - "version": "1.1.1", - "resolved": 
"https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", - "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", - "requires": { - "@actions/io": "^1.0.1" + "node_modules/@babel/plugin-syntax-class-properties": { + "version": "7.12.13", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-properties/-/plugin-syntax-class-properties-7.12.13.tgz", + "integrity": "sha512-fm4idjKla0YahUNgFNLCB0qySdsoPiZP3iQE3rky0mBUtMZ23yDJ9SJdg6dXTSDnulOVqiF3Hgr9nbXvXTQZYA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.12.13" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "@actions/github": { - "version": "6.0.0", - "resolved": "https://registry.npmjs.org/@actions/github/-/github-6.0.0.tgz", - "integrity": "sha512-alScpSVnYmjNEXboZjarjukQEzgCRmjMv6Xj47fsdnqGS73bjJNDpiiXmp8jr0UZLdUB6d9jW63IcmddUP+l0g==", - "requires": { - "@actions/http-client": "^2.2.0", - "@octokit/core": "^5.0.1", - "@octokit/plugin-paginate-rest": "^9.0.0", - "@octokit/plugin-rest-endpoint-methods": "^10.0.0" - } - }, - "@actions/http-client": { - "version": "2.2.1", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.1.tgz", - "integrity": "sha512-KhC/cZsq7f8I4LfZSJKgCvEwfkE8o1538VoBeoGzokVLLnbFDEAdFD3UhoMklxo2un9NJVBdANOresx7vTHlHw==", - "requires": { - "tunnel": "^0.0.6", - "undici": "^5.25.4" + "node_modules/@babel/plugin-syntax-class-static-block": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-class-static-block/-/plugin-syntax-class-static-block-7.14.5.tgz", + "integrity": "sha512-b+YyPmr6ldyNnM6sqYeMWE+bgJcJpO6yS4QD7ymxgH34GBPNDM/THBh8iunyvKIZztiwLH4CJZ0RxTk9emgpjw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" } }, - "@actions/io": { - "version": "1.1.3", - "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", - "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==" + "node_modules/@babel/plugin-syntax-dynamic-import": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-dynamic-import/-/plugin-syntax-dynamic-import-7.8.3.tgz", + "integrity": "sha512-5gdGbFon+PszYzqs83S3E5mpi7/y/8M9eC90MRTZfduQOYW76ig6SOSPNe41IG5LoP3FGBn2N0RjVDSQiS94kQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-export-namespace-from": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-export-namespace-from/-/plugin-syntax-export-namespace-from-7.8.3.tgz", + "integrity": "sha512-MXf5laXo6c1IbEbegDmzGPwGNTsHZmEy6QGznu5Sh2UCWvueywb2ee+CCE4zQiZstxU9BMoQO9i6zUFSY0Kj0Q==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.3" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-assertions": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-assertions/-/plugin-syntax-import-assertions-7.25.6.tgz", + "integrity": "sha512-aABl0jHw9bZ2karQ/uUD6XP4u0SG22SJrOHFoL6XB1R7dTovOP4TzTlsxOYC5yQ1pdscVK2JTUnF6QL3ARoAiQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-syntax-import-attributes": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.25.6.tgz", + "integrity": "sha512-sXaDXaJN9SNLymBdlWFA+bjzBhFD617ZaFiY13dGt7TVslVvVgA6fkZOP7Ki3IGElC45lwHdOTrCtKZGVAWeLQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-import-meta": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-meta/-/plugin-syntax-import-meta-7.10.4.tgz", + "integrity": "sha512-Yqfm+XDx0+Prh3VSeEQCPU81yC+JWZ2pDPFSS4ZdpfZhp4MkFMaDC1UqseovEKwSUpnIL7+vK+Clp7bfh0iD7g==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-json-strings": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-json-strings/-/plugin-syntax-json-strings-7.8.3.tgz", + "integrity": "sha512-lY6kdGpWHvjoe2vk4WrAapEuBR69EMxZl+RoGRhrFGNYVK8mOPAW8VfbT/ZgrFbXlDNiiaxQnAtgVCZ6jv30EA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-logical-assignment-operators": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-logical-assignment-operators/-/plugin-syntax-logical-assignment-operators-7.10.4.tgz", + "integrity": "sha512-d8waShlpFDinQ5MtvGU9xDAOzKH47+FFoney2baFIoMr952hKOLp1HR7VszoZvOsV/4+RRszNY7D17ba0te0ig==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-nullish-coalescing-operator": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-nullish-coalescing-operator/-/plugin-syntax-nullish-coalescing-operator-7.8.3.tgz", + "integrity": "sha512-aSff4zPII1u2QD7y+F8oDsz19ew4IGEJg9SVW+bqwpwtfFleiQDMdzA/R+UlWDzfnHFCxxleFT0PMIrR36XLNQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-numeric-separator": { + "version": "7.10.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-numeric-separator/-/plugin-syntax-numeric-separator-7.10.4.tgz", + "integrity": "sha512-9H6YdfkcK/uOnY/K7/aA2xpzaAgkQn37yzWUMRK7OaPOqOpGS1+n0H5hxT9AUw9EsSjPW8SVyMJwYRtWs3X3ug==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.10.4" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-object-rest-spread": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-object-rest-spread/-/plugin-syntax-object-rest-spread-7.8.3.tgz", + "integrity": "sha512-XoqMijGZb9y3y2XskN+P1wUGiVwWZ5JmoDRwx5+3GmEplNyVM2s2Dg8ILFQm8rWM48orGy5YpI5Bl8U1y7ydlA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-catch-binding": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-catch-binding/-/plugin-syntax-optional-catch-binding-7.8.3.tgz", + "integrity": 
"sha512-6VPD0Pc1lpTqw0aKoeRTMiB+kWhAoT24PA+ksWSBrFtl5SIRVpZlwN3NNPQjehA2E/91FV3RjLWoVTglWcSV3Q==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-optional-chaining": { + "version": "7.8.3", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-optional-chaining/-/plugin-syntax-optional-chaining-7.8.3.tgz", + "integrity": "sha512-KoK9ErH1MBlCPxV0VANkXW2/dw4vlbGDrFgz8bmUsBGYkFRcbRwMh6cIJubdPrkxRwuGdtCk0v/wPTKbQgBjkg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.8.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-private-property-in-object": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-private-property-in-object/-/plugin-syntax-private-property-in-object-7.14.5.tgz", + "integrity": "sha512-0wVnp9dxJ72ZUJDV27ZfbSj6iHLoytYZmh3rFcxNnvsJF3ktkzLDZPy/mA17HGsaQT3/DQsWYX1f1QGWkCoVUg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-top-level-await": { + "version": "7.14.5", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-top-level-await/-/plugin-syntax-top-level-await-7.14.5.tgz", + "integrity": "sha512-hx++upLv5U1rgYfwe1xBQUhRmU41NEvpUvrp8jkrSCdvGSnM5/qdRMtylJ6PG5OFkBaHkbTAKTnd3/YyESRHFw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-syntax-unicode-sets-regex": { + "version": "7.18.6", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-unicode-sets-regex/-/plugin-syntax-unicode-sets-regex-7.18.6.tgz", + "integrity": "sha512-727YkEAPwSIQTv5im8QHz3upqp92JTWhidIC81Tdx4VJYIte/VndKf1qKrfnnhPLiPghStWfvC/iFaMCQu7Nqg==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.18.6", + "@babel/helper-plugin-utils": "^7.18.6" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-arrow-functions": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-arrow-functions/-/plugin-transform-arrow-functions-7.24.7.tgz", + "integrity": "sha512-Dt9LQs6iEY++gXUwY03DNFat5C2NbO48jj+j/bSAz6b3HgPs39qcPiYt77fDObIcFwj3/C2ICX9YMwGflUoSHQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-generator-functions": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-async-generator-functions/-/plugin-transform-async-generator-functions-7.25.4.tgz", + "integrity": "sha512-jz8cV2XDDTqjKPwVPJBIjORVEmSGYhdRa8e5k5+vN+uwcjSrSxUaebBRa4ko1jqNF2uxyg8G6XYk30Jv285xzg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-remap-async-to-generator": "^7.25.0", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/traverse": "^7.25.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-async-to-generator": { + "version": "7.24.7", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-async-to-generator/-/plugin-transform-async-to-generator-7.24.7.tgz", + "integrity": "sha512-SQY01PcJfmQ+4Ash7NE+rpbLFbmqA2GPIgqzxfFTL4t1FKRq4zTms/7htKpoCUI9OcFYgzqfmCdH53s6/jn5fA==", + "dependencies": { + "@babel/helper-module-imports": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-remap-async-to-generator": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoped-functions": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoped-functions/-/plugin-transform-block-scoped-functions-7.24.7.tgz", + "integrity": "sha512-yO7RAz6EsVQDaBH18IDJcMB1HnrUn2FJ/Jslc/WtPPWcjhpUJXU/rjbwmluzp7v/ZzWcEhTMXELnnsz8djWDwQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-block-scoping": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-block-scoping/-/plugin-transform-block-scoping-7.25.0.tgz", + "integrity": "sha512-yBQjYoOjXlFv9nlXb3f1casSHOZkWr29NX+zChVanLg5Nc157CrbEX9D7hxxtTpuFy7Q0YzmmWfJxzvps4kXrQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-properties": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-properties/-/plugin-transform-class-properties-7.25.4.tgz", + "integrity": "sha512-nZeZHyCWPfjkdU5pA/uHiTaDAFUEqkpzf1YoQT2NeSynCGYq9rxfyI3XpQbfx/a0hSnFH6TGlEXvae5Vi7GD8g==", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.25.4", + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-class-static-block": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-class-static-block/-/plugin-transform-class-static-block-7.24.7.tgz", + "integrity": "sha512-HMXK3WbBPpZQufbMG4B46A90PkuuhN9vBCb5T8+VAHqvAqvcLi+2cKoukcpmUYkszLhScU3l1iudhrks3DggRQ==", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-class-static-block": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.12.0" + } + }, + "node_modules/@babel/plugin-transform-classes": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-classes/-/plugin-transform-classes-7.25.4.tgz", + "integrity": "sha512-oexUfaQle2pF/b6E0dwsxQtAol9TLSO88kQvym6HHBWFliV2lGdrPieX+WgMRLSJDVzdYywk7jXbLPuO2KLTLg==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-replace-supers": "^7.25.0", + "@babel/traverse": "^7.25.4", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-computed-properties": { + "version": "7.24.7", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-computed-properties/-/plugin-transform-computed-properties-7.24.7.tgz", + "integrity": "sha512-25cS7v+707Gu6Ds2oY6tCkUwsJ9YIDbggd9+cu9jzzDgiNq7hR/8dkzxWfKWnTic26vsI3EsCXNd4iEB6e8esQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/template": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-destructuring": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-destructuring/-/plugin-transform-destructuring-7.24.8.tgz", + "integrity": "sha512-36e87mfY8TnRxc7yc6M9g9gOB7rKgSahqkIKwLpz4Ppk2+zC2Cy1is0uwtuSG6AE4zlTOUa+7JGz9jCJGLqQFQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-dotall-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dotall-regex/-/plugin-transform-dotall-regex-7.24.7.tgz", + "integrity": "sha512-ZOA3W+1RRTSWvyqcMJDLqbchh7U4NRGqwRfFSVbOLS/ePIP4vHB5e8T8eXcuqyN1QkgKyj5wuW0lcS85v4CrSw==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-keys": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-keys/-/plugin-transform-duplicate-keys-7.24.7.tgz", + "integrity": "sha512-JdYfXyCRihAe46jUIliuL2/s0x0wObgwwiGxw/UbgJBr20gQBThrokO4nYKgWkD7uBaqM7+9x5TU7NkExZJyzw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-duplicate-named-capturing-groups-regex": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-duplicate-named-capturing-groups-regex/-/plugin-transform-duplicate-named-capturing-groups-regex-7.25.0.tgz", + "integrity": "sha512-YLpb4LlYSc3sCUa35un84poXoraOiQucUTTu8X1j18JV+gNa8E0nyUf/CjZ171IRGr4jEguF+vzJU66QZhn29g==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.25.0", + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-dynamic-import": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-dynamic-import/-/plugin-transform-dynamic-import-7.24.7.tgz", + "integrity": "sha512-sc3X26PhZQDb3JhORmakcbvkeInvxz+A8oda99lj7J60QRuPZvNAk9wQlTBS1ZynelDrDmTU4pw1tyc5d5ZMUg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-dynamic-import": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-exponentiation-operator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-exponentiation-operator/-/plugin-transform-exponentiation-operator-7.24.7.tgz", + "integrity": "sha512-Rqe/vSc9OYgDajNIK35u7ot+KeCoetqQYFXM4Epf7M7ez3lWlOjrDjrwMei6caCVhfdw+mIKD4cgdGNy5JQotQ==", + "dependencies": { + 
"@babel/helper-builder-binary-assignment-operator-visitor": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-export-namespace-from": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-export-namespace-from/-/plugin-transform-export-namespace-from-7.24.7.tgz", + "integrity": "sha512-v0K9uNYsPL3oXZ/7F9NNIbAj2jv1whUEtyA6aujhekLs56R++JDQuzRcP2/z4WX5Vg/c5lE9uWZA0/iUoFhLTA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-for-of": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-for-of/-/plugin-transform-for-of-7.24.7.tgz", + "integrity": "sha512-wo9ogrDG1ITTTBsy46oGiN1dS9A7MROBTcYsfS8DtsImMkHk9JXJ3EWQM6X2SUw4x80uGPlwj0o00Uoc6nEE3g==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-function-name": { + "version": "7.25.1", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-function-name/-/plugin-transform-function-name-7.25.1.tgz", + "integrity": "sha512-TVVJVdW9RKMNgJJlLtHsKDTydjZAbwIsn6ySBPQaEAUU5+gVvlJt/9nRmqVbsV/IBanRjzWoaAQKLoamWVOUuA==", + "dependencies": { + "@babel/helper-compilation-targets": "^7.24.8", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/traverse": "^7.25.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-json-strings": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-json-strings/-/plugin-transform-json-strings-7.24.7.tgz", + "integrity": "sha512-2yFnBGDvRuxAaE/f0vfBKvtnvvqU8tGpMHqMNpTN2oWMKIR3NqFkjaAgGwawhqK/pIN2T3XdjGPdaG0vDhOBGw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-json-strings": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-literals": { + "version": "7.25.2", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-literals/-/plugin-transform-literals-7.25.2.tgz", + "integrity": "sha512-HQI+HcTbm9ur3Z2DkO+jgESMAMcYLuN/A7NRw9juzxAezN9AvqvUTnpKP/9kkYANz6u7dFlAyOu44ejuGySlfw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-logical-assignment-operators": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-logical-assignment-operators/-/plugin-transform-logical-assignment-operators-7.24.7.tgz", + "integrity": "sha512-4D2tpwlQ1odXmTEIFWy9ELJcZHqrStlzK/dAOWYyxX3zT0iXQB6banjgeOJQXzEc4S0E0a5A+hahxPaEFYftsw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + 
"node_modules/@babel/plugin-transform-member-expression-literals": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-member-expression-literals/-/plugin-transform-member-expression-literals-7.24.7.tgz", + "integrity": "sha512-T/hRC1uqrzXMKLQ6UCwMT85S3EvqaBXDGf0FaMf4446Qx9vKwlghvee0+uuZcDUCZU5RuNi4781UQ7R308zzBw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-amd": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-amd/-/plugin-transform-modules-amd-7.24.7.tgz", + "integrity": "sha512-9+pB1qxV3vs/8Hdmz/CulFB8w2tuu6EB94JZFsjdqxQokwGa9Unap7Bo2gGBGIvPmDIVvQrom7r5m/TCDMURhg==", + "dependencies": { + "@babel/helper-module-transforms": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-commonjs": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-commonjs/-/plugin-transform-modules-commonjs-7.24.8.tgz", + "integrity": "sha512-WHsk9H8XxRs3JXKWFiqtQebdh9b/pTk4EgueygFzYlTKAg0Ud985mSevdNjdXdFBATSKVJGQXP1tv6aGbssLKA==", + "dependencies": { + "@babel/helper-module-transforms": "^7.24.8", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-simple-access": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-systemjs": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-systemjs/-/plugin-transform-modules-systemjs-7.25.0.tgz", + "integrity": "sha512-YPJfjQPDXxyQWg/0+jHKj1llnY5f/R6a0p/vP4lPymxLu7Lvl4k2WMitqi08yxwQcCVUUdG9LCUj4TNEgAp3Jw==", + "dependencies": { + "@babel/helper-module-transforms": "^7.25.0", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "@babel/traverse": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-modules-umd": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-modules-umd/-/plugin-transform-modules-umd-7.24.7.tgz", + "integrity": "sha512-3aytQvqJ/h9z4g8AsKPLvD4Zqi2qT+L3j7XoFFu1XBlZWEl2/1kWnhmAbxpLgPrHSY0M6UA02jyTiwUVtiKR6A==", + "dependencies": { + "@babel/helper-module-transforms": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-named-capturing-groups-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-named-capturing-groups-regex/-/plugin-transform-named-capturing-groups-regex-7.24.7.tgz", + "integrity": "sha512-/jr7h/EWeJtk1U/uz2jlsCioHkZk1JJZVcc8oQsJ1dUlaJD83f4/6Zeh2aHt9BIFokHIsSeDfhUmju0+1GPd6g==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/plugin-transform-new-target": { + "version": "7.24.7", + "resolved": 
"https://registry.npmjs.org/@babel/plugin-transform-new-target/-/plugin-transform-new-target-7.24.7.tgz", + "integrity": "sha512-RNKwfRIXg4Ls/8mMTza5oPF5RkOW8Wy/WgMAp1/F1yZ8mMbtwXW+HDoJiOsagWrAhI5f57Vncrmr9XeT4CVapA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-nullish-coalescing-operator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-nullish-coalescing-operator/-/plugin-transform-nullish-coalescing-operator-7.24.7.tgz", + "integrity": "sha512-Ts7xQVk1OEocqzm8rHMXHlxvsfZ0cEF2yomUqpKENHWMF4zKk175Y4q8H5knJes6PgYad50uuRmt3UJuhBw8pQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-numeric-separator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-numeric-separator/-/plugin-transform-numeric-separator-7.24.7.tgz", + "integrity": "sha512-e6q1TiVUzvH9KRvicuxdBTUj4AdKSRwzIyFFnfnezpCfP2/7Qmbb8qbU2j7GODbl4JMkblitCQjKYUaX/qkkwA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-numeric-separator": "^7.10.4" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-rest-spread": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-rest-spread/-/plugin-transform-object-rest-spread-7.24.7.tgz", + "integrity": "sha512-4QrHAr0aXQCEFni2q4DqKLD31n2DL+RxcwnNjDFkSG0eNQ/xCavnRkfCUjsyqGC2OviNJvZOF/mQqZBw7i2C5Q==", + "dependencies": { + "@babel/helper-compilation-targets": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-transform-parameters": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-object-super": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-object-super/-/plugin-transform-object-super-7.24.7.tgz", + "integrity": "sha512-A/vVLwN6lBrMFmMDmPPz0jnE6ZGx7Jq7d6sT/Ev4H65RER6pZ+kczlf1DthF5N0qaPHBsI7UXiE8Zy66nmAovg==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-replace-supers": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-catch-binding": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-catch-binding/-/plugin-transform-optional-catch-binding-7.24.7.tgz", + "integrity": "sha512-uLEndKqP5BfBbC/5jTwPxLh9kqPWWgzN/f8w6UwAIirAEqiIVJWWY312X72Eub09g5KF9+Zn7+hT7sDxmhRuKA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-optional-chaining": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-optional-chaining/-/plugin-transform-optional-chaining-7.24.8.tgz", + "integrity": 
"sha512-5cTOLSMs9eypEy8JUVvIKOu6NgvbJMnpG62VpIHrTmROdQ+L5mDAaI40g25k5vXti55JWNX5jCkq3HZxXBQANw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7", + "@babel/plugin-syntax-optional-chaining": "^7.8.3" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-parameters": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-parameters/-/plugin-transform-parameters-7.24.7.tgz", + "integrity": "sha512-yGWW5Rr+sQOhK0Ot8hjDJuxU3XLRQGflvT4lhlSY0DFvdb3TwKaY26CJzHtYllU0vT9j58hc37ndFPsqT1SrzA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-methods": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-methods/-/plugin-transform-private-methods-7.25.4.tgz", + "integrity": "sha512-ao8BG7E2b/URaUQGqN3Tlsg+M3KlHY6rJ1O1gXAEUnZoyNQnvKyH87Kfg+FoxSeyWUB8ISZZsC91C44ZuBFytw==", + "dependencies": { + "@babel/helper-create-class-features-plugin": "^7.25.4", + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-private-property-in-object": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-private-property-in-object/-/plugin-transform-private-property-in-object-7.24.7.tgz", + "integrity": "sha512-9z76mxwnwFxMyxZWEgdgECQglF2Q7cFLm0kMf8pGwt+GSJsY0cONKj/UuO4bOH0w/uAel3ekS4ra5CEAyJRmDA==", + "dependencies": { + "@babel/helper-annotate-as-pure": "^7.24.7", + "@babel/helper-create-class-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-property-literals": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-property-literals/-/plugin-transform-property-literals-7.24.7.tgz", + "integrity": "sha512-EMi4MLQSHfd2nrCqQEWxFdha2gBCqU4ZcCng4WBGZ5CJL4bBRW0ptdqqDdeirGZcpALazVVNJqRmsO8/+oNCBA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-regenerator": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-regenerator/-/plugin-transform-regenerator-7.24.7.tgz", + "integrity": "sha512-lq3fvXPdimDrlg6LWBoqj+r/DEWgONuwjuOuQCSYgRroXDH/IdM1C0IZf59fL5cHLpjEH/O6opIRBbqv7ELnuA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "regenerator-transform": "^0.15.2" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-reserved-words": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-reserved-words/-/plugin-transform-reserved-words-7.24.7.tgz", + "integrity": "sha512-0DUq0pHcPKbjFZCfTss/pGkYMfy3vFWydkUBd9r0GHpIyfs2eCDENvqadMycRS9wZCXR41wucAfJHJmwA0UmoQ==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + 
"node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-shorthand-properties": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-shorthand-properties/-/plugin-transform-shorthand-properties-7.24.7.tgz", + "integrity": "sha512-KsDsevZMDsigzbA09+vacnLpmPH4aWjcZjXdyFKGzpplxhbeB4wYtury3vglQkg6KM/xEPKt73eCjPPf1PgXBA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-spread": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-spread/-/plugin-transform-spread-7.24.7.tgz", + "integrity": "sha512-x96oO0I09dgMDxJaANcRyD4ellXFLLiWhuwDxKZX5g2rWP1bTPkBSwCYv96VDXVT1bD9aPj8tppr5ITIh8hBng==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7", + "@babel/helper-skip-transparent-expression-wrappers": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-sticky-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-sticky-regex/-/plugin-transform-sticky-regex-7.24.7.tgz", + "integrity": "sha512-kHPSIJc9v24zEml5geKg9Mjx5ULpfncj0wRpYtxbvKyTtHCYDkVE3aHQ03FrpEo4gEe2vrJJS1Y9CJTaThA52g==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-template-literals": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-template-literals/-/plugin-transform-template-literals-7.24.7.tgz", + "integrity": "sha512-AfDTQmClklHCOLxtGoP7HkeMw56k1/bTQjwsfhL6pppo/M4TOBSq+jjBUBLmV/4oeFg4GWMavIl44ZeCtmmZTw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-typeof-symbol": { + "version": "7.24.8", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-typeof-symbol/-/plugin-transform-typeof-symbol-7.24.8.tgz", + "integrity": "sha512-adNTUpDCVnmAE58VEqKlAA6ZBlNkMnWD0ZcW76lyNFN3MJniyGFZfNwERVk8Ap56MCnXztmDr19T4mPTztcuaw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-escapes": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-escapes/-/plugin-transform-unicode-escapes-7.24.7.tgz", + "integrity": "sha512-U3ap1gm5+4edc2Q/P+9VrBNhGkfnf+8ZqppY71Bo/pzZmXhhLdqgaUl6cuB07O1+AQJtCLfaOmswiNbSQ9ivhw==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-property-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-property-regex/-/plugin-transform-unicode-property-regex-7.24.7.tgz", + "integrity": "sha512-uH2O4OV5M9FZYQrwc7NdVmMxQJOCCzFeYudlZSzUAHRFeOujQefa92E74TQDVskNHCzOXoigEuoyzHDhaEaK5w==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + 
"@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-regex": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-regex/-/plugin-transform-unicode-regex-7.24.7.tgz", + "integrity": "sha512-hlQ96MBZSAXUq7ltkjtu3FJCCSMx/j629ns3hA3pXnBXjanNP0LHi+JpPeA81zaWgVK1VGH95Xuy7u0RyQ8kMg==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.24.7", + "@babel/helper-plugin-utils": "^7.24.7" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/plugin-transform-unicode-sets-regex": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/plugin-transform-unicode-sets-regex/-/plugin-transform-unicode-sets-regex-7.25.4.tgz", + "integrity": "sha512-qesBxiWkgN1Q+31xUE9RcMk79eOXXDCv6tfyGMRSs4RGlioSg2WVyQAm07k726cSE56pa+Kb0y9epX2qaXzTvA==", + "dependencies": { + "@babel/helper-create-regexp-features-plugin": "^7.25.2", + "@babel/helper-plugin-utils": "^7.24.8" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/@babel/preset-env": { + "version": "7.25.4", + "resolved": "https://registry.npmjs.org/@babel/preset-env/-/preset-env-7.25.4.tgz", + "integrity": "sha512-W9Gyo+KmcxjGahtt3t9fb14vFRWvPpu5pT6GBlovAK6BTBcxgjfVMSQCfJl4oi35ODrxP6xx2Wr8LNST57Mraw==", + "dependencies": { + "@babel/compat-data": "^7.25.4", + "@babel/helper-compilation-targets": "^7.25.2", + "@babel/helper-plugin-utils": "^7.24.8", + "@babel/helper-validator-option": "^7.24.8", + "@babel/plugin-bugfix-firefox-class-in-computed-class-key": "^7.25.3", + "@babel/plugin-bugfix-safari-class-field-initializer-scope": "^7.25.0", + "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": "^7.25.0", + "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": "^7.24.7", + "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": "^7.25.0", + "@babel/plugin-proposal-private-property-in-object": "7.21.0-placeholder-for-preset-env.2", + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-dynamic-import": "^7.8.3", + "@babel/plugin-syntax-export-namespace-from": "^7.8.3", + "@babel/plugin-syntax-import-assertions": "^7.24.7", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5", + "@babel/plugin-syntax-unicode-sets-regex": "^7.18.6", + "@babel/plugin-transform-arrow-functions": "^7.24.7", + "@babel/plugin-transform-async-generator-functions": "^7.25.4", + "@babel/plugin-transform-async-to-generator": "^7.24.7", + "@babel/plugin-transform-block-scoped-functions": "^7.24.7", + "@babel/plugin-transform-block-scoping": "^7.25.0", + 
"@babel/plugin-transform-class-properties": "^7.25.4", + "@babel/plugin-transform-class-static-block": "^7.24.7", + "@babel/plugin-transform-classes": "^7.25.4", + "@babel/plugin-transform-computed-properties": "^7.24.7", + "@babel/plugin-transform-destructuring": "^7.24.8", + "@babel/plugin-transform-dotall-regex": "^7.24.7", + "@babel/plugin-transform-duplicate-keys": "^7.24.7", + "@babel/plugin-transform-duplicate-named-capturing-groups-regex": "^7.25.0", + "@babel/plugin-transform-dynamic-import": "^7.24.7", + "@babel/plugin-transform-exponentiation-operator": "^7.24.7", + "@babel/plugin-transform-export-namespace-from": "^7.24.7", + "@babel/plugin-transform-for-of": "^7.24.7", + "@babel/plugin-transform-function-name": "^7.25.1", + "@babel/plugin-transform-json-strings": "^7.24.7", + "@babel/plugin-transform-literals": "^7.25.2", + "@babel/plugin-transform-logical-assignment-operators": "^7.24.7", + "@babel/plugin-transform-member-expression-literals": "^7.24.7", + "@babel/plugin-transform-modules-amd": "^7.24.7", + "@babel/plugin-transform-modules-commonjs": "^7.24.8", + "@babel/plugin-transform-modules-systemjs": "^7.25.0", + "@babel/plugin-transform-modules-umd": "^7.24.7", + "@babel/plugin-transform-named-capturing-groups-regex": "^7.24.7", + "@babel/plugin-transform-new-target": "^7.24.7", + "@babel/plugin-transform-nullish-coalescing-operator": "^7.24.7", + "@babel/plugin-transform-numeric-separator": "^7.24.7", + "@babel/plugin-transform-object-rest-spread": "^7.24.7", + "@babel/plugin-transform-object-super": "^7.24.7", + "@babel/plugin-transform-optional-catch-binding": "^7.24.7", + "@babel/plugin-transform-optional-chaining": "^7.24.8", + "@babel/plugin-transform-parameters": "^7.24.7", + "@babel/plugin-transform-private-methods": "^7.25.4", + "@babel/plugin-transform-private-property-in-object": "^7.24.7", + "@babel/plugin-transform-property-literals": "^7.24.7", + "@babel/plugin-transform-regenerator": "^7.24.7", + "@babel/plugin-transform-reserved-words": "^7.24.7", + "@babel/plugin-transform-shorthand-properties": "^7.24.7", + "@babel/plugin-transform-spread": "^7.24.7", + "@babel/plugin-transform-sticky-regex": "^7.24.7", + "@babel/plugin-transform-template-literals": "^7.24.7", + "@babel/plugin-transform-typeof-symbol": "^7.24.8", + "@babel/plugin-transform-unicode-escapes": "^7.24.7", + "@babel/plugin-transform-unicode-property-regex": "^7.24.7", + "@babel/plugin-transform-unicode-regex": "^7.24.7", + "@babel/plugin-transform-unicode-sets-regex": "^7.25.4", + "@babel/preset-modules": "0.1.6-no-external-plugins", + "babel-plugin-polyfill-corejs2": "^0.4.10", + "babel-plugin-polyfill-corejs3": "^0.10.6", + "babel-plugin-polyfill-regenerator": "^0.6.1", + "core-js-compat": "^3.37.1", + "semver": "^6.3.1" + }, + "engines": { + "node": ">=6.9.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0" + } + }, + "node_modules/@babel/preset-modules": { + "version": "0.1.6-no-external-plugins", + "resolved": "https://registry.npmjs.org/@babel/preset-modules/-/preset-modules-0.1.6-no-external-plugins.tgz", + "integrity": "sha512-HrcgcIESLm9aIR842yhJ5RWan/gebQUJ6E/E5+rf0y9o6oj7w0Br+sWuL6kEQ/o/AdfvR1Je9jG18/gnpwjEyA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@babel/types": "^7.4.4", + "esutils": "^2.0.2" + }, + "peerDependencies": { + "@babel/core": "^7.0.0-0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/@babel/regjsgen": { + "version": "0.8.0", + "resolved": "https://registry.npmjs.org/@babel/regjsgen/-/regjsgen-0.8.0.tgz", + "integrity": 
"sha512-x/rqGMdzj+fWZvCOYForTghzbtqPDZ5gPwaoNGHdgDfF2QA/XZbCBp4Moo5scrkAMPhB7z26XM/AaHuIJdgauA==" + }, + "node_modules/@babel/runtime": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/runtime/-/runtime-7.25.6.tgz", + "integrity": "sha512-VBj9MYyDb9tuLq7yzqjgzt6Q+IBQLrGZfdjOekyEirZPHxXWoTSGUTMrpsfi58Up73d13NfYLv8HT9vmznjzhQ==", + "dependencies": { + "regenerator-runtime": "^0.14.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/template": { + "version": "7.25.0", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", + "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/parser": "^7.25.0", + "@babel/types": "^7.25.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/traverse": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.6.tgz", + "integrity": "sha512-9Vrcx5ZW6UwK5tvqsj0nGpp/XzqthkT0dqIc9g1AdtygFToNtTF67XzYS//dm+SAK9cp3B9R4ZO/46p63SCjlQ==", + "dependencies": { + "@babel/code-frame": "^7.24.7", + "@babel/generator": "^7.25.6", + "@babel/parser": "^7.25.6", + "@babel/template": "^7.25.0", + "@babel/types": "^7.25.6", + "debug": "^4.3.1", + "globals": "^11.1.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@babel/types": { + "version": "7.25.6", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.6.tgz", + "integrity": "sha512-/l42B1qxpG6RdfYf343Uw1vmDjeNhneUXtzhojE7pDgfpEypmRhI6j1kr17XCVv4Cgl9HdAiQY2x0GwKm7rWCw==", + "dependencies": { + "@babel/helper-string-parser": "^7.24.8", + "@babel/helper-validator-identifier": "^7.24.7", + "to-fast-properties": "^2.0.0" + }, + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/@fastify/busboy": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", + "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==", + "engines": { + "node": ">=14" + } + }, + "node_modules/@istanbuljs/load-nyc-config": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@istanbuljs/load-nyc-config/-/load-nyc-config-1.1.0.tgz", + "integrity": "sha512-VjeHSlIzpv/NyD3N0YuHfXOPDIixcA1q2ZV98wsMqcYlPmv2n3Yb2lYP9XMElnaFVXg5A7YLTeLu6V84uQDjmQ==", + "dependencies": { + "camelcase": "^5.3.1", + "find-up": "^4.1.0", + "get-package-type": "^0.1.0", + "js-yaml": "^3.13.1", + "resolve-from": "^5.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@istanbuljs/schema": { + "version": "0.1.3", + "resolved": "https://registry.npmjs.org/@istanbuljs/schema/-/schema-0.1.3.tgz", + "integrity": "sha512-ZXRY4jNvVgSVQ8DL3LTcakaAtXwTVUxE81hslsyD2AtoXW/wVob10HkOJ1X/pAlcI7D+2YoZKg5do8G/w6RYgA==", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/schemas": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/schemas/-/schemas-29.6.3.tgz", + "integrity": "sha512-mo5j5X+jIZmJQveBKeS/clAueipV7KgiX1vMgCxam1RNYiqE1w62n0/tJJnHtjW8ZHcQco5gY85jA3mi0L+nSA==", + "dependencies": { + "@sinclair/typebox": "^0.27.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/@jest/transform/-/transform-29.7.0.tgz", + "integrity": "sha512-ok/BTPFzFKVMwO5eOHRrvnBVHdRy9IrsrW1GpMaQ9MCnilNLXQKmAX8s1YXDFaai9xJpac2ySzV0YeRRECr2Vw==", + 
"dependencies": { + "@babel/core": "^7.11.6", + "@jest/types": "^29.6.3", + "@jridgewell/trace-mapping": "^0.3.18", + "babel-plugin-istanbul": "^6.1.1", + "chalk": "^4.0.0", + "convert-source-map": "^2.0.0", + "fast-json-stable-stringify": "^2.1.0", + "graceful-fs": "^4.2.9", + "jest-haste-map": "^29.7.0", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "micromatch": "^4.0.4", + "pirates": "^4.0.4", + "slash": "^3.0.0", + "write-file-atomic": "^4.0.2" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/transform/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/transform/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/transform/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/transform/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/@jest/transform/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/transform/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/types": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/@jest/types/-/types-29.6.3.tgz", + "integrity": "sha512-u3UPsIilWKOM3F9CXtrG8LEJmNxwoCQC/XVj4IKYXvvpx7QIi/Kg1LI5uDmDpKlac62NUtX7eLjRh+jVZcLOzw==", + "dependencies": { + "@jest/schemas": "^29.6.3", + "@types/istanbul-lib-coverage": "^2.0.0", + "@types/istanbul-reports": "^3.0.0", + "@types/node": "*", + "@types/yargs": "^17.0.8", + "chalk": "^4.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/@jest/types/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": 
"sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/@jest/types/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/@jest/types/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/@jest/types/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/@jest/types/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/@jest/types/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/@jridgewell/gen-mapping": { + "version": "0.3.5", + "resolved": "https://registry.npmjs.org/@jridgewell/gen-mapping/-/gen-mapping-0.3.5.tgz", + "integrity": "sha512-IzL8ZoEDIBRWEzlCcRhOaCupYyN5gdIK+Q6fbFdPDg6HqX6jpkItn7DFIpW9LQzXG6Df9sA7+OKnq0qlz/GaQg==", + "dependencies": { + "@jridgewell/set-array": "^1.2.1", + "@jridgewell/sourcemap-codec": "^1.4.10", + "@jridgewell/trace-mapping": "^0.3.24" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/resolve-uri": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/@jridgewell/resolve-uri/-/resolve-uri-3.1.2.tgz", + "integrity": "sha512-bRISgCIjP20/tbWSPWMEi54QVPRZExkuD9lJL+UIxUKtwVJA8wW1Trb1jMs1RFXo1CBTNZ/5hpC9QvmKWdopKw==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/set-array": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@jridgewell/set-array/-/set-array-1.2.1.tgz", + "integrity": "sha512-R8gLRTZeyp03ymzP/6Lil/28tGeGEzhx1q2k703KGWRAI1VdvPIXdG70VJc2pAMw3NA6JKL5hhFu1sJX0Mnn/A==", + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.5.0", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.5.0.tgz", + "integrity": "sha512-gv3ZRaISU3fjPAgNsriBRqGWQL6quFx04YMPW/zD8XMLsU32mhCCbfbO6KZFLjvYpCZ8zyDEgqsgf+PwPaM7GQ==" + }, + "node_modules/@jridgewell/trace-mapping": { + "version": "0.3.25", + "resolved": 
"https://registry.npmjs.org/@jridgewell/trace-mapping/-/trace-mapping-0.3.25.tgz", + "integrity": "sha512-vNk6aEwybGtawWmy/PzwnGDOjCkLWSD2wqvjGGAgOAwCGWySYXfYoxt00IJkTF+8Lb57DwOb3Aa0o9CApepiYQ==", + "dependencies": { + "@jridgewell/resolve-uri": "^3.1.0", + "@jridgewell/sourcemap-codec": "^1.4.14" + } + }, + "node_modules/@octokit/auth-token": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-5.1.1.tgz", + "integrity": "sha512-rh3G3wDO8J9wSjfI436JUKzHIxq8NaiL0tVeB2aXmG6p/9859aUOAjA9pmSPNGGZxfwmaJ9ozOJImuNVJdpvbA==", + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/core": { + "version": "6.1.2", + "resolved": "https://registry.npmjs.org/@octokit/core/-/core-6.1.2.tgz", + "integrity": "sha512-hEb7Ma4cGJGEUNOAVmyfdB/3WirWMg5hDuNFVejGEDFqupeOysLc2sG6HJxY2etBp5YQu5Wtxwi020jS9xlUwg==", + "dependencies": { + "@octokit/auth-token": "^5.0.0", + "@octokit/graphql": "^8.0.0", + "@octokit/request": "^9.0.0", + "@octokit/request-error": "^6.0.1", + "@octokit/types": "^13.0.0", + "before-after-hook": "^3.0.2", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/endpoint": { + "version": "10.1.1", + "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-10.1.1.tgz", + "integrity": "sha512-JYjh5rMOwXMJyUpj028cu0Gbp7qe/ihxfJMLc8VZBMMqSwLgOxDI1911gV4Enl1QSavAQNJcwmwBF9M0VvLh6Q==", + "dependencies": { + "@octokit/types": "^13.0.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/graphql": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-8.1.1.tgz", + "integrity": "sha512-ukiRmuHTi6ebQx/HFRCXKbDlOh/7xEV6QUXaE7MJEKGNAncGI/STSbOkl12qVXZrfZdpXctx5O9X1AIaebiDBg==", + "dependencies": { + "@octokit/request": "^9.0.0", + "@octokit/types": "^13.0.0", + "universal-user-agent": "^7.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/openapi-types": { + "version": "22.2.0", + "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-22.2.0.tgz", + "integrity": "sha512-QBhVjcUa9W7Wwhm6DBFu6ZZ+1/t/oYxqc2tp81Pi41YNuJinbFRx8B133qVOrAaBbF7D/m0Et6f9/pZt9Rc+tg==" + }, + "node_modules/@octokit/plugin-paginate-rest": { + "version": "11.3.3", + "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-11.3.3.tgz", + "integrity": "sha512-o4WRoOJZlKqEEgj+i9CpcmnByvtzoUYC6I8PD2SA95M+BJ2x8h7oLcVOg9qcowWXBOdcTRsMZiwvM3EyLm9AfA==", + "dependencies": { + "@octokit/types": "^13.5.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/plugin-request-log": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-request-log/-/plugin-request-log-5.3.1.tgz", + "integrity": "sha512-n/lNeCtq+9ofhC15xzmJCNKP2BWTv8Ih2TTy+jatNCCq/gQP/V7rK3fjIfuz0pDWDALO/o/4QY4hyOF6TQQFUw==", + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/plugin-rest-endpoint-methods": { + "version": "13.2.4", + "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-13.2.4.tgz", + "integrity": "sha512-gusyAVgTrPiuXOdfqOySMDztQHv6928PQ3E4dqVGEtOvRXAKRbJR4b1zQyniIT9waqaWk/UDaoJ2dyPr7Bk7Iw==", + "dependencies": { + "@octokit/types": "^13.5.0" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + 
"node_modules/@octokit/plugin-retry": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/@octokit/plugin-retry/-/plugin-retry-7.1.2.tgz", + "integrity": "sha512-XOWnPpH2kJ5VTwozsxGurw+svB2e61aWlmk5EVIYZPwFK5F9h4cyPyj9CIKRyMXMHSwpIsI3mPOdpMmrRhe7UQ==", + "dependencies": { + "@octokit/request-error": "^6.0.0", + "@octokit/types": "^13.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": ">=6" + } + }, + "node_modules/@octokit/plugin-throttling": { + "version": "9.3.1", + "resolved": "https://registry.npmjs.org/@octokit/plugin-throttling/-/plugin-throttling-9.3.1.tgz", + "integrity": "sha512-Qd91H4liUBhwLB2h6jZ99bsxoQdhgPk6TdwnClPyTBSDAdviGPceViEgUwj+pcQDmB/rfAXAXK7MTochpHM3yQ==", + "dependencies": { + "@octokit/types": "^13.0.0", + "bottleneck": "^2.15.3" + }, + "engines": { + "node": ">= 18" + }, + "peerDependencies": { + "@octokit/core": "^6.0.0" + } + }, + "node_modules/@octokit/request": { + "version": "9.1.3", + "resolved": "https://registry.npmjs.org/@octokit/request/-/request-9.1.3.tgz", + "integrity": "sha512-V+TFhu5fdF3K58rs1pGUJIDH5RZLbZm5BI+MNF+6o/ssFNT4vWlCh/tVpF3NxGtP15HUxTTMUbsG5llAuU2CZA==", + "dependencies": { + "@octokit/endpoint": "^10.0.0", + "@octokit/request-error": "^6.0.1", + "@octokit/types": "^13.1.0", + "universal-user-agent": "^7.0.2" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/request-error": { + "version": "6.1.4", + "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-6.1.4.tgz", + "integrity": "sha512-VpAhIUxwhWZQImo/dWAN/NpPqqojR6PSLgLYAituLM6U+ddx9hCioFGwBr5Mi+oi5CLeJkcAs3gJ0PYYzU6wUg==", + "dependencies": { + "@octokit/types": "^13.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/rest": { + "version": "21.0.2", + "resolved": "https://registry.npmjs.org/@octokit/rest/-/rest-21.0.2.tgz", + "integrity": "sha512-+CiLisCoyWmYicH25y1cDfCrv41kRSvTq6pPWtRroRJzhsCZWZyCqGyI8foJT5LmScADSwRAnr/xo+eewL04wQ==", + "dependencies": { + "@octokit/core": "^6.1.2", + "@octokit/plugin-paginate-rest": "^11.0.0", + "@octokit/plugin-request-log": "^5.3.1", + "@octokit/plugin-rest-endpoint-methods": "^13.0.0" + }, + "engines": { + "node": ">= 18" + } + }, + "node_modules/@octokit/types": { + "version": "13.5.0", + "resolved": "https://registry.npmjs.org/@octokit/types/-/types-13.5.0.tgz", + "integrity": "sha512-HdqWTf5Z3qwDVlzCrP8UJquMwunpDiMPt5er+QjGzL4hqr/vBVY/MauQgS1xWxCDT1oMx1EULyqxncdCY/NVSQ==", + "dependencies": { + "@octokit/openapi-types": "^22.2.0" + } + }, + "node_modules/@sinclair/typebox": { + "version": "0.27.8", + "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", + "integrity": "sha512-+Fj43pSMwJs4KRrH/938Uf+uAELIgVBmQzg/q1YG10djyfA3TnrU8N8XzqCh/okZdszqBQTZf96idMfE5lnwTA==" + }, + "node_modules/@types/babel__core": { + "version": "7.20.5", + "resolved": "https://registry.npmjs.org/@types/babel__core/-/babel__core-7.20.5.tgz", + "integrity": "sha512-qoQprZvz5wQFJwMDqeseRXWv3rqMvhgpbXFfVyWhbx9X47POIA6i/+dXefEmZKoAgOaTdaIgNSMqMIU61yRyzA==", + "dependencies": { + "@babel/parser": "^7.20.7", + "@babel/types": "^7.20.7", + "@types/babel__generator": "*", + "@types/babel__template": "*", + "@types/babel__traverse": "*" + } + }, + "node_modules/@types/babel__generator": { + "version": "7.6.8", + "resolved": "https://registry.npmjs.org/@types/babel__generator/-/babel__generator-7.6.8.tgz", + "integrity": 
"sha512-ASsj+tpEDsEiFr1arWrlN6V3mdfjRMZt6LtK/Vp/kreFLnr5QH5+DhvD5nINYZXzwJvXeGq+05iUXcAzVrqWtw==", + "dependencies": { + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__template": { + "version": "7.4.4", + "resolved": "https://registry.npmjs.org/@types/babel__template/-/babel__template-7.4.4.tgz", + "integrity": "sha512-h/NUaSyG5EyxBIp8YRxo4RMe2/qQgvyowRwVMzhYhBCONbW8PUsg4lkFMrhgZhUe5z3L3MiLDuvyJ/CaPa2A8A==", + "dependencies": { + "@babel/parser": "^7.1.0", + "@babel/types": "^7.0.0" + } + }, + "node_modules/@types/babel__traverse": { + "version": "7.20.6", + "resolved": "https://registry.npmjs.org/@types/babel__traverse/-/babel__traverse-7.20.6.tgz", + "integrity": "sha512-r1bzfrm0tomOI8g1SzvCaQHo6Lcv6zu0EA+W2kHrt8dyrHQxGzBBL4kdkzIS+jBMV+EYcMAEAqXqYaLJq5rOZg==", + "dependencies": { + "@babel/types": "^7.20.7" + } + }, + "node_modules/@types/graceful-fs": { + "version": "4.1.9", + "resolved": "https://registry.npmjs.org/@types/graceful-fs/-/graceful-fs-4.1.9.tgz", + "integrity": "sha512-olP3sd1qOEe5dXTSaFvQG+02VdRXcdytWLAZsAq1PecU8uqQAhkrnbli7DagjtXKW/Bl7YJbUsa8MPcuc8LHEQ==", + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/istanbul-lib-coverage": { + "version": "2.0.6", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-coverage/-/istanbul-lib-coverage-2.0.6.tgz", + "integrity": "sha512-2QF/t/auWm0lsy8XtKVPG19v3sSOQlJe/YHZgfjb/KBBHOGSV+J2q/S671rcq9uTBrLAXmZpqJiaQbMT+zNU1w==" + }, + "node_modules/@types/istanbul-lib-report": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/istanbul-lib-report/-/istanbul-lib-report-3.0.3.tgz", + "integrity": "sha512-NQn7AHQnk/RSLOxrBbGyJM/aVQ+pjj5HCgasFxc0K/KhoATfQ/47AyUl15I2yBUpihjmas+a+VJBOqecrFH+uA==", + "dependencies": { + "@types/istanbul-lib-coverage": "*" + } + }, + "node_modules/@types/istanbul-reports": { + "version": "3.0.4", + "resolved": "https://registry.npmjs.org/@types/istanbul-reports/-/istanbul-reports-3.0.4.tgz", + "integrity": "sha512-pk2B1NWalF9toCRu6gjBzR69syFjP4Od8WRAX+0mmf9lAjCRicLOWc+ZrxZHx/0XRjotgkF9t6iaMJ+aXcOdZQ==", + "dependencies": { + "@types/istanbul-lib-report": "*" + } + }, + "node_modules/@types/node": { + "version": "22.6.1", + "resolved": "https://registry.npmjs.org/@types/node/-/node-22.6.1.tgz", + "integrity": "sha512-V48tCfcKb/e6cVUigLAaJDAILdMP0fUW6BidkPK4GpGjXcfbnoHasCZDwz3N3yVt5we2RHm4XTQCpv0KJz9zqw==", + "dependencies": { + "undici-types": "~6.19.2" + } + }, + "node_modules/@types/yargs": { + "version": "17.0.33", + "resolved": "https://registry.npmjs.org/@types/yargs/-/yargs-17.0.33.tgz", + "integrity": "sha512-WpxBCKWPLr4xSsHgz511rFJAM+wS28w2zEO1QDNY5zM/S8ok70NNfztH0xwhqKyaK0OHCbN98LDAZuy1ctxDkA==", + "dependencies": { + "@types/yargs-parser": "*" + } + }, + "node_modules/@types/yargs-parser": { + "version": "21.0.3", + "resolved": "https://registry.npmjs.org/@types/yargs-parser/-/yargs-parser-21.0.3.tgz", + "integrity": "sha512-I4q9QU9MQv4oEOz4tAHJtNz1cwuLxn2F3xcc2iV5WdqLPpUnj30aUuxt1mAxYTG+oe8CZMV/+6rU4S4gRDzqtQ==" + }, + "node_modules/@vercel/ncc": { + "version": "0.36.1", + "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz", + "integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==", + "dev": true, + "bin": { + "ncc": "dist/ncc/cli.js" + } + }, + "node_modules/@zaproxy/actions-common-scans": { + "version": "1.2.1", + "resolved": "https://registry.npmjs.org/@zaproxy/actions-common-scans/-/actions-common-scans-1.2.1.tgz", + "integrity": 
"sha512-lg6Vsp1KCJJboEQ4xsil1YrnJ3w0rV3G6cEMXtnyWn7wnmqLvr+PmukMZBifkQouQzEiEEDYjotQ8KnmV7RZzQ==", + "dependencies": { + "@actions/artifact": "^1.1.0", + "@babel/preset-env": "^7.24.4", + "@octokit/core": "^6.1.2", + "@octokit/plugin-request-log": "^5.3.1", + "@octokit/plugin-retry": "^7.1.1", + "@octokit/plugin-throttling": "^9.2.1", + "@octokit/rest": "^21.0.0", + "@octokit/types": "^13.4.1", + "adm-zip": "^0.5.10", + "babel-jest": "^29.7.0", + "jest-os-detection": "^1.3.1", + "lodash": "^4.17.15" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/adm-zip": { + "version": "0.5.16", + "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.16.tgz", + "integrity": "sha512-TGw5yVi4saajsSEgz25grObGHEUaDrniwvA2qwSC060KfqGPdglhvPMA2lPIoxs3PQIItj2iag35fONcQqgUaQ==", + "engines": { + "node": ">=12.0" + } + }, + "node_modules/ansi-styles": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-3.2.1.tgz", + "integrity": "sha512-VT0ZI6kZRdTh8YyJw3SMbYm/u+NqfsAxEpWO0Pf9sq8/e94WxxOpPKx9FR1FlyCtOVDNOQ+8ntlqFxiRc+r5qA==", + "dependencies": { + "color-convert": "^1.9.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/anymatch": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/anymatch/-/anymatch-3.1.3.tgz", + "integrity": "sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==", + "dependencies": { + "normalize-path": "^3.0.0", + "picomatch": "^2.0.4" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/argparse": { + "version": "1.0.10", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-1.0.10.tgz", + "integrity": "sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==", + "dependencies": { + "sprintf-js": "~1.0.2" + } + }, + "node_modules/babel-jest": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/babel-jest/-/babel-jest-29.7.0.tgz", + "integrity": "sha512-BrvGY3xZSwEcCzKvKsCi2GgHqDqsYkOP4/by5xCgIwGXQxIEh+8ew3gmrE1y7XRR6LHZIj6yLYnUi/mm2KXKBg==", + "dependencies": { + "@jest/transform": "^29.7.0", + "@types/babel__core": "^7.1.14", + "babel-plugin-istanbul": "^6.1.1", + "babel-preset-jest": "^29.6.3", + "chalk": "^4.0.0", + "graceful-fs": "^4.2.9", + "slash": "^3.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.8.0" + } + }, + "node_modules/babel-jest/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/babel-jest/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/babel-jest/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": 
"sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/babel-jest/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/babel-jest/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-jest/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-istanbul": { + "version": "6.1.1", + "resolved": "https://registry.npmjs.org/babel-plugin-istanbul/-/babel-plugin-istanbul-6.1.1.tgz", + "integrity": "sha512-Y1IQok9821cC9onCx5otgFfRm7Lm+I+wwxOx738M/WLPZ9Q42m4IG5W0FNX8WLL2gYMZo3JkuXIH2DOpWM+qwA==", + "dependencies": { + "@babel/helper-plugin-utils": "^7.0.0", + "@istanbuljs/load-nyc-config": "^1.0.0", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-instrument": "^5.0.4", + "test-exclude": "^6.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/babel-plugin-jest-hoist": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-plugin-jest-hoist/-/babel-plugin-jest-hoist-29.6.3.tgz", + "integrity": "sha512-ESAc/RJvGTFEzRwOTT4+lNDk/GNHMkKbNzsvT0qKRfDyyYTskxB5rnU2njIDYVxXCBHHEI1c0YwHob3WaYujOg==", + "dependencies": { + "@babel/template": "^7.3.3", + "@babel/types": "^7.3.3", + "@types/babel__core": "^7.1.14", + "@types/babel__traverse": "^7.0.6" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs2": { + "version": "0.4.11", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs2/-/babel-plugin-polyfill-corejs2-0.4.11.tgz", + "integrity": "sha512-sMEJ27L0gRHShOh5G54uAAPaiCOygY/5ratXuiyb2G46FmlSpc9eFCzYVyDiPxfNbwzA7mYahmjQc5q+CZQ09Q==", + "dependencies": { + "@babel/compat-data": "^7.22.6", + "@babel/helper-define-polyfill-provider": "^0.6.2", + "semver": "^6.3.1" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-corejs3": { + "version": "0.10.6", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-corejs3/-/babel-plugin-polyfill-corejs3-0.10.6.tgz", + "integrity": "sha512-b37+KR2i/khY5sKmWNVQAnitvquQbNdWy6lJdsr0kmquCKEEUgMKK4SboVM3HtfnZilfjr4MMQ7vY58FVWDtIA==", + "dependencies": { + "@babel/helper-define-polyfill-provider": "^0.6.2", + "core-js-compat": "^3.38.0" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-plugin-polyfill-regenerator": { + "version": "0.6.2", + "resolved": "https://registry.npmjs.org/babel-plugin-polyfill-regenerator/-/babel-plugin-polyfill-regenerator-0.6.2.tgz", + "integrity": "sha512-2R25rQZWP63nGwaAswvDazbPXfrM3HwVoBXK6HcqeKrSrL/JqcC/rDcf95l4r7LXLyxDXc8uQDa064GubtCABg==", + "dependencies": { + 
"@babel/helper-define-polyfill-provider": "^0.6.2" + }, + "peerDependencies": { + "@babel/core": "^7.4.0 || ^8.0.0-0 <8.0.0" + } + }, + "node_modules/babel-preset-current-node-syntax": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/babel-preset-current-node-syntax/-/babel-preset-current-node-syntax-1.1.0.tgz", + "integrity": "sha512-ldYss8SbBlWva1bs28q78Ju5Zq1F+8BrqBZZ0VFhLBvhh6lCpC2o3gDJi/5DRLs9FgYZCnmPYIVFU4lRXCkyUw==", + "dependencies": { + "@babel/plugin-syntax-async-generators": "^7.8.4", + "@babel/plugin-syntax-bigint": "^7.8.3", + "@babel/plugin-syntax-class-properties": "^7.12.13", + "@babel/plugin-syntax-class-static-block": "^7.14.5", + "@babel/plugin-syntax-import-attributes": "^7.24.7", + "@babel/plugin-syntax-import-meta": "^7.10.4", + "@babel/plugin-syntax-json-strings": "^7.8.3", + "@babel/plugin-syntax-logical-assignment-operators": "^7.10.4", + "@babel/plugin-syntax-nullish-coalescing-operator": "^7.8.3", + "@babel/plugin-syntax-numeric-separator": "^7.10.4", + "@babel/plugin-syntax-object-rest-spread": "^7.8.3", + "@babel/plugin-syntax-optional-catch-binding": "^7.8.3", + "@babel/plugin-syntax-optional-chaining": "^7.8.3", + "@babel/plugin-syntax-private-property-in-object": "^7.14.5", + "@babel/plugin-syntax-top-level-await": "^7.14.5" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/babel-preset-jest": { + "version": "29.6.3", + "resolved": "https://registry.npmjs.org/babel-preset-jest/-/babel-preset-jest-29.6.3.tgz", + "integrity": "sha512-0B3bhxR6snWXJZtR/RliHTDPRgn1sNHOR0yVtq/IiQFyuOVjFS+wuio/R4gSNkyYmKmJB4wGZv2NZanmKmTnNA==", + "dependencies": { + "babel-plugin-jest-hoist": "^29.6.3", + "babel-preset-current-node-syntax": "^1.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "peerDependencies": { + "@babel/core": "^7.0.0" + } + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + }, + "node_modules/before-after-hook": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-3.0.2.tgz", + "integrity": "sha512-Nik3Sc0ncrMK4UUdXQmAnRtzmNQTAAXmXIopizwZ1W1t8QmfJj+zL4OA2I7XPTPW5z5TDqv4hRo/JzouDJnX3A==" + }, + "node_modules/bottleneck": { + "version": "2.19.5", + "resolved": "https://registry.npmjs.org/bottleneck/-/bottleneck-2.19.5.tgz", + "integrity": "sha512-VHiNCbI1lKdl44tGrhNfU3lup0Tj/ZBMJB5/2ZbNXRCPuRCO7ed2mgcK4r17y+KB2EfuYuRaVlwNbAeaWGSpbw==" + }, + "node_modules/brace-expansion": { + "version": "1.1.11", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", + "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", + "dependencies": { + "balanced-match": "^1.0.0", + "concat-map": "0.0.1" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/browserslist": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz", + "integrity": 
"sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "caniuse-lite": "^1.0.30001646", + "electron-to-chromium": "^1.5.4", + "node-releases": "^2.0.18", + "update-browserslist-db": "^1.1.0" + }, + "bin": { + "browserslist": "cli.js" + }, + "engines": { + "node": "^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7" + } + }, + "node_modules/bser": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/bser/-/bser-2.1.1.tgz", + "integrity": "sha512-gQxTNE/GAfIIrmHLUE3oJyp5FO6HRBfhjnw4/wMmA63ZGDJnWBmgY/lyQBpnDUkGmAhbSe39tx2d/iTOAfglwQ==", + "dependencies": { + "node-int64": "^0.4.0" + } + }, + "node_modules/camelcase": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/camelcase/-/camelcase-5.3.1.tgz", + "integrity": "sha512-L28STB170nwWS63UjtlEOE3dldQApaJXZkOI1uMFfzf3rRuPegHaHesyee+YxQ+W6SvRDQV6UrdOdRiR153wJg==", + "engines": { + "node": ">=6" + } + }, + "node_modules/caniuse-lite": { + "version": "1.0.30001663", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001663.tgz", + "integrity": "sha512-o9C3X27GLKbLeTYZ6HBOLU1tsAcBZsLis28wrVzddShCS16RujjHp9GDHKZqrB3meE0YjhawvMFsGb/igqiPzA==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/caniuse-lite" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ] + }, + "node_modules/chalk": { + "version": "2.4.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-2.4.2.tgz", + "integrity": "sha512-Mti+f9lpJNcwF4tWV8/OrTTtF1gZi+f8FqlyAdouralcFWFQWF2+NgCHShjkCb+IFBLq9buZwE1xckQU4peSuQ==", + "dependencies": { + "ansi-styles": "^3.2.1", + "escape-string-regexp": "^1.0.5", + "supports-color": "^5.3.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/ci-info": { + "version": "3.9.0", + "resolved": "https://registry.npmjs.org/ci-info/-/ci-info-3.9.0.tgz", + "integrity": "sha512-NIxF55hv4nSqQswkAeiOi1r83xy8JldOFDTWiug55KBu9Jnblncd2U6ViHmYgHf01TPZS77NJBhBMKdWj9HQMQ==", + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/sibiraj-s" + } + ], + "engines": { + "node": ">=8" + } + }, + "node_modules/color-convert": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-1.9.3.tgz", + "integrity": "sha512-QfAUtd+vFdAtFQcC8CCyYt1fYWxSqAiK2cSD6zDB8N3cpsEBAvRxp9zOGg6G/SHHJYAT88/az/IuDGALsNVbGg==", + "dependencies": { + "color-name": "1.1.3" + } + }, + "node_modules/color-name": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.3.tgz", + "integrity": "sha512-72fSenhMw2HZMTVHeCA9KCmpEIbzWiQsjN+BHcBbS9vr1mtt+vJjPdksIBNUmKAW8TFUDPJK5SUU3QhE9NEXDw==" + }, + "node_modules/concat-map": { + "version": "0.0.1", + "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", + "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + }, + "node_modules/convert-source-map": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/convert-source-map/-/convert-source-map-2.0.0.tgz", + "integrity": 
"sha512-Kvp459HrV2FEJ1CAsi1Ku+MY3kasH19TFykTz2xWmMeq6bk2NU3XXvfJ+Q61m0xktWwt+1HSYf3JZsTms3aRJg==" + }, + "node_modules/core-js-compat": { + "version": "3.38.1", + "resolved": "https://registry.npmjs.org/core-js-compat/-/core-js-compat-3.38.1.tgz", + "integrity": "sha512-JRH6gfXxGmrzF3tZ57lFx97YARxCXPaMzPo6jELZhv88pBH5VXpQ+y0znKGlFnzuaihqhLbefxSJxWJMPtfDzw==", + "dependencies": { + "browserslist": "^4.23.3" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/core-js" + } + }, + "node_modules/debug": { + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", + "dependencies": { + "ms": "^2.1.3" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/electron-to-chromium": { + "version": "1.5.28", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.28.tgz", + "integrity": "sha512-VufdJl+rzaKZoYVUijN13QcXVF5dWPZANeFTLNy+OSpHdDL5ynXTF35+60RSBbaQYB1ae723lQXHCrf4pyLsMw==" + }, + "node_modules/escalade": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", + "engines": { + "node": ">=6" + } + }, + "node_modules/escape-string-regexp": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/escape-string-regexp/-/escape-string-regexp-1.0.5.tgz", + "integrity": "sha512-vbRorB5FUQWvla16U8R/qgaFIya2qGzwDrNmCZuYKrbdSUMG6I1ZCGQRefkRVhuOkIGVne7BQ35DSfo1qvJqFg==", + "engines": { + "node": ">=0.8.0" + } + }, + "node_modules/esprima": { + "version": "4.0.1", + "resolved": "https://registry.npmjs.org/esprima/-/esprima-4.0.1.tgz", + "integrity": "sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==", + "bin": { + "esparse": "bin/esparse.js", + "esvalidate": "bin/esvalidate.js" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/esutils": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/esutils/-/esutils-2.0.3.tgz", + "integrity": "sha512-kVscqXk4OCp68SZ0dkgEKVi6/8ij300KBWTJq32P/dYeWTSwK41WyTxalN1eRmA5Z9UU/LX9D7FWSmV9SAYx6g==", + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/fast-json-stable-stringify": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/fast-json-stable-stringify/-/fast-json-stable-stringify-2.1.0.tgz", + "integrity": "sha512-lhd/wF+Lk98HZoTCtlVraHtfh5XYijIjalXck7saUtuanSDyLMxnHhSXEDJqHxD7msR8D0uCmqlkwjCV8xvwHw==" + }, + "node_modules/fb-watchman": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/fb-watchman/-/fb-watchman-2.0.2.tgz", + "integrity": "sha512-p5161BqbuCaSnB8jIbzQHOlpgsPmK5rJVDfDKO91Axs5NC1uu3HRQm6wt9cd9/+GtQQIO53JdGXXoyDpTAsgYA==", + "dependencies": { + "bser": "2.1.1" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/find-up": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/find-up/-/find-up-4.1.0.tgz", + "integrity": 
"sha512-PpOwAdQ/YlXQ2vj8a3h8IipDuYRi3wceVQQGYWxNINccq40Anw7BlsEXCMbt1Zt+OLA6Fq9suIpIWD0OsnISlw==", + "dependencies": { + "locate-path": "^5.0.0", + "path-exists": "^4.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/fs.realpath": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", + "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + }, + "node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/function-bind": { + "version": "1.1.2", + "resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz", + "integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==", + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/gensync": { + "version": "1.0.0-beta.2", + "resolved": "https://registry.npmjs.org/gensync/-/gensync-1.0.0-beta.2.tgz", + "integrity": "sha512-3hN7NaskYvMDLQY55gnW3NQ+mesEAepTqlg+VEbj7zzqEMBVNhzcGYYeqFo/TlYz6eQiFcp1HcsCZO+nGgS8zg==", + "engines": { + "node": ">=6.9.0" + } + }, + "node_modules/get-package-type": { + "version": "0.1.0", + "resolved": "https://registry.npmjs.org/get-package-type/-/get-package-type-0.1.0.tgz", + "integrity": "sha512-pjzuKtY64GYfWizNAJ0fr9VqttZkNiK2iS430LtIHzjBEr6bX8Am2zm4sW4Ro5wjWW5cAlRL1qAMTcXbjNAO2Q==", + "engines": { + "node": ">=8.0.0" + } + }, + "node_modules/glob": { + "version": "7.2.3", + "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", + "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", + "deprecated": "Glob versions prior to v9 are no longer supported", + "dependencies": { + "fs.realpath": "^1.0.0", + "inflight": "^1.0.4", + "inherits": "2", + "minimatch": "^3.1.1", + "once": "^1.3.0", + "path-is-absolute": "^1.0.0" + }, + "engines": { + "node": "*" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/globals": { + "version": "11.12.0", + "resolved": "https://registry.npmjs.org/globals/-/globals-11.12.0.tgz", + "integrity": "sha512-WOBp/EEGUiIsJSp7wcv/y6MO+lV9UoncWqxuFfm8eBwzWNgyfBd6Gz+IeKQ9jCmyhoH99g15M3T+QaVHFjizVA==", + "engines": { + "node": ">=4" + } + }, + "node_modules/graceful-fs": { + "version": "4.2.11", + "resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz", + "integrity": "sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==" + }, + "node_modules/has-flag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-3.0.0.tgz", + "integrity": "sha512-sKJf1+ceQBr4SMkvQnBDNDtf4TXpVhVGateu0t918bl30FnbE2m4vNLX+VWe/dpjlb+HugGYzW7uQXH98HPEYw==", + "engines": { + "node": ">=4" + } + }, + "node_modules/hasown": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz", + "integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==", + "dependencies": { + "function-bind": "^1.1.2" + }, + "engines": { + "node": ">= 0.4" + } + }, + "node_modules/imurmurhash": { + "version": "0.1.4", + "resolved": 
"https://registry.npmjs.org/imurmurhash/-/imurmurhash-0.1.4.tgz", + "integrity": "sha512-JmXMZ6wuvDmLiHEml9ykzqO6lwFbof0GG4IkcGaENdCRDDmMVnny7s5HsIgHCbaq0w2MyPhDqkhTUgS2LU2PHA==", + "engines": { + "node": ">=0.8.19" + } + }, + "node_modules/inflight": { + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", + "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", + "deprecated": "This module is not supported, and leaks memory. Do not use it. Check out lru-cache if you want a good and tested way to coalesce async requests by a key value, which is much more comprehensive and powerful.", + "dependencies": { + "once": "^1.3.0", + "wrappy": "1" + } + }, + "node_modules/inherits": { + "version": "2.0.4", + "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", + "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" + }, + "node_modules/is-core-module": { + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", + "dependencies": { + "hasown": "^2.0.2" + }, + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/istanbul-lib-coverage": { + "version": "3.2.2", + "resolved": "https://registry.npmjs.org/istanbul-lib-coverage/-/istanbul-lib-coverage-3.2.2.tgz", + "integrity": "sha512-O8dpsF+r0WV/8MNRKfnmrtCWhuKjxrq2w+jpzBL5UZKTi2LeVWnWOmWRxFlesJONmc+wLAGvKQZEOanko0LFTg==", + "engines": { + "node": ">=8" + } + }, + "node_modules/istanbul-lib-instrument": { + "version": "5.2.1", + "resolved": "https://registry.npmjs.org/istanbul-lib-instrument/-/istanbul-lib-instrument-5.2.1.tgz", + "integrity": "sha512-pzqtp31nLv/XFOzXGuvhCb8qhjmTVo5vjVk19XE4CRlSWz0KoeJ3bw9XsA7nOp9YBf4qHjwBxkDzKcME/J29Yg==", + "dependencies": { + "@babel/core": "^7.12.3", + "@babel/parser": "^7.14.7", + "@istanbuljs/schema": "^0.1.2", + "istanbul-lib-coverage": "^3.2.0", + "semver": "^6.3.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-haste-map": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-haste-map/-/jest-haste-map-29.7.0.tgz", + "integrity": "sha512-fP8u2pyfqx0K1rGn1R9pyE0/KTn+G7PxktWidOBTqFPLYX0b9ksaMFkhK5vrS3DVun09pckLdlx90QthlW7AmA==", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/graceful-fs": "^4.1.3", + "@types/node": "*", + "anymatch": "^3.0.3", + "fb-watchman": "^2.0.0", + "graceful-fs": "^4.2.9", + "jest-regex-util": "^29.6.3", + "jest-util": "^29.7.0", + "jest-worker": "^29.7.0", + "micromatch": "^4.0.4", + "walker": "^1.0.8" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + }, + "optionalDependencies": { + "fsevents": "^2.3.2" + } + }, + "node_modules/jest-os-detection": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/jest-os-detection/-/jest-os-detection-1.3.1.tgz", + "integrity": "sha512-1epFqoue/1J8f9SHOegDE4mY9kJBpetcZAeWtB0SG4XjtkI252mOu4TLJMjl1hDb7v6VefKPeWUweu/TsR7ZcQ==" + }, + "node_modules/jest-regex-util": { + "version": "29.6.3", + 
"resolved": "https://registry.npmjs.org/jest-regex-util/-/jest-regex-util-29.6.3.tgz", + "integrity": "sha512-KJJBsRCyyLNWCNBOvZyRDnAIfUiRJ8v+hOBQYGn8gDyF3UegwiP4gwRR3/SDa42g1YbVycTidUF3rKjyLFDWbg==", + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-util/-/jest-util-29.7.0.tgz", + "integrity": "sha512-z6EbKajIpqGKU56y5KBUgy1dt1ihhQJgWzUlZHArA/+X2ad7Cb5iF+AK1EWVL/Bo7Rz9uurpqw6SiBCefUbCGA==", + "dependencies": { + "@jest/types": "^29.6.3", + "@types/node": "*", + "chalk": "^4.0.0", + "ci-info": "^3.2.0", + "graceful-fs": "^4.2.9", + "picomatch": "^2.2.3" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } + }, + "node_modules/jest-util/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/jest-util/node_modules/chalk": { + "version": "4.1.2", + "resolved": "https://registry.npmjs.org/chalk/-/chalk-4.1.2.tgz", + "integrity": "sha512-oKnbhFyRIXpUuez8iBMmyEa4nbj4IOQyuhc/wy9kY7/WVPcwIO9VA668Pu8RkO7+0G76SLROeyw9CpQ061i4mA==", + "dependencies": { + "ansi-styles": "^4.1.0", + "supports-color": "^7.1.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/chalk?sponsor=1" + } + }, + "node_modules/jest-util/node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/jest-util/node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==" + }, + "node_modules/jest-util/node_modules/has-flag": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" + } + }, + "node_modules/jest-util/node_modules/supports-color": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-7.2.0.tgz", + "integrity": "sha512-qpCAvRl9stuOHveKsn7HncJRvv501qIacKzQlO/+Lwxc9+0q2wLyv4Dfvt80/DPn2pqOBsJdDiogXGR9+OvwRw==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=8" + } }, - "@fastify/busboy": { - "version": "2.1.1", - "resolved": "https://registry.npmjs.org/@fastify/busboy/-/busboy-2.1.1.tgz", - "integrity": "sha512-vBZP4NlzfOlerQTnba4aqZoMhE/a9HY7HRqoOPaETQcSQuWEIyZMHGfVu6w9wGtGK5fED5qRs2DteVCjOH60sA==" + "node_modules/jest-worker": { + "version": "29.7.0", + "resolved": "https://registry.npmjs.org/jest-worker/-/jest-worker-29.7.0.tgz", + "integrity": "sha512-eIz2msL/EzL9UFTFFx7jBTkeZfku0yUAyZZZmJ93H2TYEiroIx2PQjEXcwYtYl8zXCxb+PAmA2hLIt/6ZEkPHw==", + "dependencies": { + "@types/node": "*", + "jest-util": "^29.7.0", + "merge-stream": "^2.0.0", + 
"supports-color": "^8.0.0" + }, + "engines": { + "node": "^14.15.0 || ^16.10.0 || >=18.0.0" + } }, - "@octokit/auth-token": { + "node_modules/jest-worker/node_modules/has-flag": { "version": "4.0.0", - "resolved": "https://registry.npmjs.org/@octokit/auth-token/-/auth-token-4.0.0.tgz", - "integrity": "sha512-tY/msAuJo6ARbK6SPIxZrPBms3xPbfwBrulZe0Wtr/DIY9lje2HeV1uoebShn6mx7SjCHif6EjMvoREj+gZ+SA==" - }, - "@octokit/core": { - "version": "5.1.0", - "resolved": "https://registry.npmjs.org/@octokit/core/-/core-5.1.0.tgz", - "integrity": "sha512-BDa2VAMLSh3otEiaMJ/3Y36GU4qf6GI+VivQ/P41NC6GHcdxpKlqV0ikSZ5gdQsmS3ojXeRx5vasgNTinF0Q4g==", - "requires": { - "@octokit/auth-token": "^4.0.0", - "@octokit/graphql": "^7.0.0", - "@octokit/request": "^8.0.2", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "before-after-hook": "^2.2.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/endpoint": { - "version": "9.0.4", - "resolved": "https://registry.npmjs.org/@octokit/endpoint/-/endpoint-9.0.4.tgz", - "integrity": "sha512-DWPLtr1Kz3tv8L0UvXTDP1fNwM0S+z6EJpRcvH66orY6Eld4XBMCSYsaWp4xIm61jTWxK68BrR7ibO+vSDnZqw==", - "requires": { - "@octokit/types": "^12.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/graphql": { - "version": "7.0.2", - "resolved": "https://registry.npmjs.org/@octokit/graphql/-/graphql-7.0.2.tgz", - "integrity": "sha512-OJ2iGMtj5Tg3s6RaXH22cJcxXRi7Y3EBqbHTBRq+PQAqfaS8f/236fUrWhfSn8P4jovyzqucxme7/vWSSZBX2Q==", - "requires": { - "@octokit/request": "^8.0.1", - "@octokit/types": "^12.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/openapi-types": { - "version": "20.0.0", - "resolved": "https://registry.npmjs.org/@octokit/openapi-types/-/openapi-types-20.0.0.tgz", - "integrity": "sha512-EtqRBEjp1dL/15V7WiX5LJMIxxkdiGJnabzYx5Apx4FkQIFgAfKumXeYAqqJCj1s+BMX4cPFIFC4OLCR6stlnA==" - }, - "@octokit/plugin-paginate-rest": { - "version": "9.2.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-paginate-rest/-/plugin-paginate-rest-9.2.1.tgz", - "integrity": "sha512-wfGhE/TAkXZRLjksFXuDZdmGnJQHvtU/joFQdweXUgzo1XwvBCD4o4+75NtFfjfLK5IwLf9vHTfSiU3sLRYpRw==", - "requires": { - "@octokit/types": "^12.6.0" - } - }, - "@octokit/plugin-rest-endpoint-methods": { - "version": "10.4.1", - "resolved": "https://registry.npmjs.org/@octokit/plugin-rest-endpoint-methods/-/plugin-rest-endpoint-methods-10.4.1.tgz", - "integrity": "sha512-xV1b+ceKV9KytQe3zCVqjg+8GTGfDYwaT1ATU5isiUyVtlVAO3HNdzpS4sr4GBx4hxQ46s7ITtZrAsxG22+rVg==", - "requires": { - "@octokit/types": "^12.6.0" - } - }, - "@octokit/request": { - "version": "8.2.0", - "resolved": "https://registry.npmjs.org/@octokit/request/-/request-8.2.0.tgz", - "integrity": "sha512-exPif6x5uwLqv1N1irkLG1zZNJkOtj8bZxuVHd71U5Ftuxf2wGNvAJyNBcPbPC+EBzwYEbBDdSFb8EPcjpYxPQ==", - "requires": { - "@octokit/endpoint": "^9.0.0", - "@octokit/request-error": "^5.0.0", - "@octokit/types": "^12.0.0", - "universal-user-agent": "^6.0.0" - } - }, - "@octokit/request-error": { - "version": "5.0.1", - "resolved": "https://registry.npmjs.org/@octokit/request-error/-/request-error-5.0.1.tgz", - "integrity": "sha512-X7pnyTMV7MgtGmiXBwmO6M5kIPrntOXdyKZLigNfQWSEQzVxR4a4vo49vJjTWX70mPndj8KhfT4Dx+2Ng3vnBQ==", - "requires": { - "@octokit/types": "^12.0.0", - "deprecation": "^2.0.0", - "once": "^1.4.0" + "resolved": "https://registry.npmjs.org/has-flag/-/has-flag-4.0.0.tgz", + "integrity": "sha512-EykJT/Q1KjTWctppgIAgfSO0tKVuZUjhgMr17kqTumMl6Afv3EISleU7qZUzoXDFTAHTDC4NOoG/ZxU3EvlMPQ==", + "engines": { + "node": ">=8" } }, - 
"@octokit/types": { - "version": "12.6.0", - "resolved": "https://registry.npmjs.org/@octokit/types/-/types-12.6.0.tgz", - "integrity": "sha512-1rhSOfRa6H9w4YwK0yrf5faDaDTb+yLyBUKOCV4xtCDB5VmIPqd/v9yr9o6SAzOAlRxMiRiCic6JVM1/kunVkw==", - "requires": { - "@octokit/openapi-types": "^20.0.0" + "node_modules/jest-worker/node_modules/supports-color": { + "version": "8.1.1", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-8.1.1.tgz", + "integrity": "sha512-MpUEN2OodtUzxvKQl72cUF7RQ5EiHsGvSsVG0ia9c5RbWGL2CI4C7EpPS8UTBIplnlzZiNuV56w+FuNxy3ty2Q==", + "dependencies": { + "has-flag": "^4.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/supports-color?sponsor=1" } }, - "@vercel/ncc": { - "version": "0.36.1", - "resolved": "https://registry.npmjs.org/@vercel/ncc/-/ncc-0.36.1.tgz", - "integrity": "sha512-S4cL7Taa9yb5qbv+6wLgiKVZ03Qfkc4jGRuiUQMQ8HGBD5pcNRnHeYM33zBvJE4/zJGjJJ8GScB+WmTsn9mORw==", - "dev": true + "node_modules/js-tokens": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/js-tokens/-/js-tokens-4.0.0.tgz", + "integrity": "sha512-RdJUflcE3cUzKiMqQgsCu06FPu9UdIJO0beYbPhHN4k6apgJtifcoCtT9bcxOpYBtpD2kCM6Sbzg4CausW/PKQ==" }, - "@zaproxy/actions-common-scans": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/@zaproxy/actions-common-scans/-/actions-common-scans-1.1.0.tgz", - "integrity": "sha512-r+nD3DwcESwd6yJRHOhFGfH30MZL/za8oWfdSy+IP7gxdJtg948eYvli9LlW81FGb7H6sRSfPPWu3CpJuvCygg==", - "requires": { - "@actions/artifact": "^1.1.0", - "@actions/github": "^6.0.0", - "adm-zip": "^0.5.10", - "jest-os-detection": "^1.3.1", - "lodash": "^4.17.15" + "node_modules/js-yaml": { + "version": "3.14.1", + "resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz", + "integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==", + "dependencies": { + "argparse": "^1.0.7", + "esprima": "^4.0.0" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" } }, - "adm-zip": { - "version": "0.5.10", - "resolved": "https://registry.npmjs.org/adm-zip/-/adm-zip-0.5.10.tgz", - "integrity": "sha512-x0HvcHqVJNTPk/Bw8JbLWlWoo6Wwnsug0fnYYro1HBrjxZ3G7/AZk7Ahv8JwDe1uIcz8eBqvu86FuF1POiG7vQ==" - }, - "balanced-match": { - "version": "1.0.2", - "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", - "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==" + "node_modules/jsesc": { + "version": "2.5.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", + "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "bin": { + "jsesc": "bin/jsesc" + }, + "engines": { + "node": ">=4" + } }, - "before-after-hook": { + "node_modules/json5": { "version": "2.2.3", - "resolved": "https://registry.npmjs.org/before-after-hook/-/before-after-hook-2.2.3.tgz", - "integrity": "sha512-NzUnlZexiaH/46WDhANlyR2bXRopNg4F/zuSA3OpZnllCUgRaOF2znDioDWrmbNVsuZk6l9pMquQB38cfBZwkQ==" - }, - "brace-expansion": { - "version": "1.1.11", - "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-1.1.11.tgz", - "integrity": "sha512-iCuPHDFgrHX7H2vEI/5xpz07zSHB00TpugqhmYtVmMO6518mCuRMoOYFldEBl0g187ufozdaHgWKcYFb61qGiA==", - "requires": { - "balanced-match": "^1.0.0", - "concat-map": "0.0.1" + "resolved": "https://registry.npmjs.org/json5/-/json5-2.2.3.tgz", + "integrity": 
"sha512-XmOWe7eyHYH14cLdVPoyg+GOH3rYX++KpzrylJwSW98t3Nk+U8XOl8FWKOgwtzdb8lXGf6zYwDUzeHMWfxasyg==", + "bin": { + "json5": "lib/cli.js" + }, + "engines": { + "node": ">=6" } }, - "concat-map": { - "version": "0.0.1", - "resolved": "https://registry.npmjs.org/concat-map/-/concat-map-0.0.1.tgz", - "integrity": "sha512-/Srv4dswyQNBfohGpz9o6Yb3Gz3SrUDqBH5rTuhGR7ahtlbYKnVxw2bCFMRljaA7EXHaXZ8wsHdodFvbkhKmqg==" + "node_modules/locate-path": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/locate-path/-/locate-path-5.0.0.tgz", + "integrity": "sha512-t7hw9pI+WvuwNJXwk5zVHpyhIqzg2qTlklJOf0mVxGSbe3Fp2VieZcduNYjaLDoy6p9uGpQEGWG87WpMKlNq8g==", + "dependencies": { + "p-locate": "^4.1.0" + }, + "engines": { + "node": ">=8" + } }, - "deprecation": { - "version": "2.3.1", - "resolved": "https://registry.npmjs.org/deprecation/-/deprecation-2.3.1.tgz", - "integrity": "sha512-xmHIy4F3scKVwMsQ4WnVaS8bHOx0DmVwRywosKhaILI0ywMDWPtBSku2HNxRvF7jtwDRsoEwYQSfbxj8b7RlJQ==" + "node_modules/lodash": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", + "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" }, - "fs.realpath": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/fs.realpath/-/fs.realpath-1.0.0.tgz", - "integrity": "sha512-OO0pH2lK6a0hZnAdau5ItzHPI6pUlvI7jMVnxUQRtw4owF2wk8lOSabtGDCTP4Ggrg2MbGnWO9X8K1t4+fGMDw==" + "node_modules/lodash.debounce": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/lodash.debounce/-/lodash.debounce-4.0.8.tgz", + "integrity": "sha512-FT1yDzDYEoYWhnSGnpE/4Kj1fLZkDFyqRb7fNt6FdYOSxlUWAtp42Eh6Wb0rGIv/m9Bgo7x4GhQbm5Ys4SG5ow==" }, - "glob": { - "version": "7.2.3", - "resolved": "https://registry.npmjs.org/glob/-/glob-7.2.3.tgz", - "integrity": "sha512-nFR0zLpU2YCaRxwoCJvL6UvCH2JFyFVIvwTLsIf21AuHlMskA1hhTdk+LlYJtOlYt9v6dvszD2BGRqBL+iQK9Q==", - "requires": { - "fs.realpath": "^1.0.0", - "inflight": "^1.0.4", - "inherits": "2", - "minimatch": "^3.1.1", - "once": "^1.3.0", - "path-is-absolute": "^1.0.0" + "node_modules/lru-cache": { + "version": "5.1.1", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-5.1.1.tgz", + "integrity": "sha512-KpNARQA3Iwv+jTA0utUVVbrh+Jlrr1Fv0e56GGzAFOXN7dk/FviaDW8LHmK52DlcH4WP2n6gI8vN1aesBFgo9w==", + "dependencies": { + "yallist": "^3.0.2" } }, - "inflight": { - "version": "1.0.6", - "resolved": "https://registry.npmjs.org/inflight/-/inflight-1.0.6.tgz", - "integrity": "sha512-k92I/b08q4wvFscXCLvqfsHCrjrF7yiXsQuIVvVE7N82W3+aqpzuUdBbfhWcy/FZR3/4IgflMgKLOsvPDrGCJA==", - "requires": { - "once": "^1.3.0", - "wrappy": "1" + "node_modules/makeerror": { + "version": "1.0.12", + "resolved": "https://registry.npmjs.org/makeerror/-/makeerror-1.0.12.tgz", + "integrity": "sha512-JmqCvUhmt43madlpFzG4BQzG2Z3m6tvQDNKdClZnO3VbIudJYmxsT0FNJMeiB2+JTSlTQTSbU8QdesVmwJcmLg==", + "dependencies": { + "tmpl": "1.0.5" } }, - "inherits": { - "version": "2.0.4", - "resolved": "https://registry.npmjs.org/inherits/-/inherits-2.0.4.tgz", - "integrity": "sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==" - }, - "jest-os-detection": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/jest-os-detection/-/jest-os-detection-1.3.1.tgz", - "integrity": "sha512-1epFqoue/1J8f9SHOegDE4mY9kJBpetcZAeWtB0SG4XjtkI252mOu4TLJMjl1hDb7v6VefKPeWUweu/TsR7ZcQ==" + "node_modules/merge-stream": { + "version": "2.0.0", + "resolved": 
"https://registry.npmjs.org/merge-stream/-/merge-stream-2.0.0.tgz", + "integrity": "sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==" }, - "lodash": { - "version": "4.17.21", - "resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz", - "integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg==" + "node_modules/micromatch": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } }, - "minimatch": { + "node_modules/minimatch": { "version": "3.1.2", "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-3.1.2.tgz", "integrity": "sha512-J7p63hRiAjw1NDEww1W7i37+ByIrOWO5XQQAzZ3VOcL0PNybwpfmV/N05zFAzwQ9USyEcX6t3UO+K5aqBQOIHw==", - "requires": { + "dependencies": { "brace-expansion": "^1.1.7" + }, + "engines": { + "node": "*" + } + }, + "node_modules/ms": { + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==" + }, + "node_modules/node-int64": { + "version": "0.4.0", + "resolved": "https://registry.npmjs.org/node-int64/-/node-int64-0.4.0.tgz", + "integrity": "sha512-O5lz91xSOeoXP6DulyHfllpq+Eg00MWitZIbtPfoSEvqIHdl5gfcY6hYzDWnj0qD5tz52PI08u9qUvSVeUBeHw==" + }, + "node_modules/node-releases": { + "version": "2.0.18", + "resolved": "https://registry.npmjs.org/node-releases/-/node-releases-2.0.18.tgz", + "integrity": "sha512-d9VeXT4SJ7ZeOqGX6R5EM022wpL+eWPooLI+5UpWn2jCT1aosUQEhQP214x33Wkwx3JQMvIm+tIoVOdodFS40g==" + }, + "node_modules/normalize-path": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/normalize-path/-/normalize-path-3.0.0.tgz", + "integrity": "sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==", + "engines": { + "node": ">=0.10.0" } }, - "once": { + "node_modules/once": { "version": "1.4.0", "resolved": "https://registry.npmjs.org/once/-/once-1.4.0.tgz", "integrity": "sha512-lNaJgI+2Q5URQBkccEKHTQOPaXdUxnZZElQTZY0MFUAuaEqe1E+Nyvgdz/aIyNi6Z9MzO5dv1H8n58/GELp3+w==", - "requires": { + "dependencies": { "wrappy": "1" } }, - "path-is-absolute": { + "node_modules/p-limit": { + "version": "2.3.0", + "resolved": "https://registry.npmjs.org/p-limit/-/p-limit-2.3.0.tgz", + "integrity": "sha512-//88mFWSJx8lxCzwdAABTJL2MyWB12+eIY7MDL2SqLmAkeKU9qxRvWuSyTjm3FUmpBEMuFfckAIqEaVGUDxb6w==", + "dependencies": { + "p-try": "^2.0.0" + }, + "engines": { + "node": ">=6" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/p-locate": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/p-locate/-/p-locate-4.1.0.tgz", + "integrity": "sha512-R79ZZ/0wAxKGu3oYMlz8jy/kbhsNrS7SKZ7PxEHBgJ5+F2mtFW2fK2cOtBh1cHYkQsbzFV7I+EoRKe6Yt0oK7A==", + "dependencies": { + "p-limit": "^2.2.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/p-try": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/p-try/-/p-try-2.2.0.tgz", + "integrity": "sha512-R4nPAVTAU0B9D35/Gk3uJf/7XYbQcyohSKdvAxIRSNghFl4e71hVoGnBNQz9cWaXxO2I10KTC+3jMdvvoKw6dQ==", + "engines": { + "node": ">=6" + } + }, + "node_modules/path-exists": { + "version": "4.0.0", + "resolved": 
"https://registry.npmjs.org/path-exists/-/path-exists-4.0.0.tgz", + "integrity": "sha512-ak9Qy5Q7jYb2Wwcey5Fpvg2KoAc/ZIhLSLOSBmRmygPsGwkVVt0fZa0qrtMz+m6tJTAHfZQ8FnmB4MG4LWy7/w==", + "engines": { + "node": ">=8" + } + }, + "node_modules/path-is-absolute": { "version": "1.0.1", "resolved": "https://registry.npmjs.org/path-is-absolute/-/path-is-absolute-1.0.1.tgz", - "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==" + "integrity": "sha512-AVbw3UJ2e9bq64vSaS9Am0fje1Pa8pbGqTTsmXfaIiMpnr5DlDhfJOuLj9Sf95ZPVDAUerDfEk88MPmPe7UCQg==", + "engines": { + "node": ">=0.10.0" + } }, - "rimraf": { - "version": "3.0.2", - "resolved": "https://registry.npmjs.org/rimraf/-/rimraf-3.0.2.tgz", - "integrity": "sha512-JZkJMZkAGFFPP2YqXZXPbMlMBgsxzE8ILs4lMIX/2o0L9UBw9O/Y3o6wFw/i9YLapcUJWwqbi3kdxIPdC62TIA==", - "requires": { - "glob": "^7.1.3" + "node_modules/path-parse": { + "version": "1.0.7", + "resolved": "https://registry.npmjs.org/path-parse/-/path-parse-1.0.7.tgz", + "integrity": "sha512-LDJzPVEEEPR+y48z93A0Ed0yXb8pAByGWo/k5YYdYgpY2/2EsOsksJrq7lOHxryrVOn1ejG6oAp8ahvOIQD8sw==" + }, + "node_modules/picocolors": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==" + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/pirates": { + "version": "4.0.6", + "resolved": "https://registry.npmjs.org/pirates/-/pirates-4.0.6.tgz", + "integrity": "sha512-saLsH7WeYYPiD25LDuLRRY/i+6HaPYr6G1OUlN39otzkSTxKnubR9RTxS3/Kk50s1g2JTgFwWQDQyplC5/SHZg==", + "engines": { + "node": ">= 6" + } + }, + "node_modules/regenerate": { + "version": "1.4.2", + "resolved": "https://registry.npmjs.org/regenerate/-/regenerate-1.4.2.tgz", + "integrity": "sha512-zrceR/XhGYU/d/opr2EKO7aRHUeiBI8qjtfHqADTwZd6Szfy16la6kqD0MIUs5z5hx6AaKa+PixpPrR289+I0A==" + }, + "node_modules/regenerate-unicode-properties": { + "version": "10.2.0", + "resolved": "https://registry.npmjs.org/regenerate-unicode-properties/-/regenerate-unicode-properties-10.2.0.tgz", + "integrity": "sha512-DqHn3DwbmmPVzeKj9woBadqmXxLvQoQIwu7nopMc72ztvxVmVk2SBhSnx67zuye5TP+lJsb/TBQsjLKhnDf3MA==", + "dependencies": { + "regenerate": "^1.4.2" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regenerator-runtime": { + "version": "0.14.1", + "resolved": "https://registry.npmjs.org/regenerator-runtime/-/regenerator-runtime-0.14.1.tgz", + "integrity": "sha512-dYnhHh0nJoMfnkZs6GmmhFknAGRrLznOu5nc9ML+EJxGvrx6H7teuevqVqCuPcPK//3eDrrjQhehXVx9cnkGdw==" + }, + "node_modules/regenerator-transform": { + "version": "0.15.2", + "resolved": "https://registry.npmjs.org/regenerator-transform/-/regenerator-transform-0.15.2.tgz", + "integrity": "sha512-hfMp2BoF0qOk3uc5V20ALGDS2ddjQaLrdl7xrGXvAIow7qeWRM2VA2HuCHkUKk9slq3VwEwLNK3DFBqDfPGYtg==", + "dependencies": { + "@babel/runtime": "^7.8.4" + } + }, + "node_modules/regexpu-core": { + "version": "5.3.2", + "resolved": "https://registry.npmjs.org/regexpu-core/-/regexpu-core-5.3.2.tgz", + "integrity": "sha512-RAM5FlZz+Lhmo7db9L298p2vHP5ZywrVXmVXpmAD9GuL5MPH6t9ROw1iA/wfHkQ76Qe7AaPF0nGuim96/IrQMQ==", + "dependencies": { + 
"@babel/regjsgen": "^0.8.0", + "regenerate": "^1.4.2", + "regenerate-unicode-properties": "^10.1.0", + "regjsparser": "^0.9.1", + "unicode-match-property-ecmascript": "^2.0.0", + "unicode-match-property-value-ecmascript": "^2.1.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/regjsparser": { + "version": "0.9.1", + "resolved": "https://registry.npmjs.org/regjsparser/-/regjsparser-0.9.1.tgz", + "integrity": "sha512-dQUtn90WanSNl+7mQKcXAgZxvUe7Z0SqXlgzv0za4LwiUhyzBC58yQO3liFoUgu8GiJVInAhJjkj1N0EtQ5nkQ==", + "dependencies": { + "jsesc": "~0.5.0" + }, + "bin": { + "regjsparser": "bin/parser" + } + }, + "node_modules/regjsparser/node_modules/jsesc": { + "version": "0.5.0", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-0.5.0.tgz", + "integrity": "sha512-uZz5UnB7u4T9LvwmFqXii7pZSouaRPorGs5who1Ip7VO0wxanFvBL7GkM6dTHlgX+jhBApRetaWpnDabOeTcnA==", + "bin": { + "jsesc": "bin/jsesc" + } + }, + "node_modules/resolve": { + "version": "1.22.8", + "resolved": "https://registry.npmjs.org/resolve/-/resolve-1.22.8.tgz", + "integrity": "sha512-oKWePCxqpd6FlLvGV1VU0x7bkPmmCNolxzjMf4NczoDnQcIWrAF+cPtZn5i6n+RfD2d9i0tzpKnG6Yk168yIyw==", + "dependencies": { + "is-core-module": "^2.13.0", + "path-parse": "^1.0.7", + "supports-preserve-symlinks-flag": "^1.0.0" + }, + "bin": { + "resolve": "bin/resolve" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/resolve-from": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/resolve-from/-/resolve-from-5.0.0.tgz", + "integrity": "sha512-qYg9KP24dD5qka9J47d0aVky0N+b4fTU89LN9iDnjB5waksiC49rvMB0PrUJQGoTmH50XPiqOvAjDfaijGxYZw==", + "engines": { + "node": ">=8" + } + }, + "node_modules/semver": { + "version": "6.3.1", + "resolved": "https://registry.npmjs.org/semver/-/semver-6.3.1.tgz", + "integrity": "sha512-BR7VvDCVHO+q2xBEWskxS6DJE1qRnb7DxzUrogb71CWoSficBxYsiAGd+Kl0mmq/MprG9yArRkyrQxTO6XjMzA==", + "bin": { + "semver": "bin/semver.js" + } + }, + "node_modules/signal-exit": { + "version": "3.0.7", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-3.0.7.tgz", + "integrity": "sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==" + }, + "node_modules/slash": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/slash/-/slash-3.0.0.tgz", + "integrity": "sha512-g9Q1haeby36OSStwb4ntCGGGaKsaVSjQ68fBxoQcutl5fS1vuY18H3wSt3jFyFtrkx+Kz0V1G85A4MyAdDMi2Q==", + "engines": { + "node": ">=8" + } + }, + "node_modules/sprintf-js": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/sprintf-js/-/sprintf-js-1.0.3.tgz", + "integrity": "sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==" + }, + "node_modules/supports-color": { + "version": "5.5.0", + "resolved": "https://registry.npmjs.org/supports-color/-/supports-color-5.5.0.tgz", + "integrity": "sha512-QjVjwdXIt408MIiAqCX4oUKsgU2EqAGzs2Ppkm4aQYbjm+ZEWEcW4SfFNTr4uMNZma0ey4f5lgLrkB0aX0QMow==", + "dependencies": { + "has-flag": "^3.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/supports-preserve-symlinks-flag": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/supports-preserve-symlinks-flag/-/supports-preserve-symlinks-flag-1.0.0.tgz", + "integrity": "sha512-ot0WnXS9fgdkgIcePe6RHNk1WA8+muPa6cSjeR3V8K27q9BB1rTE3R1p7Hv0z1ZyAc8s6Vvv8DIyWf681MAt0w==", + "engines": { + "node": ">= 0.4" + }, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/test-exclude": { + "version": 
"6.0.0", + "resolved": "https://registry.npmjs.org/test-exclude/-/test-exclude-6.0.0.tgz", + "integrity": "sha512-cAGWPIyOHU6zlmg88jwm7VRyXnMN7iV68OGAbYDk/Mh/xC/pzVPlQtY6ngoIH/5/tciuhGfvESU8GrHrcxD56w==", + "dependencies": { + "@istanbuljs/schema": "^0.1.2", + "glob": "^7.1.4", + "minimatch": "^3.0.4" + }, + "engines": { + "node": ">=8" } }, - "tmp": { - "version": "0.2.1", - "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.1.tgz", - "integrity": "sha512-76SUhtfqR2Ijn+xllcI5P1oyannHNHByD80W1q447gU3mp9G9PSpGdWmjUOHRDPiHYacIk66W7ubDTuPF3BEtQ==", - "requires": { - "rimraf": "^3.0.0" + "node_modules/tmp": { + "version": "0.2.3", + "resolved": "https://registry.npmjs.org/tmp/-/tmp-0.2.3.tgz", + "integrity": "sha512-nZD7m9iCPC5g0pYmcaxogYKggSfLsdxl8of3Q/oIbqCqLLIO9IAF0GWjX1z9NZRHPiXv8Wex4yDCaZsgEw0Y8w==", + "engines": { + "node": ">=14.14" } }, - "tmp-promise": { + "node_modules/tmp-promise": { "version": "3.0.3", "resolved": "https://registry.npmjs.org/tmp-promise/-/tmp-promise-3.0.3.tgz", "integrity": "sha512-RwM7MoPojPxsOBYnyd2hy0bxtIlVrihNs9pj5SUvY8Zz1sQcQG2tG1hSr8PDxfgEB8RNKDhqbIlroIarSNDNsQ==", - "requires": { + "dependencies": { "tmp": "^0.2.0" } }, - "tunnel": { + "node_modules/tmpl": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/tmpl/-/tmpl-1.0.5.tgz", + "integrity": "sha512-3f0uOEAQwIqGuWW2MVzYg8fV/QNnc/IpuJNG837rLuczAaLVHslWHZQj4IGiEl5Hs3kkbhwL9Ab7Hrsmuj+Smw==" + }, + "node_modules/to-fast-properties": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/to-fast-properties/-/to-fast-properties-2.0.0.tgz", + "integrity": "sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==", + "engines": { + "node": ">=4" + } + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/tunnel": { "version": "0.0.6", "resolved": "https://registry.npmjs.org/tunnel/-/tunnel-0.0.6.tgz", - "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==" + "integrity": "sha512-1h/Lnq9yajKY2PEbBadPXj3VxsDDu844OnaAo52UVmIzIvwwtBPIuNvkjuzBlTWpfJyUbG3ez0KSBibQkj4ojg==", + "engines": { + "node": ">=0.6.11 <=0.7.0 || >=0.7.3" + } }, - "undici": { + "node_modules/undici": { "version": "5.28.4", "resolved": "https://registry.npmjs.org/undici/-/undici-5.28.4.tgz", "integrity": "sha512-72RFADWFqKmUb2hmmvNODKL3p9hcB6Gt2DOQMis1SEBaV6a4MH8soBvzg+95CYhCKPFedut2JY9bMfrDl9D23g==", - "requires": { + "dependencies": { "@fastify/busboy": "^2.0.0" + }, + "engines": { + "node": ">=14.0" + } + }, + "node_modules/undici-types": { + "version": "6.19.8", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-6.19.8.tgz", + "integrity": "sha512-ve2KP6f/JnbPBFyobGHuerC9g1FYGn/F8n1LWTwNxCEzd6IfqTwUQcNXgEtmmQ6DlRrC1hrSrBnCZPokRrDHjw==" + }, + "node_modules/unicode-canonical-property-names-ecmascript": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/unicode-canonical-property-names-ecmascript/-/unicode-canonical-property-names-ecmascript-2.0.1.tgz", + "integrity": "sha512-dA8WbNeb2a6oQzAQ55YlT5vQAWGV9WXOsi3SskE3bcCdM0P4SDd+24zS/OCacdRq5BkdsRj9q3Pg6YyQoxIGqg==", + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-ecmascript": { + "version": "2.0.0", + 
"resolved": "https://registry.npmjs.org/unicode-match-property-ecmascript/-/unicode-match-property-ecmascript-2.0.0.tgz", + "integrity": "sha512-5kaZCrbp5mmbz5ulBkDkbY0SsPOjKqVS35VpL9ulMPfSl0J0Xsm+9Evphv9CoIZFwre7aJoa94AY6seMKGVN5Q==", + "dependencies": { + "unicode-canonical-property-names-ecmascript": "^2.0.0", + "unicode-property-aliases-ecmascript": "^2.0.0" + }, + "engines": { + "node": ">=4" + } + }, + "node_modules/unicode-match-property-value-ecmascript": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/unicode-match-property-value-ecmascript/-/unicode-match-property-value-ecmascript-2.2.0.tgz", + "integrity": "sha512-4IehN3V/+kkr5YeSSDDQG8QLqO26XpL2XP3GQtqwlT/QYSECAwFztxVHjlbh0+gjJ3XmNLS0zDsbgs9jWKExLg==", + "engines": { + "node": ">=4" } }, - "universal-user-agent": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-6.0.1.tgz", - "integrity": "sha512-yCzhz6FN2wU1NiiQRogkTQszlQSlpWaw8SvVegAc+bDxbzHgh1vX8uIe8OYyMH6DwH+sdTJsgMl36+mSMdRJIQ==" + "node_modules/unicode-property-aliases-ecmascript": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/unicode-property-aliases-ecmascript/-/unicode-property-aliases-ecmascript-2.1.0.tgz", + "integrity": "sha512-6t3foTQI9qne+OZoVQB/8x8rk2k1eVy1gRXhV3oFQ5T6R1dqQ1xtin3XqSlx3+ATBkliTaR/hHyJBm+LVPNM8w==", + "engines": { + "node": ">=4" + } + }, + "node_modules/universal-user-agent": { + "version": "7.0.2", + "resolved": "https://registry.npmjs.org/universal-user-agent/-/universal-user-agent-7.0.2.tgz", + "integrity": "sha512-0JCqzSKnStlRRQfCdowvqy3cy0Dvtlb8xecj/H8JFZuCze4rwjPZQOgvFvn0Ws/usCHQFGpyr+pB9adaGwXn4Q==" + }, + "node_modules/update-browserslist-db": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", + "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/browserslist" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/browserslist" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "escalade": "^3.1.2", + "picocolors": "^1.0.1" + }, + "bin": { + "update-browserslist-db": "cli.js" + }, + "peerDependencies": { + "browserslist": ">= 4.21.0" + } }, - "uuid": { + "node_modules/uuid": { "version": "8.3.2", "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==" + "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/walker": { + "version": "1.0.8", + "resolved": "https://registry.npmjs.org/walker/-/walker-1.0.8.tgz", + "integrity": "sha512-ts/8E8l5b7kY0vlWLewOkDXMmPdLcVV4GmOQLyxuSswIJsweeFZtAsMF7k1Nszz+TYBQrlYRmzOnr398y1JemQ==", + "dependencies": { + "makeerror": "1.0.12" + } }, - "wrappy": { + "node_modules/wrappy": { "version": "1.0.2", "resolved": "https://registry.npmjs.org/wrappy/-/wrappy-1.0.2.tgz", "integrity": "sha512-l4Sp/DRseor9wL6EvV2+TuQn63dMkPjZ/sp9XkghTEbV9KlPS1xUsZ3u7/IQO4wxtcFB4bgpQPRcR3QCvezPcQ==" + }, + "node_modules/write-file-atomic": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/write-file-atomic/-/write-file-atomic-4.0.2.tgz", + "integrity": 
"sha512-7KxauUdBmSdWnmpaGFg+ppNjKF8uNLry8LyzjauQDOVONfFLNKrKvQOxZ/VuTIcS/gge/YNahf5RIIQWTSarlg==", + "dependencies": { + "imurmurhash": "^0.1.4", + "signal-exit": "^3.0.7" + }, + "engines": { + "node": "^12.13.0 || ^14.15.0 || >=16.0.0" + } + }, + "node_modules/yallist": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/yallist/-/yallist-3.1.1.tgz", + "integrity": "sha512-a4UGQaWPH59mOXUYnAG2ewncQS4i4F43Tv3JoAM+s2VDAmS9NsK8GpDMLrCHPksFT7h3K6TOoUNn2pb7RoXx4g==" } } } diff --git a/package.json b/package.json index 5fd177b..4cbd688 100644 --- a/package.json +++ b/package.json @@ -23,7 +23,7 @@ "dependencies": { "@actions/core": "^1.10.0", "@actions/exec": "^1.1.1", - "@zaproxy/actions-common-scans": "^1.1.0", + "@zaproxy/actions-common-scans": "^1.2.1", "lodash": "^4.17.21" }, "devDependencies": {